diff --git a/Makefile b/Makefile index 31b8076c84b..6e1a8ca434a 100644 --- a/Makefile +++ b/Makefile @@ -205,7 +205,7 @@ reformat: git_hooks $(WRAPAPP) dune exec --profile=$(DUNE_PROFILE) src/app/reformat/reformat.exe -- -path . reformat-diff: - ocamlformat --doc-comments=before --inplace $(shell git status -s | cut -c 4- | grep '\.mli\?$$' | while IFS= read -r f; do stat "$$f" >/dev/null 2>&1 && echo "$$f"; done) || true + @ocamlformat --doc-comments=before --inplace $(shell git status -s | cut -c 4- | grep '\.mli\?$$' | while IFS= read -r f; do stat "$$f" >/dev/null 2>&1 && echo "$$f"; done) || true check-format: $(WRAPAPP) dune exec --profile=$(DUNE_PROFILE) src/app/reformat/reformat.exe -- -path . -check diff --git a/automation/services/watchdog/metrics.py b/automation/services/watchdog/metrics.py index 199cbc8c6b2..d7a6a16e18a 100644 --- a/automation/services/watchdog/metrics.py +++ b/automation/services/watchdog/metrics.py @@ -94,6 +94,10 @@ def check_google_storage_bucket(v1, namespace, recent_google_bucket_blocks): newest_age = min([ age for age in blob_ages ]) + end = time.time() + + print("Checking google storage bucket took {} seconds".format(end-now)) + recent_google_bucket_blocks.set(newest_age) # ======================================================================== @@ -125,6 +129,7 @@ def get_chain_id(v1, namespace): def check_seed_list_up(v1, namespace, seeds_reachable): print('checking seed list up') + start = time.time() seed_peers_list_url = os.environ.get('SEED_PEERS_URL') @@ -150,6 +155,8 @@ def check_seed_list_up(v1, namespace, seeds_reachable): res = json.loads(val) #checklibp2p returns whether or not the connection to a peerID errored fraction_up = sum(res.values())/len(res.values()) + end = time.time() + print("checking seed connection took {} seconds".format(end-start)) seeds_reachable.set(fraction_up) # ======================================================================== diff --git 
a/automation/services/watchdog/node_status_metrics.py b/automation/services/watchdog/node_status_metrics.py index 18ec2103e92..01a13793e33 100644 --- a/automation/services/watchdog/node_status_metrics.py +++ b/automation/services/watchdog/node_status_metrics.py @@ -24,6 +24,8 @@ def peer_to_multiaddr(peer): def collect_node_status_metrics(v1, namespace, nodes_synced_near_best_tip, nodes_synced, nodes_queried, nodes_responded, seed_nodes_queried, seed_nodes_responded, nodes_errored, context_deadline_exceeded, failed_security_protocol_negotiation, connection_refused_errors, size_limit_exceeded_errors, timed_out_errors, stream_reset_errors, other_connection_errors, prover_errors): print('collecting node status metrics') + start = time.time() + pods = v1.list_namespaced_pod(namespace, watch=False) pod_names = [ p['metadata']['name'] for p in pods.to_dict()['items'] if p['status']['phase'] == 'Running' ] @@ -82,6 +84,9 @@ def collect_node_status_metrics(v1, namespace, nodes_synced_near_best_tip, nodes other_connection_errors.set(err_others) nodes_synced.set(synced_fraction) + end = time.time() + print("Updating Coda_watchdog_nodes_synced took {} seconds".format(end-start)) + # ------------------------------------------------- # TODO: prover_erros @@ -145,6 +150,9 @@ def get_deepest_child(p): print("Number of peers with 'Synced' status: {}\nPeers not synced near the best tip: {}".format(sum(all_synced_peers), peers_out_of_sync)) + end2 = time.time() + print("Updating Coda_watchdog_nodes_synced_near_best_tip took {} seconds".format(end2-end)) + nodes_synced_near_best_tip.set(synced_near_best_tip_fraction) # ======================================================================== @@ -155,6 +163,8 @@ def collect_node_status(v1, namespace, seeds, pods, seed_nodes_responded, seed_n all_resps = [] peer_set = set() + start = time.time() + def contains_error(resp): try: resp['error'] @@ -203,6 +213,8 @@ def add_resp(raw, peers, seed, seed_node_responded, seed_node_queried): 
valid_resps = peer_table.values() + end = time.time() + print("Node status collection took {} seconds".format(end-start)) return (len(peer_set), valid_resps, error_resps) diff --git a/automation/services/watchdog/version.txt b/automation/services/watchdog/version.txt index 5f749c13663..75274d83294 100644 --- a/automation/services/watchdog/version.txt +++ b/automation/services/watchdog/version.txt @@ -1 +1 @@ -0.4.11 +0.4.12 diff --git a/buildkite/src/Jobs/Release/ArchiveNodeArtifact.dhall b/buildkite/src/Jobs/Release/ArchiveNodeArtifact.dhall index e8023a2e71d..eff8a24ac52 100644 --- a/buildkite/src/Jobs/Release/ArchiveNodeArtifact.dhall +++ b/buildkite/src/Jobs/Release/ArchiveNodeArtifact.dhall @@ -31,6 +31,7 @@ Pipeline.build S.strictlyStart (S.contains "src"), S.strictlyStart (S.contains "scripts/archive"), S.strictlyStart (S.contains "automation"), + S.strictlyStart (S.contains "dockerfiles"), S.strictlyStart (S.contains "buildkite/src/Jobs/Release/ArchiveNodeArtifact") ], path = "Release", diff --git a/dockerfiles/Dockerfile-mina-archive b/dockerfiles/Dockerfile-mina-archive index 0dad5877e3f..4dc18972ddf 100644 --- a/dockerfiles/Dockerfile-mina-archive +++ b/dockerfiles/Dockerfile-mina-archive @@ -13,7 +13,7 @@ RUN echo "Building image with version $deb_version" COPY scripts/archive-entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh -COPY --chown=${UID} scripts/puppeteer/* / +COPY --chown=${UID} puppeteer-context/* / RUN chmod +x /mina_daemon_puppeteer.py /find_puppeteer.sh /start.sh /stop.sh # Workaround terrible postgresql package requirements with man diff --git a/dockerfiles/Dockerfile-mina-daemon b/dockerfiles/Dockerfile-mina-daemon index c0224262dad..205b2b17777 100644 --- a/dockerfiles/Dockerfile-mina-daemon +++ b/dockerfiles/Dockerfile-mina-daemon @@ -68,7 +68,7 @@ COPY --chown=${UID} scripts/daemon-entrypoint.sh /entrypoint.sh # Solve this by marking scripts executable in git COPY --chown=${UID} ./auxiliary_entrypoints /entrypoint.d -COPY 
--chown=${UID} scripts/puppeteer/* / +COPY --chown=${UID} puppeteer-context/* / RUN chmod +x /mina_daemon_puppeteer.py /find_puppeteer.sh /start.sh /stop.sh ENV CODA_TIME_OFFSET 0 diff --git a/dockerfiles/scripts/puppeteer/find_puppeteer.sh b/dockerfiles/puppeteer-context/find_puppeteer.sh similarity index 100% rename from dockerfiles/scripts/puppeteer/find_puppeteer.sh rename to dockerfiles/puppeteer-context/find_puppeteer.sh diff --git a/dockerfiles/puppeteer-context/mina_daemon_puppeteer.py b/dockerfiles/puppeteer-context/mina_daemon_puppeteer.py index e3f1a7dbd2e..9cef949fe22 100644 --- a/dockerfiles/puppeteer-context/mina_daemon_puppeteer.py +++ b/dockerfiles/puppeteer-context/mina_daemon_puppeteer.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + # This is a temporary hack for the integration test framework to be able to stop # and start nodes dyamically in a kubernetes environment. This script takes # mina arguments and will start and monitor a mina process with those arguments. @@ -35,16 +37,17 @@ def do_GET(s): s.end_headers() s.wfile.write(b'The daemon is currently offline.
This broadcast was brought to you by the puppeteer mock server') -# just nooping on this signal suffices, since merely trapping it will cause -# `signal.pause()` to resume def handle_child_termination(signum, frame): - pass + print("puppeteer script: SIGCHLD received " ) + os.waitpid(-1, os.WNOHANG) def handle_start_request(signum, frame): + print("puppeteer script: SIGUSR2 handle_start_request received, setting active_daemon_request to True" ) global active_daemon_request active_daemon_request = True def handle_stop_request(signum, frame): + print("puppeteer script: SIGUSR1 handle_stop_request received, setting inactive_daemon_request to True" ) global inactive_daemon_request inactive_daemon_request = True @@ -68,6 +71,7 @@ def wait_for_pid(pid): time.sleep(0.25) def start_daemon(): + print("puppeteer script: start_daemon called" ) global mina_process with open('mina.log', 'a') as f: mina_process = subprocess.Popen( @@ -75,16 +79,23 @@ stdout=f, stderr=subprocess.STDOUT ) + print("puppeteer script: touching /root/daemon-active" ) Path('daemon-active').touch() def stop_daemon(): + print("puppeteer script: stop_daemon called" ) global mina_process mina_process.send_signal(signal.SIGTERM) child_pids = get_child_processes(mina_process.pid) + print("stop_daemon, child_pids: " ) + print(*child_pids) mina_process.wait() for child_pid in child_pids: + print("waiting for child_pid: " + str(child_pid) ) wait_for_pid(child_pid) + print("done waiting for: " + str(child_pid) ) + print("puppeteer script: removing /root/daemon-active" ) Path('daemon-active').unlink() mina_process = None @@ -92,31 +103,41 @@ # however, you would need to do a lot of starts and stops to hit this condition def inactive_loop(): + print("puppeteer script: inactive_loop beginning" ) global active_daemon_request - + server = None try: server = HTTPServer(('0.0.0.0', 3085), MockRequestHandler) while True: server.handle_request() signal.sigtimedwait(ALL_SIGNALS, 0)
if active_daemon_request: + print("inactive_loop: active_daemon_request received, starting daemon" ) start_daemon() active_daemon_request = False break + except Exception as err: + print("puppeteer script: inactive_loop experienced an error: ") + print(err) finally: - server.shutdown() - + if server != None: + server.server_close() + print("puppeteer script: mock server closed. inactive_loop terminating" ) + active_loop() def active_loop(): + print("puppeteer script: active_loop beginning" ) global mina_process, inactive_daemon_request while True: signal.pause() status = mina_process.poll() if status != None: + print("active_loop: status not None, cleaning up and exiting") cleanup_and_exit(status) elif inactive_daemon_request: + print("active_loop: inactive daemon request detected, stopping daemon") stop_daemon() inactive_daemon_request = False break @@ -130,6 +151,7 @@ def cleanup_and_exit(status): sys.exit(status) if __name__ == '__main__': + print("puppeteer script: starting...") signal.signal(signal.SIGCHLD, handle_child_termination) signal.signal(signal.SIGUSR1, handle_stop_request) signal.signal(signal.SIGUSR2, handle_start_request) @@ -145,4 +167,4 @@ def cleanup_and_exit(status): ['tail', '-q', '-f', 'mina.log', '-f', '.mina-config/mina-prover.log', '-f', '.mina-config/mina-verifier.log', '-f' , '.mina-config/mina-best-tip.log'] ) - inactive_loop() + inactive_loop() \ No newline at end of file diff --git a/dockerfiles/puppeteer-context/start.sh b/dockerfiles/puppeteer-context/start.sh new file mode 100644 index 00000000000..609157c17e1 --- /dev/null +++ b/dockerfiles/puppeteer-context/start.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +kill -s SIGUSR2 $(/find_puppeteer.sh) +while [ ! 
-f /root/daemon-active ]; do sleep 1; done diff --git a/dockerfiles/scripts/puppeteer/stop.sh b/dockerfiles/puppeteer-context/stop.sh similarity index 50% rename from dockerfiles/scripts/puppeteer/stop.sh rename to dockerfiles/puppeteer-context/stop.sh index dc79d327237..a430fd00569 100644 --- a/dockerfiles/scripts/puppeteer/stop.sh +++ b/dockerfiles/puppeteer-context/stop.sh @@ -1,4 +1,4 @@ #!/bin/bash kill -s SIGUSR1 $(/find_puppeteer.sh) -while [ -f daemon-active ]; do sleep 1; done \ No newline at end of file +while [ -f /root/daemon-active ]; do sleep 1; done diff --git a/dockerfiles/scripts/cron_job_dump_ledger.sh b/dockerfiles/scripts/cron_job_dump_ledger.sh index ccd707006f1..f34abcd65b9 100755 --- a/dockerfiles/scripts/cron_job_dump_ledger.sh +++ b/dockerfiles/scripts/cron_job_dump_ledger.sh @@ -38,23 +38,23 @@ done mina ledger export next-epoch-ledger > next_epoch_ledger.json echo "next epoch ledger dumped!" -# DATE="$(date +%F_%H%M)" +DATE="$(date +%F_%H%M)" #extract the epoch number out of mina client status. 
if the output format of mina client status changes, then this is gonna break EPOCHNUM="$(mina client status | grep "Best tip consensus time" | grep -o "epoch=[0-9]*" | sed "s/[^0-9]*//g" )" # rename the file in the required file name format STAKING_HASH="$(mina ledger hash --ledger-file staking_epoch_ledger.json)" STAKING_MD5="$(md5sum staking_epoch_ledger.json | cut -d " " -f 1 )" -LEDGER_FILENAME=staking-"$EPOCHNUM"-"$STAKING_HASH"-"$STAKING_MD5".json +LEDGER_FILENAME=staking-"$EPOCHNUM"-"$STAKING_HASH"-"$STAKING_MD5"-"$DATE".json mv ./staking_epoch_ledger.json ./$LEDGER_FILENAME NEXT_STAKING_HASH="$(mina ledger hash --ledger-file next_epoch_ledger.json)" NEXT_STAKING_MD5="$(md5sum next_epoch_ledger.json | cut -d " " -f 1 )" -NEXT_FILENAME=next-staking-"$EPOCHNUM"-"$NEXT_STAKING_HASH"-"$NEXT_STAKING_MD5".json +NEXT_FILENAME=next-staking-"$EPOCHNUM"-"$NEXT_STAKING_HASH"-"$NEXT_STAKING_MD5"-"$DATE".json mv ./next_epoch_ledger.json ./$NEXT_FILENAME EXPORTED_LOGS="local-logs" -LOGS_FILENAME="daemon-logs-epoch-$EPOCHNUM.tgz" +LOGS_FILENAME="daemon-logs-epoch-$EPOCHNUM-"$DATE".tgz" mina client export-local-logs --tarfile $EXPORTED_LOGS mv /root/.mina-config/exported_logs/$EXPORTED_LOGS.tar.gz $LOGS_FILENAME diff --git a/dockerfiles/scripts/puppeteer/mina_daemon_puppeteer.py b/dockerfiles/scripts/puppeteer/mina_daemon_puppeteer.py deleted file mode 100644 index 60db225450d..00000000000 --- a/dockerfiles/scripts/puppeteer/mina_daemon_puppeteer.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python3 - -# This is a temporary hack for the integration test framework to be able to stop -# and start nodes dyamically in a kubernetes environment. This script takes -# mina arguments and will start and monitor a mina process with those arguments. -# If a SIGUSR1 signal is sent, it will stop this process, and if a SIGUSR2 is -# sent, it will resume the process. Since this script is a hack, there are some -# shortcomings of the script. 
Most notably: -# - the script will stack overflow after a lot of restarts are issued -# - the script does not attempt to handle errors from the tail child process - -import os -from pathlib import Path -import signal -import subprocess -import sys -import time -from socketserver import TCPServer -from http.server import HTTPServer, BaseHTTPRequestHandler - -# all signals handled by this program -ALL_SIGNALS = [signal.SIGCHLD, signal.SIGUSR1, signal.SIGUSR2] - -active_daemon_request = False -inactive_daemon_request = False -tail_process = None -mina_process = None -daemon_args = sys.argv[1:] if len(sys.argv) > 1 else [] - -TCPServer.allow_reuse_address = True -HTTPServer.timeout = 1 - -class MockRequestHandler(BaseHTTPRequestHandler): - def do_GET(s): - s.send_response(200) - s.send_header('Content-Type', 'text/html') - s.end_headers() - s.wfile.write(b'The daemon is currently offline.
This broadcast was brought to you by the puppeteer mock server') - -# just nooping on this signal suffices, since merely trapping it will cause -# `signal.pause()` to resume -def handle_child_termination(signum, frame): - pass - -def handle_start_request(signum, frame): - global active_daemon_request - active_daemon_request = True - -def handle_stop_request(signum, frame): - global inactive_daemon_request - inactive_daemon_request = True - -def get_child_processes(pid): - result = subprocess.run( - ['ps', '-o', 'pid=', '--ppid', str(pid)], - stdout=subprocess.PIPE - ) - output = result.stdout.decode('ascii') - return list(map(int, filter(lambda s: len(s) > 0, output.split(' ')))) - -def pid_is_running(pid): - try: - os.kill(pid, 0) - except ProcessLookupError: - return False - return True - -def wait_for_pid(pid): - while pid_is_running(pid): - time.sleep(0.25) - -def start_daemon(): - global mina_process - with open('mina.log', 'a') as f: - mina_process = subprocess.Popen( - ['mina'] + daemon_args, - stdout=f, - stderr=subprocess.STDOUT - ) - Path('daemon-active').touch() - -def stop_daemon(): - global mina_process - mina_process.send_signal(signal.SIGTERM) - - child_pids = get_child_processes(mina_process.pid) - mina_process.wait() - for child_pid in child_pids: - wait_for_pid(child_pid) - Path('daemon-active').unlink() - mina_process = None - -# technically, doing the loops like this will eventually result in a stack overflow -# however, you would need to do a lot of starts and stops to hit this condition - -def inactive_loop(): - global active_daemon_request - server = None - try: - server = HTTPServer(('0.0.0.0', 3085), MockRequestHandler) - while True: - server.handle_request() - signal.sigtimedwait(ALL_SIGNALS, 0) - if active_daemon_request: - start_daemon() - active_daemon_request = False - break - except Exception as err: - print("inactive_loop experienced an error: ") - print(err) - finally: - if server != None: - server.shutdown() - - active_loop() - 
-def active_loop(): - global mina_process, inactive_daemon_request - - while True: - signal.pause() - status = mina_process.poll() - if status != None: - cleanup_and_exit(status) - elif inactive_daemon_request: - stop_daemon() - inactive_daemon_request = False - break - - inactive_loop() - -def cleanup_and_exit(status): - time.sleep(5) - tail_process.terminate() - tail_process.wait() - sys.exit(status) - -if __name__ == '__main__': - signal.signal(signal.SIGCHLD, handle_child_termination) - signal.signal(signal.SIGUSR1, handle_stop_request) - signal.signal(signal.SIGUSR2, handle_start_request) - - Path('.mina-config').mkdir(exist_ok=True) - Path('mina.log').touch() - Path('.mina-config/mina-prover.log').touch() - Path('.mina-config/mina-verifier.log').touch() - Path('.mina-config/mina-best-tip.log').touch() - - # currently does not handle tail process dying - tail_process = subprocess.Popen( - ['tail', '-q', '-f', 'mina.log', '-f', '.mina-config/mina-prover.log', '-f', '.mina-config/mina-verifier.log', '-f' , '.mina-config/mina-best-tip.log'] - ) - - inactive_loop() \ No newline at end of file diff --git a/dockerfiles/scripts/puppeteer/start.sh b/dockerfiles/scripts/puppeteer/start.sh deleted file mode 100644 index 9c391b204a9..00000000000 --- a/dockerfiles/scripts/puppeteer/start.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -kill -s SIGUSR2 $(/find_puppeteer.sh) -while [ ! 
-f daemon-active ]; do sleep 1; done \ No newline at end of file diff --git a/helm/staking-ledger-cron/mainnet-dump-staking-ledger-cronjob.yaml b/helm/staking-ledger-cron/mainnet-dump-staking-ledger-cronjob.yaml index 9992edd5984..c43c94787d0 100644 --- a/helm/staking-ledger-cron/mainnet-dump-staking-ledger-cronjob.yaml +++ b/helm/staking-ledger-cron/mainnet-dump-staking-ledger-cronjob.yaml @@ -38,16 +38,17 @@ spec: done; mina ledger export next-epoch-ledger>next_epoch_ledger.json; echo "next epoch ledger dumped!"; + DATE="$(date +%F_%H%M)" EPOCHNUM="$(mina client status|grep "Best tip consensus time"|grep -o "epoch=[0-9]*"|sed "s/[^0-9]*//g")"; STAKING_HASH="$(mina ledger hash --ledger-file staking_epoch_ledger.json)"; STAKING_MD5="$(md5sum staking_epoch_ledger.json|cut -d " " -f 1)"; - LEDGER_FILENAME=staking-"$EPOCHNUM"-"$STAKING_HASH"-"$STAKING_MD5".json; + LEDGER_FILENAME=staking-"$EPOCHNUM"-"$STAKING_HASH"-"$STAKING_MD5"-"$DATE".json; mv ./staking_epoch_ledger.json ./$LEDGER_FILENAME;NEXT_STAKING_HASH="$(mina ledger hash --ledger-file next_epoch_ledger.json)"; NEXT_STAKING_MD5="$(md5sum next_epoch_ledger.json|cut -d " " -f 1)"; - NEXT_FILENAME=next-staking-"$EPOCHNUM"-"$NEXT_STAKING_HASH"-"$NEXT_STAKING_MD5".json; + NEXT_FILENAME=next-staking-"$EPOCHNUM"-"$NEXT_STAKING_HASH"-"$NEXT_STAKING_MD5"-"$DATE".json; mv ./next_epoch_ledger.json ./$NEXT_FILENAME; EXPORTED_LOGS="local-logs"; - LOGS_FILENAME="daemon-logs-epoch-$EPOCHNUM.tgz"; + LOGS_FILENAME="daemon-logs-epoch-$EPOCHNUM-"$DATE".tgz"; mina client export-local-logs --tarfile $EXPORTED_LOGS; mv /root/.mina-config/exported_logs/$EXPORTED_LOGS.tar.gz $LOGS_FILENAME; echo "upload to a GCP cloud storage bucket"; diff --git a/src/.ocamlformat b/src/.ocamlformat new file mode 100644 index 00000000000..9a9f9a6031c --- /dev/null +++ b/src/.ocamlformat @@ -0,0 +1,66 @@ +profile=ocamlformat +quiet=false +max-iters=10 +comment-check=true +wrap-fun-args=true +wrap-comments=false +type-decl-indent=2 +type-decl=compact 
+stritem-extension-indent=0 +space-around-variants=true +space-around-records=true +space-around-lists=true +space-around-arrays=true +single-case=compact +sequence-style=separator +sequence-blank-line=preserve-one +parse-docstrings=false +parens-tuple-patterns=multi-line-only +parens-tuple=always +parens-ite=false +ocp-indent-compat=false +nested-match=wrap +module-item-spacing=sparse +max-indent=68 +match-indent-nested=never +match-indent=0 +margin=80 +let-open=preserve +let-module=compact +let-binding-spacing=compact +let-binding-indent=2 +let-and=compact +leading-nested-match-parens=false +infix-precedence=indent +indicate-nested-or-patterns=space +indicate-multiline-delimiters=space +indent-after-in=0 +if-then-else=compact +function-indent-nested=never +function-indent=2 +field-space=loose +extension-indent=2 +exp-grouping=parens +dock-collection-brackets=false +doc-comments-tag-only=default +doc-comments-padding=2 +doc-comments=before +disambiguate-non-breaking-match=false +disable=false +cases-matching-exp-indent=normal +cases-exp-indent=4 +break-struct=force +break-string-literals=auto +break-sequences=false +break-separators=before +break-infix-before-func=true +break-infix=wrap +break-fun-sig=wrap +break-fun-decl=wrap +break-collection-expressions=fit-or-vertical +break-cases=nested +break-before-in=fit-or-vertical +assignment-operator=end-line +align-variants-decl=false +align-constructors-decl=false +align-cases=false diff --git a/src/app/archive_blocks/archive_blocks.ml b/src/app/archive_blocks/archive_blocks.ml index 6e926ae363d..f8e9ed969e2 100644 --- a/src/app/archive_blocks/archive_blocks.ml +++ b/src/app/archive_blocks/archive_blocks.ml @@ -10,7 +10,7 @@ let main ~archive_uri ~precomputed ~extensional ~success_file ~failure_file match path with | Some path -> let file = Out_channel.create ~append:true path in - fun line -> Out_channel.output_lines file [line] + fun line -> Out_channel.output_lines file [ line ] | None -> fun _line -> () in @@ 
-23,7 +23,7 @@ let main ~archive_uri ~precomputed ~extensional ~success_file ~failure_file match Caqti_async.connect_pool archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti connection to Postgresql" ; exit 1 | Ok pool -> @@ -34,17 +34,18 @@ let main ~archive_uri ~precomputed ~extensional ~success_file ~failure_file match%map add_block_aux block with | Ok () -> if log_successes then - [%log info] "Added block" ~metadata:[("file", `String file)] ; + [%log info] "Added block" ~metadata:[ ("file", `String file) ] ; add_to_success_file file | Error err -> [%log error] "Error when adding block" ~metadata: [ ("file", `String file) - ; ("error", `String (Caqti_error.show err)) ] ; + ; ("error", `String (Caqti_error.show err)) + ] ; add_to_failure_file file ) | Error err -> [%log error] "Could not create block from JSON" - ~metadata:[("file", `String file); ("error", `String err)] ; + ~metadata:[ ("file", `String file); ("error", `String err) ] ; return (add_to_failure_file file) in let add_precomputed_block = @@ -70,15 +71,16 @@ let main ~archive_uri ~precomputed ~extensional ~success_file ~failure_file with | Yojson.Json_error err -> [%log error] "Could not parse JSON from file" - ~metadata:[("file", `String file); ("error", `String err)] ; + ~metadata:[ ("file", `String file); ("error", `String err) ] ; return (add_to_failure_file file) | exn -> (* should be unreachable *) [%log error] "Internal error when processing file" ~metadata: [ ("file", `String file) - ; ("error", `String (Exn.to_string exn)) ] ; - return (add_to_failure_file file) ) ) + ; ("error", `String (Exn.to_string exn)) + ] ; + return (add_to_failure_file file))) let () = Command.( @@ -86,32 +88,32 @@ let () = (let open Let_syntax in async ~summary:"Write blocks to an archive database" (let%map archive_uri = - Param.flag "--archive-uri" ~aliases:["archive-uri"] + Param.flag 
"--archive-uri" ~aliases:[ "archive-uri" ] ~doc: "URI URI for connecting to the archive database (e.g., \ postgres://$USER@localhost:5432/archiver)" Param.(required string) and precomputed = - Param.(flag "--precomputed" ~aliases:["precomputed"] no_arg) + Param.(flag "--precomputed" ~aliases:[ "precomputed" ] no_arg) ~doc:"Blocks are in precomputed format" and extensional = - Param.(flag "--extensional" ~aliases:["extensional"] no_arg) + Param.(flag "--extensional" ~aliases:[ "extensional" ] no_arg) ~doc:"Blocks are in extensional format" and success_file = - Param.flag "--successful-files" ~aliases:["successful-files"] + Param.flag "--successful-files" ~aliases:[ "successful-files" ] ~doc: "PATH Appends the list of files that were processed successfully" (Flag.optional Param.string) and failure_file = - Param.flag "--failed-files" ~aliases:["failed-files"] + Param.flag "--failed-files" ~aliases:[ "failed-files" ] ~doc:"PATH Appends the list of files that failed to be processed" (Flag.optional Param.string) and log_successes = - Param.flag "--log-successful" ~aliases:["log-successful"] + Param.flag "--log-successful" ~aliases:[ "log-successful" ] ~doc: "true/false Whether to log messages for files that were \ processed successfully" (Flag.optional_with_default true Param.bool) and files = Param.anon Anons.(sequence ("FILES" %: Param.string)) in - main ~archive_uri ~precomputed ~extensional ~success_file - ~failure_file ~log_successes ~files))) + main ~archive_uri ~precomputed ~extensional ~success_file ~failure_file + ~log_successes ~files))) diff --git a/src/app/best_tip_merger/best_tip_merger.ml b/src/app/best_tip_merger/best_tip_merger.ml index 475244962a5..06c769b4642 100644 --- a/src/app/best_tip_merger/best_tip_merger.ml +++ b/src/app/best_tip_merger/best_tip_merger.ml @@ -7,8 +7,9 @@ open Mina_base module Node = struct module T = struct type t = - { state: Transition_frontier.Extensions.Best_tip_diff.Log_event.t - ; peer_ids: String.Set.t } + { state : 
Transition_frontier.Extensions.Best_tip_diff.Log_event.t + ; peer_ids : String.Set.t + } [@@deriving sexp, compare] end @@ -22,11 +23,12 @@ module Input = struct * peers: set of peers whose logs were processed * seen_state_hashes: map of states that were obtained from the logs. Used to keep the roots updated*) type t = - { all_states: (State_hash.t, Node.t State_hash.Map.t) Hashtbl.t - ; init_states: (State_hash.t, Node.t) Hashtbl.t + { all_states : (State_hash.t, Node.t State_hash.Map.t) Hashtbl.t + ; init_states : (State_hash.t, Node.t) Hashtbl.t (*generate from seen state hashes later on*) - ; peers: String.Set.t - ; seen_state_hashes: State_hash.Set.t } + ; peers : String.Set.t + ; seen_state_hashes : State_hash.Set.t + } type added_transitions = Transition_frontier.Extensions.Best_tip_diff.Log_event.t list @@ -43,7 +45,7 @@ module Input = struct Option.map msg.event_id ~f:(fun e -> Structured_log_events.equal_id e Transition_frontier.Extensions.Best_tip_diff.Log_event - .new_best_tip_event_structured_events_id ) + .new_best_tip_event_structured_events_id) in match tf_event_id with | Some true -> @@ -63,8 +65,9 @@ module Input = struct let acc' = List.fold ~init:acc added_transitions ~f:(fun acc'' tr -> let new_node = - { Node.state= tr - ; peer_ids= String.Set.singleton peer_id } + { Node.state = tr + ; peer_ids = String.Set.singleton peer_id + } in let parent_hash = Mina_state.Protocol_state.previous_state_hash @@ -83,24 +86,26 @@ module Input = struct | None -> new_node | Some node -> - { state= node.state - ; peer_ids= Set.add node.peer_ids peer_id } ) ; + { state = node.state + ; peer_ids = Set.add node.peer_ids peer_id + }) ; seen_state_hashes in Hashtbl.update t.all_states parent_hash ~f:(function | None -> State_hash.Map.singleton new_state_hash new_node | Some map -> ( - match Map.find map new_state_hash with - | None -> - Map.add_exn map ~key:new_state_hash - ~data:new_node - | Some {state; peer_ids} -> - Map.set map ~key:new_state_hash - ~data: - 
{state; peer_ids= Set.add peer_ids peer_id} - ) ) ; - {acc'' with seen_state_hashes} ) + match Map.find map new_state_hash with + | None -> + Map.add_exn map ~key:new_state_hash + ~data:new_node + | Some { state; peer_ids } -> + Map.set map ~key:new_state_hash + ~data: + { state + ; peer_ids = Set.add peer_ids peer_id + } )) ; + { acc'' with seen_state_hashes }) in (* remove any previous roots for which there are ancestors now*) List.iter (Hashtbl.keys acc'.init_states) ~f:(fun root -> @@ -110,18 +115,18 @@ module Input = struct in if State_hash.Set.mem acc'.seen_state_hashes parent then (* no longer a root because a node for its parent was seen*) - Hashtbl.remove acc'.init_states root ) ; - {acc' with peers} + Hashtbl.remove acc'.init_states root) ; + { acc' with peers } | None | Some false -> [%log error] "Skipping log line $line because it is not a \ best-tip-change log" - ~metadata:[("line", `String line)] ; + ~metadata:[ ("line", `String line) ] ; (*skipping any other logs*) acc ) | Error err -> [%log error] "Could not process log line $line: $error" - ~metadata:[("line", `String line); ("error", `String err)] ; - acc ) + ~metadata:[ ("line", `String line); ("error", `String err) ] ; + acc) in [%log info] "Finished processing log file: %s" log_file ; res @@ -130,7 +135,7 @@ end (*Output is a rose tree and consists of all the forks seen from an initial state; Multiple rose trees is there are logs with different initial states*) module Output = struct type node = - | Root of {state: State_hash.t; peer_ids: String.Set.t} + | Root of { state : State_hash.t; peer_ids : String.Set.t } | Node of Node.t type t = node Rose_tree.t list @@ -145,7 +150,7 @@ module Output = struct | Some peer_ids -> Set.union peer_ids root_state.peer_ids | None -> - root_state.peer_ids ) ) + root_state.peer_ids)) in List.fold ~init:[] (Map.to_alist roots) ~f:(fun acc_trees (root, peer_ids) -> @@ -158,16 +163,18 @@ module Output = struct let successors_with_min_peers = if min_peers > 1 then 
List.filter successors ~f:(fun s -> - Set.length s.peer_ids >= min_peers ) + Set.length s.peer_ids >= min_peers) else successors in List.map successors_with_min_peers ~f:(fun s -> Rose_tree.T - ( Node {state= s.state; peer_ids= s.peer_ids} - , go s.state.state_hash ) ) + ( Node { state = s.state; peer_ids = s.peer_ids } + , go s.state.state_hash )) + in + let root_node = + Rose_tree.T (Root { state = root; peer_ids }, go root) in - let root_node = Rose_tree.T (Root {state= root; peer_ids}, go root) in - root_node :: acc_trees ) + root_node :: acc_trees) end module type Graph_node_intf = sig @@ -192,7 +199,7 @@ module Display = struct | Node of Transition_frontier.Extensions.Best_tip_diff.Log_event.t [@@deriving yojson] - type node = {state: state; peers: int} [@@deriving yojson] + type node = { state : state; peers : int } [@@deriving yojson] type t = node Rose_tree.t list [@@deriving yojson] @@ -202,22 +209,23 @@ module Display = struct Rose_tree.map tree ~f:(fun (t : Output.node) -> match t with | Root s -> - {state= Root s.state; peers= Set.length s.peer_ids} + { state = Root s.state; peers = Set.length s.peer_ids } | Node s -> - {state= Node s.state; peers= Set.length s.peer_ids} ) ) + { state = Node s.state; peers = Set.length s.peer_ids })) end module Compact_display = struct type state = | Root of State_hash.t | Node of - { current: State_hash.t - ; parent: State_hash.t - ; blockchain_length: Mina_numbers.Length.t - ; global_slot: Mina_numbers.Global_slot.t } + { current : State_hash.t + ; parent : State_hash.t + ; blockchain_length : Mina_numbers.Length.t + ; global_slot : Mina_numbers.Global_slot.t + } [@@deriving yojson] - type node = {state: state; peers: int} [@@deriving yojson] + type node = { state : state; peers : int } [@@deriving yojson] type t = node Rose_tree.t list [@@deriving yojson] @@ -226,36 +234,38 @@ module Compact_display = struct Rose_tree.map tree ~f:(fun (t : Output.node) -> match t with | Root s -> - {state= Root s.state; peers= 
Set.length s.peer_ids} + { state = Root s.state; peers = Set.length s.peer_ids } | Node t -> let state : state = Node - { current= t.state.state_hash - ; parent= t.state.protocol_state.previous_state_hash - ; blockchain_length= + { current = t.state.state_hash + ; parent = t.state.protocol_state.previous_state_hash + ; blockchain_length = Mina_state.Protocol_state.consensus_state t.state.protocol_state |> Consensus.Data.Consensus_state.blockchain_length - ; global_slot= + ; global_slot = Mina_state.Protocol_state.consensus_state t.state.protocol_state - |> Consensus.Data.Consensus_state.curr_global_slot } + |> Consensus.Data.Consensus_state.curr_global_slot + } in - {state; peers= Set.length t.peer_ids} ) ) + { state; peers = Set.length t.peer_ids })) end module Graph_node = struct type state = | Root of State_hash.t | Node of - { current: State_hash.t - ; length: Mina_numbers.Length.t - ; slot: Mina_numbers.Global_slot.t } + { current : State_hash.t + ; length : Mina_numbers.Length.t + ; slot : Mina_numbers.Global_slot.t + } [@@deriving yojson, equal, hash] - type t = {state: state; peers: int} [@@deriving yojson, equal, hash] + type t = { state : state; peers : int } [@@deriving yojson, equal, hash] - type display = {state: string; length: string; slot: string; peers: int} + type display = { state : string; length : string; slot : string; peers : int } [@@deriving yojson] let name (t : t) = @@ -275,7 +285,7 @@ module Graph_node = struct ( Mina_numbers.Length.to_string s.length , Mina_numbers.Global_slot.to_string s.slot ) in - {state; slot; length; peers= t.peers} + { state; slot; length; peers = t.peers } let compare (t : t) (t' : t) = let state_hash = function Root s -> s | Node s -> s.current in @@ -293,11 +303,12 @@ module Visualization = struct Graph_node.Root s | Node s -> Node - { current= s.current - ; length= s.blockchain_length - ; slot= s.global_slot } + { current = s.current + ; length = s.blockchain_length + ; slot = s.global_slot + } in - 
{Graph_node.state; peers= node.peers} + { Graph_node.state; peers = node.peers } in let rec go (Rose_tree.T (node, subtrees)) graph = let node = to_graph_node node in @@ -305,7 +316,7 @@ module Visualization = struct List.fold ~init:graph_with_node subtrees ~f:(fun gr (T (child_node, _) as child_tree) -> let gr' = add_edge gr node (to_graph_node child_node) in - go child_tree gr' ) + go child_tree gr') in go t empty @@ -314,7 +325,7 @@ module Visualization = struct let filename = output_dir ^/ "tree_" ^ Int.to_string i ^ ".dot" in Out_channel.with_file filename ~f:(fun output_channel -> let graph = to_graph tree in - output_graph output_channel graph ) ) + output_graph output_channel graph)) end let main ~input_dir ~output_dir ~output_format ~min_peers () = @@ -322,13 +333,14 @@ let main ~input_dir ~output_dir ~output_format ~min_peers () = Sys.ls_dir input_dir >>| List.filter_map ~f:(fun n -> if Filename.check_suffix n ".log" then Some (input_dir ^/ n) - else None ) + else None) in let t : Input.t = - { Input.all_states= Hashtbl.create (module State_hash) - ; peers= String.Set.empty - ; init_states= Hashtbl.create (module State_hash) - ; seen_state_hashes= State_hash.Set.empty } + { Input.all_states = Hashtbl.create (module State_hash) + ; peers = String.Set.empty + ; init_states = Hashtbl.create (module State_hash) + ; seen_state_hashes = State_hash.Set.empty + } in let logrotate_max_size = 1024 * 1024 * 1 in let logrotate_num_rotate = 1 in @@ -341,7 +353,7 @@ let main ~input_dir ~output_dir ~output_format ~min_peers () = let logger = Logger.create () in let t' = List.fold ~init:t files ~f:(fun t log_file -> - Input.of_logs ~logger ~log_file t ) + Input.of_logs ~logger ~log_file t) in [%log info] "Consolidating best-tip history.." 
; let output = Output.of_input t' ~min_peers in @@ -370,16 +382,16 @@ let () = "Consolidates best tip history from multiple log files into a rose \ tree representation" (let%map input_dir = - Param.flag "--input-dir" ~aliases:["-input-dir"] + Param.flag "--input-dir" ~aliases:[ "-input-dir" ] ~doc: "PATH Directory containing one or more mina-best-tip.log files" Param.(required string) and output_dir = - Param.flag "--output-dir" ~aliases:["-output-dir"] + Param.flag "--output-dir" ~aliases:[ "-output-dir" ] ~doc:"PATH Directory to save the output" Param.(required string) and output_format = - Param.flag "--output-format" ~aliases:["-output-format"] + Param.flag "--output-format" ~aliases:[ "-output-format" ] ~doc: "Full|Compact Information shown for each block. Full= Protocol \ state and Compact= Current state hash, previous state hash, \ @@ -388,7 +400,7 @@ let () = and log_json = Cli_lib.Flag.Log.json and log_level = Cli_lib.Flag.Log.level and min_peers = - Param.flag "--min-peers" ~aliases:["-min-peers"] + Param.flag "--min-peers" ~aliases:[ "-min-peers" ] ~doc: "Int(>0) Keep blocks that were accepted by at least min-peers \ number of peers and prune the rest (Default=1)" diff --git a/src/app/client_sdk/client_sdk.ml b/src/app/client_sdk/client_sdk.ml index 85b757bbbbb..3a19acc6985 100644 --- a/src/app/client_sdk/client_sdk.ml +++ b/src/app/client_sdk/client_sdk.ml @@ -1,13 +1,10 @@ (* client_sdk.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] -[%%error -"Client SDK cannot be built if \"consensus_mechanism\" is defined"] +[%%error "Client SDK cannot be built if \"consensus_mechanism\" is defined"] [%%endif] @@ -84,8 +81,7 @@ let _ = Mina_base_nonconsensus.Signed_command_payload.dummy in let signature = - Mina_base_nonconsensus.Signed_command.sign_payload sk - dummy_payload + Mina_base_nonconsensus.Signed_command.sign_payload sk dummy_payload in let message = 
Mina_base_nonconsensus.Signed_command.to_input dummy_payload @@ -99,8 +95,7 @@ let _ = else raise_js_error "Could not sign a transaction with private key" (** sign arbitrary string with private key *) - method signString (sk_base58_check_js : string_js) (str_js : string_js) - = + method signString (sk_base58_check_js : string_js) (str_js : string_js) = let sk_base58_check = Js.to_string sk_base58_check_js in let sk = Private_key.of_base58_check_exn sk_base58_check in let str = Js.to_string str_js in @@ -119,9 +114,7 @@ let _ = |> Public_key.Compressed.of_base58_check_exn |> Public_key.decompress_exn in - let inner_curve = - Snark_params_nonconsensus.Inner_curve.of_affine pk - in + let inner_curve = Snark_params_nonconsensus.Inner_curve.of_affine pk in let str = Js.to_string str_js in if String_sign.Schnorr.verify signature inner_curve str then Js._true else Js._false @@ -156,13 +149,13 @@ let _ = |> Public_key.decompress_exn in let signature = signature_of_js_object signed_payment##.signature in - let signed = Signed_command.Poly.{payload; signer; signature} in + let signed = Signed_command.Poly.{ payload; signer; signature } in if Signed_command.check_signature signed then Js._true else Js._false (** sign payment transaction payload with private key *) method signStakeDelegation (sk_base58_check_js : string_js) - (stake_delegation_js : stake_delegation_js) - : signed_stake_delegation = + (stake_delegation_js : stake_delegation_js) : signed_stake_delegation + = let sk_base58_check = Js.to_string sk_base58_check_js in let sk = Private_key.of_base58_check_exn sk_base58_check in let payload = payload_of_stake_delegation_js stake_delegation_js in @@ -180,7 +173,7 @@ let _ = (** verify signed delegations *) method verifyStakeDelegationSignature - (signed_stake_delegation : signed_stake_delegation) : bool Js.t = + (signed_stake_delegation : signed_stake_delegation) : bool Js.t = let payload : Signed_command_payload.t = payload_of_stake_delegation_js 
signed_stake_delegation##.stakeDelegation @@ -193,7 +186,7 @@ let _ = let signature = signature_of_js_object signed_stake_delegation##.signature in - let signed = Signed_command.Poly.{payload; signer; signature} in + let signed = Signed_command.Poly.{ payload; signer; signature } in if Signed_command.check_signature signed then Js._true else Js._false (** sign a transaction in Rosetta rendered format *) @@ -205,7 +198,7 @@ let _ = Js.to_string unsignedRosettaTxn |> Yojson.Safe.from_string in let make_error err = - let json = `Assoc [("error", `String err)] in + let json = `Assoc [ ("error", `String err) ] in Js.string (Yojson.Safe.to_string json) in let make_signed_transaction command nonce = @@ -220,12 +213,12 @@ let _ = Signed_command.sign_payload sk payload |> Signature.Raw.encode in let signed_txn = - Transaction.Signed.{command; nonce; signature} + Transaction.Signed.{ command; nonce; signature } in match Transaction.Signed.render signed_txn with | Ok signed -> let json = Transaction.Signed.Rendered.to_yojson signed in - let json' = `Assoc [("data", json)] in + let json' = `Assoc [ ("data", json) ] in Js.string (Yojson.Safe.to_string json') | Error errs -> make_error (Rosetta_lib_nonconsensus.Errors.show errs) ) @@ -234,21 +227,23 @@ let _ = in match Transaction.Unsigned.Rendered.of_yojson unsigned_txn_json with | Ok - { random_oracle_input= _ - ; payment= Some payment - ; stake_delegation= None - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } -> + { random_oracle_input = _ + ; payment = Some payment + ; stake_delegation = None + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } -> let command = Transaction.Unsigned.of_rendered_payment payment in make_signed_transaction command payment.nonce | Ok - { random_oracle_input= _ - ; payment= None - ; stake_delegation= Some delegation - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } -> + { random_oracle_input = _ + ; payment = 
None + ; stake_delegation = Some delegation + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } -> let command = Transaction.Unsigned.of_rendered_delegation delegation in @@ -261,7 +256,7 @@ let _ = make_error msg method signedRosettaTransactionToSignedCommand - (signedRosettaTxn : string_js) = + (signedRosettaTxn : string_js) = let signed_txn_json = Js.to_string signedRosettaTxn |> Yojson.Safe.from_string in @@ -269,7 +264,7 @@ let _ = match Transaction.to_mina_signed signed_txn_json with | Ok signed_cmd -> let cmd_json = Signed_command.to_yojson signed_cmd in - `Assoc [("data", cmd_json)] + `Assoc [ ("data", cmd_json) ] | Error err -> let open Core_kernel in let err_msg = @@ -277,7 +272,7 @@ let _ = "Could not parse JSON for signed Rosetta transaction: %s" (Error.to_string_hum err) in - `Assoc [("error", `String err_msg)] + `Assoc [ ("error", `String err_msg) ] in Js.string (Yojson.Safe.to_string result_json) diff --git a/src/app/client_sdk/js_util.ml b/src/app/client_sdk/js_util.ml index db0c473c04c..5a76d4eb4fb 100644 --- a/src/app/client_sdk/js_util.ml +++ b/src/app/client_sdk/js_util.ml @@ -14,16 +14,16 @@ let raise_js_error s = Js.raise_js_error (new%js Js.error_constr (Js.string s)) type string_js = Js.js_string Js.t type keypair_js = - < privateKey: string_js Js.readonly_prop - ; publicKey: string_js Js.readonly_prop > + < privateKey : string_js Js.readonly_prop + ; publicKey : string_js Js.readonly_prop > Js.t type payload_common_js = - < fee: string_js Js.prop - ; feePayer: string_js Js.prop - ; nonce: string_js Js.prop - ; validUntil: string_js Js.prop - ; memo: string_js Js.prop > + < fee : string_js Js.prop + ; feePayer : string_js Js.prop + ; nonce : string_js Js.prop + ; validUntil : string_js Js.prop + ; memo : string_js Js.prop > Js.t let payload_common_of_js (payload_common_js : payload_common_js) = @@ -41,17 +41,17 @@ let payload_common_of_js (payload_common_js : payload_common_js) = let memo_js = 
payload_common_js##.memo in let memo = Js.to_string memo_js |> Memo.create_from_string_exn in Signed_command_payload.Common.Poly. - {fee; fee_token; fee_payer_pk; nonce; valid_until; memo} + { fee; fee_token; fee_payer_pk; nonce; valid_until; memo } type payment_payload_js = - < source: string_js Js.prop - ; receiver: string_js Js.prop - ; amount: string_js Js.prop > + < source : string_js Js.prop + ; receiver : string_js Js.prop + ; amount : string_js Js.prop > Js.t type payment_js = - < common: payload_common_js Js.prop - ; paymentPayload: payment_payload_js Js.prop > + < common : payload_common_js Js.prop + ; paymentPayload : payment_payload_js Js.prop > Js.t let payment_body_of_js payment_payload = @@ -68,19 +68,19 @@ let payment_body_of_js payment_payload = payment_payload##.amount |> Js.to_string |> Currency.Amount.of_string in Signed_command_payload.Body.Payment - Payment_payload.Poly.{source_pk; receiver_pk; token_id; amount} + Payment_payload.Poly.{ source_pk; receiver_pk; token_id; amount } let payload_of_payment_js payment_js : Signed_command_payload.t = let common = payload_common_of_js payment_js##.common in let body = payment_body_of_js payment_js##.paymentPayload in - Signed_command_payload.Poly.{common; body} + Signed_command_payload.Poly.{ common; body } type stake_delegation_payload_js = - < delegator: string_js Js.prop ; newDelegate: string_js Js.prop > Js.t + < delegator : string_js Js.prop ; newDelegate : string_js Js.prop > Js.t type stake_delegation_js = - < common: payload_common_js Js.prop - ; delegationPayload: stake_delegation_payload_js Js.prop > + < common : payload_common_js Js.prop + ; delegationPayload : stake_delegation_payload_js Js.prop > Js.t let stake_delegation_body_of_js delegation_payload = @@ -93,15 +93,15 @@ let stake_delegation_body_of_js delegation_payload = |> Signature_lib.Public_key.of_base58_check_decompress_exn in Signed_command_payload.Body.Stake_delegation - (Set_delegate {delegator; new_delegate}) + (Set_delegate 
{ delegator; new_delegate }) let payload_of_stake_delegation_js payment_js : Signed_command_payload.t = let common = payload_common_of_js payment_js##.common in let body = stake_delegation_body_of_js payment_js##.delegationPayload in - Signed_command_payload.Poly.{common; body} + Signed_command_payload.Poly.{ common; body } type signature_js = - < field: string_js Js.readonly_prop ; scalar: string_js Js.readonly_prop > + < field : string_js Js.readonly_prop ; scalar : string_js Js.readonly_prop > Js.t let signature_to_js_object ((field, scalar) : Signature.t) = @@ -119,13 +119,13 @@ let signature_of_js_object (signature_js : signature_js) : Signature.t = (field, scalar) type signed_payment = - < payment: payment_js Js.readonly_prop - ; sender: string_js Js.readonly_prop - ; signature: signature_js Js.readonly_prop > + < payment : payment_js Js.readonly_prop + ; sender : string_js Js.readonly_prop + ; signature : signature_js Js.readonly_prop > Js.t type signed_stake_delegation = - < stakeDelegation: stake_delegation_js Js.readonly_prop - ; sender: string_js Js.readonly_prop - ; signature: signature_js Js.readonly_prop > + < stakeDelegation : stake_delegation_js Js.readonly_prop + ; sender : string_js Js.readonly_prop + ; signature : signature_js Js.readonly_prop > Js.t diff --git a/src/app/client_sdk/string_sign.ml b/src/app/client_sdk/string_sign.ml index 2b4966138e4..f2ba36e5ea4 100644 --- a/src/app/client_sdk/string_sign.ml +++ b/src/app/client_sdk/string_sign.ml @@ -8,37 +8,37 @@ module Message = struct let nybble_bits = function | 0x0 -> - [false; false; false; false] + [ false; false; false; false ] | 0x1 -> - [false; false; false; true] + [ false; false; false; true ] | 0x2 -> - [false; false; true; false] + [ false; false; true; false ] | 0x3 -> - [false; false; true; true] + [ false; false; true; true ] | 0x4 -> - [false; true; false; false] + [ false; true; false; false ] | 0x5 -> - [false; true; false; true] + [ false; true; false; true ] | 0x6 -> - 
[false; true; true; false] + [ false; true; true; false ] | 0x7 -> - [false; true; true; true] + [ false; true; true; true ] | 0x8 -> - [true; false; false; false] + [ true; false; false; false ] | 0x9 -> - [true; false; false; true] + [ true; false; false; true ] | 0xA -> - [true; false; true; false] + [ true; false; true; false ] | 0xB -> - [true; false; true; true] + [ true; false; true; true ] | 0xC -> - [true; true; false; false] + [ true; true; false; false ] | 0xD -> - [true; true; false; true] + [ true; true; false; true ] | 0xE -> - [true; true; true; false] + [ true; true; true; false ] | 0xF -> - [true; true; true; true] + [ true; true; true; true ] | _ -> failwith "nybble_bits: expected value from 0 to 0xF" @@ -47,20 +47,21 @@ module Message = struct let n = Char.to_int c in let hi = Int.(shift_right (bit_and n 0xF0) 4) in let lo = Int.bit_and n 0x0F in - List.concat_map [hi; lo] ~f:nybble_bits + List.concat_map [ hi; lo ] ~f:nybble_bits let string_bits s = let open Core_kernel in List.(concat_map (String.to_list s) ~f:char_bits) let derive t ~private_key ~public_key:pk = - let pk_bits {Public_key.Compressed.Poly.x; is_odd} = + let pk_bits { Public_key.Compressed.Poly.x; is_odd } = is_odd :: Field.unpack x in List.concat [ Tock.Field.unpack private_key ; pk_bits (Public_key.compress (Inner_curve.to_affine_exn pk)) - ; string_bits t ] + ; string_bits t + ] |> Array.of_list |> Blake2.bits_to_string |> Blake2.digest_string |> Blake2.to_raw_string |> Blake2.string_to_bits |> Array.to_list |> Base.(Fn.flip List.take (Int.min 256 (Tock.Field.size_in_bits - 1))) @@ -69,14 +70,15 @@ module Message = struct let hash t ~public_key ~r = let string_to_input s = Random_oracle.Input. 
- { field_elements= [||] - ; bitstrings= - Stdlib.(Array.of_seq (Seq.map char_bits (String.to_seq s))) } + { field_elements = [||] + ; bitstrings = + Stdlib.(Array.of_seq (Seq.map char_bits (String.to_seq s))) + } in let input = let px, py = Inner_curve.to_affine_exn public_key in Random_oracle.Input.append (string_to_input t) - {field_elements= [|px; py; r|]; bitstrings= [||]} + { field_elements = [| px; py; r |]; bitstrings = [||] } in let open Random_oracle in hash ~init:Hash_prefix.signature (pack_input input) diff --git a/src/app/client_sdk/tests/test_signatures.ml b/src/app/client_sdk/tests/test_signatures.ml index 114b578b914..57802af9682 100644 --- a/src/app/client_sdk/tests/test_signatures.ml +++ b/src/app/client_sdk/tests/test_signatures.ml @@ -2,13 +2,11 @@ for comparison against signatures generated in client SDK *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -36,7 +34,7 @@ let keypair = "EKFKgDtU3rcuFTVSEpmpXSkukjmX4cKefYREi6Sdsk7E7wsT7KRw" in let public_key = Public_key.decompress_exn signer_pk in - Keypair.{public_key; private_key} + Keypair.{ public_key; private_key } (* payment receiver *) let receiver = @@ -55,7 +53,7 @@ let make_common ~fee ~fee_payer_pk ~nonce ~valid_until memo = let valid_until = Mina_numbers.Global_slot.of_int valid_until in let memo = Signed_command_memo.create_from_string_exn memo in Signed_command_payload.Common.Poly. 
- {fee; fee_token; fee_payer_pk; nonce; valid_until; memo} + { fee; fee_token; fee_payer_pk; nonce; valid_until; memo } let make_payment ~amount ~fee ~fee_payer_pk ~source_pk ~receiver_pk ~nonce ~valid_until memo = @@ -64,9 +62,9 @@ let make_payment ~amount ~fee ~fee_payer_pk ~source_pk ~receiver_pk ~nonce let token_id = Token_id.default in let body = Signed_command_payload.Body.Payment - {source_pk; receiver_pk; token_id; amount} + { source_pk; receiver_pk; token_id; amount } in - Signed_command_payload.Poly.{common; body} + Signed_command_payload.Poly.{ common; body } let payments = let receiver_pk = receiver in @@ -77,16 +75,17 @@ let payments = ; make_payment ~receiver_pk ~source_pk ~fee_payer_pk ~amount:2048 ~fee:15 ~nonce:212 ~valid_until:305 "this is not a pipe" ; make_payment ~receiver_pk ~source_pk ~fee_payer_pk ~amount:109 ~fee:2001 - ~nonce:3050 ~valid_until:9000 "blessed be the geek" ] + ~nonce:3050 ~valid_until:9000 "blessed be the geek" + ] let make_stake_delegation ~delegator ~new_delegate ~fee ~fee_payer_pk ~nonce ~valid_until memo = let common = make_common ~fee ~fee_payer_pk ~nonce ~valid_until memo in let body = Signed_command_payload.Body.Stake_delegation - (Stake_delegation.Set_delegate {delegator; new_delegate}) + (Stake_delegation.Set_delegate { delegator; new_delegate }) in - Signed_command_payload.Poly.{common; body} + Signed_command_payload.Poly.{ common; body } let delegations = let delegator = signer_pk in @@ -96,11 +95,12 @@ let delegations = ; make_stake_delegation ~fee_payer_pk ~delegator ~new_delegate ~fee:10 ~nonce:1000 ~valid_until:8192 "enough stake to kill a vampire" ; make_stake_delegation ~fee_payer_pk ~delegator ~new_delegate ~fee:8 - ~nonce:1010 ~valid_until:100000 "another memo" ] + ~nonce:1010 ~valid_until:100000 "another memo" + ] let transactions = payments @ delegations -type jsSignature = {privateKey: Field.t; publicKey: Inner_curve.Scalar.t} +type jsSignature = { privateKey : Field.t; publicKey : Inner_curve.Scalar.t } 
let get_signature payload = (Signed_command.sign keypair payload :> Signed_command.With_valid_signature.t) @@ -120,13 +120,13 @@ let main () = eprintf !"Signature (%d) failed to verify: %{sexp: Signed_command.t}\n%!" i signature ; - exit 1 ) ) ; + exit 1 )) ; printf "[\n" ; List.iter signatures ~f:(fun signature -> - let Signed_command.Poly.{signature= field, scalar; _} = + let Signed_command.Poly.{ signature = field, scalar; _ } = (signature :> Signed_command.t) in - print_signature field scalar ) ; + print_signature field scalar) ; printf "]\n" let _ = main () diff --git a/src/app/delegation_compliance/delegation_compliance.ml b/src/app/delegation_compliance/delegation_compliance.ml index cd9bc3af066..384f02b8823 100644 --- a/src/app/delegation_compliance/delegation_compliance.ml +++ b/src/app/delegation_compliance/delegation_compliance.ml @@ -10,44 +10,47 @@ open Async open Mina_base open Signature_lib -type input = {epoch: int; staking_ledger: Runtime_config.Ledger.t} +type input = { epoch : int; staking_ledger : Runtime_config.Ledger.t } [@@deriving yojson] type delegation_source = O1 | Mina_foundation [@@deriving yojson] type payout_information = - { payout_pk: Public_key.Compressed.t - ; payout_id: int - ; delegation_source: delegation_source - ; delegatee: Public_key.Compressed.t - ; delegatee_id: int - ; payments: Sql.User_command.t list - ; payments_to_slot_3500: Sql.User_command.t list - ; payments_past_slot_3500: Sql.User_command.t list } + { payout_pk : Public_key.Compressed.t + ; payout_id : int + ; delegation_source : delegation_source + ; delegatee : Public_key.Compressed.t + ; delegatee_id : int + ; payments : Sql.User_command.t list + ; payments_to_slot_3500 : Sql.User_command.t list + ; payments_past_slot_3500 : Sql.User_command.t list + } [@@deriving yojson] type csv_data = - { payout_addr: Public_key.Compressed.t - ; balance: Currency.Balance.t - ; delegatee: Public_key.Compressed.t - ; delegation: Currency.Amount.t - ; blocks_won: int - ; 
payout_obligation: Currency.Amount.t - ; payout_received: Currency.Amount.t - ; deficit: Currency.Amount.t - ; check: bool } + { payout_addr : Public_key.Compressed.t + ; balance : Currency.Balance.t + ; delegatee : Public_key.Compressed.t + ; delegation : Currency.Amount.t + ; blocks_won : int + ; payout_obligation : Currency.Amount.t + ; payout_received : Currency.Amount.t + ; deficit : Currency.Amount.t + ; check : bool + } module Delegatee_payout_address = struct type t = - { delegatee: Public_key.Compressed.Stable.Latest.t - ; payout_addr: Public_key.Compressed.Stable.Latest.t } + { delegatee : Public_key.Compressed.Stable.Latest.t + ; payout_addr : Public_key.Compressed.Stable.Latest.t + } [@@deriving hash, bin_io_unversioned, compare, sexp] end module Deficit = Hashable.Make_binable (Delegatee_payout_address) type previous_epoch_status = - {payout_received: Currency.Amount.t; deficit: Currency.Amount.t} + { payout_received : Currency.Amount.t; deficit : Currency.Amount.t } (* map from delegatee, payout address to payment_received, deficit from previous epoch *) let deficit_tbl : previous_epoch_status Deficit.Table.t = @@ -63,7 +66,8 @@ let csv_data_of_strings ss = ; payout_obligation ; payout_received ; deficit - ; check ] -> + ; check + ] -> let payout_addr = Public_key.Compressed.of_base58_check_exn payout_address in @@ -87,7 +91,8 @@ let csv_data_of_strings ss = ; payout_obligation ; payout_received ; deficit - ; check } + ; check + } | _ -> failwith "Incorrect number of fields in CSV line" @@ -100,8 +105,7 @@ let currency_string_of_int64 i64 = |> Currency.Amount.to_formatted_string (* map from global slots to state hash, ledger hash pairs *) -let global_slot_hashes_tbl : (Int64.t, State_hash.t * Ledger_hash.t) Hashtbl.t - = +let global_slot_hashes_tbl : (Int64.t, State_hash.t * Ledger_hash.t) Hashtbl.t = Int64.Table.create () (* cache of account keys *) @@ -139,15 +143,15 @@ let pk_of_pk_id pool pk_id : Account.key Deferred.t = Caqti_async.Pool.use (fun 
db -> Sql.Public_key.run db pk_id) pool with | Ok (Some pk) -> ( - match Signature_lib.Public_key.Compressed.of_base58_check pk with - | Ok pk -> - Hashtbl.add_exn pk_tbl ~key:pk_id ~data:pk ; - pk - | Error err -> - Error.tag_arg err "Error decoding public key" - (("public_key", pk), ("id", pk_id)) - [%sexp_of: (string * string) * (string * int)] - |> Error.raise ) + match Signature_lib.Public_key.Compressed.of_base58_check pk with + | Ok pk -> + Hashtbl.add_exn pk_tbl ~key:pk_id ~data:pk ; + pk + | Error err -> + Error.tag_arg err "Error decoding public key" + (("public_key", pk), ("id", pk_id)) + [%sexp_of: (string * string) * (string * int)] + |> Error.raise ) | Ok None -> failwithf "Could not find public key with id %d" pk_id () | Error msg -> @@ -184,17 +188,17 @@ let compute_delegated_stake staking_ledger delegatee = failwith "Error summing delegated stake" else accum | None -> - accum ) + accum) let account_balance ledger pk = let account_id = Account_id.create pk Token_id.default in match Ledger.location_of_account ledger account_id with | Some location -> ( - match Ledger.get ledger location with - | Some account -> - account.balance - | None -> - failwith "account_balance: Could not find account for public key" ) + match Ledger.get ledger location with + | Some account -> + account.balance + | None -> + failwith "account_balance: Could not find account for public key" ) | None -> failwith "account_balance: Could not find location for account" @@ -202,13 +206,13 @@ let get_account_balance_as_amount ledger pk = let account_id = Account_id.create pk Token_id.default in match Ledger.location_of_account ledger account_id with | Some location -> ( - match Ledger.get ledger location with - | Some account -> - Currency.Balance.to_amount account.balance - | None -> - failwith - "get_account_balance_as_amount: Could not find account for public key" - ) + match Ledger.get ledger location with + | Some account -> + Currency.Balance.to_amount account.balance + | None 
-> + failwith + "get_account_balance_as_amount: Could not find account for public \ + key" ) | None -> failwith "get_account_balance_as_amount: Could not find location for account" @@ -224,7 +228,7 @@ let block_ids_in_epoch pool delegatee_id epoch = query_db pool ~f:(fun db -> Sql.Block.get_block_ids_for_creator_in_slot_bounds db - ~creator:delegatee_id ~low_slot ~high_slot ) + ~creator:delegatee_id ~low_slot ~high_slot) ~item:"block ids for delegatee in epoch" let write_csv_header ~csv_out_channel = @@ -238,13 +242,14 @@ let write_csv_header ~csv_out_channel = ; "Payout obligation" ; "Payout received" ; "Deficit" - ; "Check" ] + ; "Check" + ] in Out_channel.output_string csv_out_channel line ; Out_channel.newline csv_out_channel -let write_csv_line ~csv_out_channel ~payout_addr ~balance ~delegatee - ~delegation ~blocks_won ~payout_obligation ~payout_received = +let write_csv_line ~csv_out_channel ~payout_addr ~balance ~delegatee ~delegation + ~blocks_won ~payout_obligation ~payout_received = let check = Currency.Amount.( >= ) payout_received payout_obligation in let deficit = match Currency.Amount.( - ) payout_obligation payout_received with @@ -263,7 +268,8 @@ let write_csv_line ~csv_out_channel ~payout_addr ~balance ~delegatee ; Currency.Amount.to_formatted_string payout_obligation ; Currency.Amount.to_formatted_string payout_received ; Currency.Amount.to_formatted_string deficit - ; Bool.to_string check ] + ; Bool.to_string check + ] in Out_channel.output_string csv_out_channel line ; Out_channel.newline csv_out_channel @@ -276,8 +282,9 @@ let write_csv_line_of_csv_data ~csv_out_channel ; blocks_won ; payout_obligation ; payout_received - ; deficit= _ - ; check= _ } = + ; deficit = _ + ; check = _ + } = write_csv_line ~csv_out_channel ~payout_addr ~balance ~delegatee ~delegation ~blocks_won ~payout_obligation ~payout_received @@ -317,27 +324,28 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri In_channel.input_lines prelim_csv_in_channel 
|> List.tl_exn in let split_lines = - List.map lines ~f:(String.split_on_chars ~on:[',']) + List.map lines ~f:(String.split_on_chars ~on:[ ',' ]) in let csv_datas = List.map split_lines ~f:csv_data_of_strings in List.iter csv_datas - ~f:(fun ({payout_addr; delegatee; payout_received; deficit; _} : - csv_data) + ~f:(fun + ({ payout_addr; delegatee; payout_received; deficit; _ } : + csv_data) -> - let key : Delegatee_payout_address.t = {delegatee; payout_addr} in - let data : previous_epoch_status = {payout_received; deficit} in + let key : Delegatee_payout_address.t = { delegatee; payout_addr } in + let data : previous_epoch_status = { payout_received; deficit } in match Deficit.Table.add deficit_tbl ~key ~data with | `Ok -> () | `Duplicate -> - failwith "Duplicate deficit table entry" ) ; + failwith "Duplicate deficit table entry") ; csv_datas in let archive_uri = Uri.of_string archive_uri in match Caqti_async.connect_pool ~max_size:128 archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -454,26 +462,26 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri CSV for previous epoch" | Some _, false -> [%log info] - "Producing preliminary CSV for current epoch and finalized CSV \ - for previous epoch" ) ; + "Producing preliminary CSV for current epoch and finalized CSV for \ + previous epoch" ) ; let block_ids = (* examine blocks in current epoch *) let min_slot = input.epoch * slots_per_epoch in let max_slot_int64 = min_slot + slots_per_epoch - 1 |> Int64.of_int in let min_slot_int64 = Int64.of_int min_slot in let relevant_block_infos = - List.filter block_infos ~f:(fun {global_slot; _} -> + List.filter block_infos ~f:(fun { global_slot; _ } -> Int64.( >= ) global_slot min_slot_int64 - && Int64.( <= ) global_slot max_slot_int64 ) + && Int64.( <= ) global_slot max_slot_int64) in - 
let ids = List.map relevant_block_infos ~f:(fun {id; _} -> id) in + let ids = List.map relevant_block_infos ~f:(fun { id; _ } -> id) in (* build mapping from global slots to state and ledger hashes *) List.iter block_infos - ~f:(fun {global_slot; state_hash; ledger_hash; _} -> + ~f:(fun { global_slot; state_hash; ledger_hash; _ } -> Hashtbl.add_exn global_slot_hashes_tbl ~key:global_slot ~data: ( State_hash.of_string state_hash - , Ledger_hash.of_string ledger_hash ) ) ; + , Ledger_hash.of_string ledger_hash )) ; Int.Set.of_list ids in (* check that genesis block is in chain to target hash @@ -502,8 +510,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri | `Ok -> () | `Duplicate -> - failwith "Duplicate account in initial staking ledger" )) - ) ; + failwith "Duplicate account in initial staking ledger"))) ; let slot_3500 = (input.epoch * slots_per_epoch) + 3500 |> Int64.of_int in [%log info] "Computing delegation information for payout addresses" ; let%bind payout_infos = @@ -540,7 +547,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri query_db pool ~f:(fun db -> Sql.User_command.run_payments_by_source_and_receiver db - ~source_id:delegatee_id ~receiver_id:payout_id ) + ~source_id:delegatee_id ~receiver_id:payout_id) ~item:"payments from delegatee" in let compare_by_global_slot p1 p2 = @@ -554,7 +561,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri let payments_from_delegatee = List.filter payments_from_delegatee_raw ~f:(fun payment -> Int.Set.mem block_ids payment.block_id - && Int64.(>=) payment.global_slot min_payment_slot ) + && Int64.( >= ) payment.global_slot min_payment_slot) |> List.sort ~compare:compare_by_global_slot in let payment_amount_and_slot (user_cmd : Sql.User_command.t) = @@ -564,24 +571,23 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ~f:(fun amt -> `String ( Int64.to_string amt |> Currency.Amount.of_string - |> 
Currency.Amount.to_formatted_string ) ) ) - ; ( "global_slot" - , `String (Int64.to_string user_cmd.global_slot) ) ] + |> Currency.Amount.to_formatted_string )) ) + ; ("global_slot", `String (Int64.to_string user_cmd.global_slot)) + ] in let payment_sender_amount_and_slot sender_pk (user_cmd : Sql.User_command.t) = `Assoc [ ( "sender" - , `String (Public_key.Compressed.to_base58_check sender_pk) - ) + , `String (Public_key.Compressed.to_base58_check sender_pk) ) ; ( "amount" , Option.value_map user_cmd.amount ~default:`Null ~f:(fun amt -> `String ( Int64.to_string amt |> Currency.Amount.of_string - |> Currency.Amount.to_formatted_string ) ) ) - ; ( "global_slot" - , `String (Int64.to_string user_cmd.global_slot) ) ] + |> Currency.Amount.to_formatted_string )) ) + ; ("global_slot", `String (Int64.to_string user_cmd.global_slot)) + ] in [%log info] "Direct payments from delegatee $delegatee to payout address \ @@ -592,21 +598,22 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ; ( "payments" , `List (List.map payments_from_delegatee - ~f:payment_amount_and_slot) ) ] ; + ~f:payment_amount_and_slot) ) + ] ; let%bind coinbase_receiver_ids = match%map Caqti_async.Pool.use (fun db -> Sql.Coinbase_receivers_for_block_creator.run db - ~block_creator_id:delegatee_id ) + ~block_creator_id:delegatee_id) pool with | Ok ids -> ids | Error err -> failwithf - "Error getting coinbase receiver ids from blocks where \ - the delegatee %s is the block creator, %s" + "Error getting coinbase receiver ids from blocks where the \ + delegatee %s is the block creator, %s" delegatee_str (Caqti_error.show err) () in let%bind payments_by_coinbase_receivers = @@ -622,7 +629,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ~f:(fun db -> Sql.User_command.run_payments_by_source_and_receiver db ~source_id:coinbase_receiver_id - ~receiver_id:payout_id ) + ~receiver_id:payout_id) ~item: (sprintf "Payments from coinbase receiver with id %d to \ @@ 
-633,16 +640,15 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri (* only payments in canonical chain *) List.filter payments_raw ~f:(fun payment -> Int.Set.mem block_ids payment.block_id - && Int64.(>=) payment.global_slot min_payment_slot ) + && Int64.( >= ) payment.global_slot min_payment_slot) |> List.sort ~compare:compare_by_global_slot in - Ok ((cb_receiver_pk, payments) :: accum) ) + Ok ((cb_receiver_pk, payments) :: accum)) with | Ok payments -> payments | Error err -> - failwithf - "Error getting payments from coinbase receivers: %s" + failwithf "Error getting payments from coinbase receivers: %s" (Caqti_error.show err) () in if not (List.is_empty payments_by_coinbase_receivers) then @@ -663,7 +669,9 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ; ( "payments" , `List (List.map payments - ~f:payment_amount_and_slot) ) ] )) ) ] ; + ~f:payment_amount_and_slot) ) + ])) ) + ] ; let payments_from_coinbase_receivers = (* to check compliance, don't need to know the payment source *) List.concat_map payments_by_coinbase_receivers @@ -677,7 +685,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri query_db pool ~f:(fun db -> Sql.User_command.run_payments_by_receiver db - ~receiver_id:payout_id ) + ~receiver_id:payout_id) ~item:"Payments to payment address" in (* only payments in canonical chain @@ -685,16 +693,16 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri *) List.filter payments_raw ~f:(fun payment -> Int.Set.mem block_ids payment.block_id - && Int64.(>=) payment.global_slot min_payment_slot + && Int64.( >= ) payment.global_slot min_payment_slot && not (List.mem payments_from_known_senders payment - ~equal:Sql.User_command.equal) ) + ~equal:Sql.User_command.equal)) |> List.sort ~compare:compare_by_global_slot in let%map senders_and_payments_from_anyone = Deferred.List.map payments_from_anyone ~f:(fun payment -> let%map sender_pk = pk_of_pk_id pool payment.source_id 
in - (sender_pk, payment) ) + (sender_pk, payment)) in if not (List.is_empty senders_and_payments_from_anyone) then [%log info] @@ -709,13 +717,12 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ~f:(fun (sender_pk, payment) -> ( "payment" , payment_sender_amount_and_slot sender_pk payment - ) )) ) ] ; - let payments = - payments_from_known_senders @ payments_from_anyone - in + ))) ) + ] ; + let payments = payments_from_known_senders @ payments_from_anyone in let payments_to_slot_3500, payments_past_slot_3500 = List.partition_tf payments ~f:(fun payment -> - Int64.( <= ) payment.global_slot slot_3500 ) + Int64.( <= ) payment.global_slot slot_3500) in { payout_pk ; payout_id @@ -724,7 +731,8 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ; delegatee_id ; payments ; payments_to_slot_3500 - ; payments_past_slot_3500 } ) + ; payments_past_slot_3500 + }) in let epoch_uint32 = input.epoch |> Unsigned.UInt32.of_int in let%bind () = @@ -760,14 +768,17 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri Int64.( + ) amount total in let deficit_tbl_key : Delegatee_payout_address.t = - { payout_addr= payout_info.payout_pk - ; delegatee= payout_info.delegatee } + { payout_addr = payout_info.payout_pk + ; delegatee = payout_info.delegatee + } in - let { payout_received= prev_payout_received - ; deficit= prev_epoch_deficit } = + let { payout_received = prev_payout_received + ; deficit = prev_epoch_deficit + } = if input.epoch = 0 then - { payout_received= Currency.Amount.zero - ; deficit= Currency.Amount.zero } + { payout_received = Currency.Amount.zero + ; deficit = Currency.Amount.zero + } else Deficit.Table.find_exn deficit_tbl deficit_tbl_key in let total_to_slot_3500 = @@ -816,7 +827,8 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ; ( "remaining_deficit" , `String (Currency.Amount.to_formatted_string - remaining_deficit) ) ] + remaining_deficit) ) + ] else [%log info] "Deficit 
in epoch %d from delegatee $delegatee to payout \ @@ -829,12 +841,12 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ) ; ( "payout_addr" , Public_key.Compressed.to_yojson payout_info.payout_pk - ) ] ; + ) + ] ; ( if input.epoch > 0 then let deficit_reduction = match - Currency.Amount.( - ) prev_epoch_deficit - remaining_deficit + Currency.Amount.( - ) prev_epoch_deficit remaining_deficit with | Some diff -> diff @@ -852,8 +864,9 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri failwith "Overflow calculating updated payout received" in let data = - { payout_received= updated_payout_received - ; deficit= remaining_deficit } + { payout_received = updated_payout_received + ; deficit = remaining_deficit + } in Deficit.Table.set deficit_tbl ~key:deficit_tbl_key ~data ) ; let to_slot_3500_available = @@ -870,8 +883,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri |> Unsigned.UInt64.to_int64 ) else total_to_slot_3500 in - if Int64.( > ) to_slot_3500_available_for_this_epoch Int64.zero - then + if Int64.( > ) to_slot_3500_available_for_this_epoch Int64.zero then [%log info] "Total payments through slot 3500 in next epoch were %s, of \ which allocated %s to this epoch" @@ -900,8 +912,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri (Float.( / ) ( Currency.Amount.to_string delegated_amount |> Float.of_string ) - ( Currency.Amount.to_string delegated_stake - |> Float.of_string )) + (Currency.Amount.to_string delegated_stake |> Float.of_string)) in let coinbase_amount = Float.( * ) 0.95 720.0 in [%log info] @@ -923,8 +934,8 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri in [%log info] "In epoch %d, delegatee %s produced %d blocks; for payout \ - address %s, the payout obligation per-block is %0.9f, the \ - total obligation is %s" + address %s, the payout obligation per-block is %0.9f, the total \ + obligation is %s" input.epoch 
(Public_key.Compressed.to_base58_check payout_info.delegatee) num_blocks_produced @@ -968,7 +979,7 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri ~blocks_won:num_blocks_produced ~payout_obligation:total_payout_obligation ~payout_received:payment_total_as_amount ) ; - return () ) + return ()) in ( match preliminary_csv_file_opt with | None -> @@ -979,18 +990,21 @@ let main ~input_file ~csv_file ~preliminary_csv_file_opt ~archive_uri let csv_out_channel = Out_channel.create finalized_csv_file in let updated_csv_datas = List.map csv_datas - ~f:(fun ({payout_addr; delegatee; _} as csv_data) -> + ~f:(fun ({ payout_addr; delegatee; _ } as csv_data) -> let key : Delegatee_payout_address.t = - {payout_addr; delegatee} + { payout_addr; delegatee } in - let {payout_received; deficit} = + let { payout_received; deficit } = Deficit.Table.find_exn deficit_tbl key in let current_check = Currency.Amount.equal deficit Currency.Amount.zero in - {csv_data with payout_received; deficit; check= current_check} - ) + { csv_data with + payout_received + ; deficit + ; check = current_check + }) in write_csv_header ~csv_out_channel ; List.iter updated_csv_datas diff --git a/src/app/delegation_compliance/payout_addresses.ml b/src/app/delegation_compliance/payout_addresses.ml index 025b523f296..14125998176 100644 --- a/src/app/delegation_compliance/payout_addresses.ml +++ b/src/app/delegation_compliance/payout_addresses.ml @@ -243,7 +243,8 @@ let foundation_addresses = ; "B62qndvr7iZsJeFdeYVGYXP6oJY64T5BHjPBTSrfdHrwkxuEYCfa1LF" ; "B62qovdiupo2b2UxNhASffHV4CKE5abZD4TmSdSJZzf623fkrgLvouR" ; "B62qoFmrHvDzz1H7qztEkvkaqNUbGUCaZJJJjXb8xMmxVotSaWDmyMw" - ; "B62qnkYYpjAHWvWkYsy7ANo9omZSQ53bwhKgHXUk8g2XANkjEq4AfNS" ] + ; "B62qnkYYpjAHWvWkYsy7ANo9omZSQ53bwhKgHXUk8g2XANkjEq4AfNS" + ] let o1_addresses = [ "B62qjMbmoXjUXSjqiR3z9zn8uSF62kExrbQ1mQq51w2ztVbSgEZGUmH" @@ -485,4 +486,5 @@ let o1_addresses = ; "B62qrFTdexNZeedkMCGSEUDtc14atEyevSj6Bzix1UUacaXmiKLsggw" ; 
"B62qpFjUdtZSGzXa6pYJuSv1R5vxJPorZLPgui8Gfg4YPrYEeL5pyz3" ; "B62qiydA5hjPMZ8wm7WkmZvcJWGio1A3X91Cu7PDP6WqqipQXEZu1gH" - ; "B62qqNHRcLYT3HvUHyQ972BeaZi9Nm4GCRWj4uUbB9iAHhc7ZXiBmFC" ] + ; "B62qqNHRcLYT3HvUHyQ972BeaZi9Nm4GCRWj4uUbB9iAHhc7ZXiBmFC" + ] diff --git a/src/app/delegation_compliance/sql.ml b/src/app/delegation_compliance/sql.ml index a74f9adc148..81e0b11560a 100644 --- a/src/app/delegation_compliance/sql.ml +++ b/src/app/delegation_compliance/sql.ml @@ -4,12 +4,12 @@ open Core_kernel module Block_info = struct type t = - {id: int; global_slot: int64; state_hash: string; ledger_hash: string} + { id : int; global_slot : int64; state_hash : string; ledger_hash : string } [@@deriving hlist] let typ = let open Archive_lib.Processor.Caqti_type_spec in - let spec = Caqti_type.[int; int64; string; string] in + let spec = Caqti_type.[ int; int64; string; string ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in Caqti_type.custom ~encode ~decode (to_rep spec) @@ -68,26 +68,27 @@ let find_command_ids_query s = module User_command = struct type t = - { type_: string - ; fee_payer_id: int - ; source_id: int - ; receiver_id: int - ; fee: int64 - ; fee_token: int64 - ; token: int64 - ; amount: int64 option - ; valid_until: int64 option - ; memo: string - ; nonce: int64 - ; block_id: int - ; global_slot: int64 - ; txn_global_slot: int64 - ; sequence_no: int - ; status: string - ; created_token: int64 option - ; fee_payer_balance: int - ; source_balance: int option - ; receiver_balance: int option } + { type_ : string + ; fee_payer_id : int + ; source_id : int + ; receiver_id : int + ; fee : int64 + ; fee_token : int64 + ; token : int64 + ; amount : int64 option + ; valid_until : int64 option + ; memo : string + ; nonce : int64 + ; block_id : int + ; global_slot : int64 + ; txn_global_slot : int64 + ; sequence_no : int + ; status : string + ; created_token : int64 option + ; fee_payer_balance : int + ; 
source_balance : int option + ; receiver_balance : int option + } [@@deriving yojson, hlist, equal] let typ = @@ -113,7 +114,8 @@ module User_command = struct ; option int64 ; int ; option int - ; option int ] + ; option int + ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in @@ -174,8 +176,8 @@ module User_command = struct |sql} - let run_payments_by_source_and_receiver - (module Conn : Caqti_async.CONNECTION) ~source_id ~receiver_id = + let run_payments_by_source_and_receiver (module Conn : Caqti_async.CONNECTION) + ~source_id ~receiver_id = Conn.collect_list query_payments_by_source_and_receiver (source_id, receiver_id) diff --git a/src/app/dhall_types/dump_dhall_types.ml b/src/app/dhall_types/dump_dhall_types.ml index 97e54b09f75..6f61a65dd2a 100644 --- a/src/app/dhall_types/dump_dhall_types.ml +++ b/src/app/dhall_types/dump_dhall_types.ml @@ -4,7 +4,7 @@ open Core (* Dhall types (as OCaml values) and their names to be used in Dhall *) let types_and_files = - [(Runtime_config.Json_layout.dhall_type, "runtime_config")] + [ (Runtime_config.Json_layout.dhall_type, "runtime_config") ] let print_dhall_type (ty, nm) = let s = Ppx_dhall_type.Dhall_type.to_string ty in @@ -14,7 +14,8 @@ let print_dhall_type (ty, nm) = Printf.fprintf oc "let %s : Type = %s in %s" nm s nm ; Caml.close_out oc ; ignore - (Unix.create_process ~prog:"dhall" ~args:["format"; "--inplace"; dhall_file]) + (Unix.create_process ~prog:"dhall" + ~args:[ "format"; "--inplace"; dhall_file ]) let main ~output_dir () = let output_dir = diff --git a/src/app/display_public_key/display_public_key.ml b/src/app/display_public_key/display_public_key.ml index 94a4a0ba0c7..2feacb7292b 100644 --- a/src/app/display_public_key/display_public_key.ml +++ b/src/app/display_public_key/display_public_key.ml @@ -2,7 +2,7 @@ open Async open Signature_lib let main privkey_path = - let%map {public_key; _} = + let%map { public_key; _ } = 
Secrets.Keypair.Terminal_stdin.read_exn privkey_path in printf "%s\n%!" diff --git a/src/app/extract_blocks/extract_blocks.ml b/src/app/extract_blocks/extract_blocks.ml index 0cdc739b1a2..24fe3a33667 100644 --- a/src/app/extract_blocks/extract_blocks.ml +++ b/src/app/extract_blocks/extract_blocks.ml @@ -37,7 +37,7 @@ let fill_in_block pool (block : Archive_lib.Processor.Block.t) : let%bind snarked_ledger_hash_str = query_db ~f:(fun db -> - Sql.Snarked_ledger_hashes.run db block.snarked_ledger_hash_id ) + Sql.Snarked_ledger_hashes.run db block.snarked_ledger_hash_id) ~item:"snarked ledger hash" in let snarked_ledger_hash = @@ -54,7 +54,7 @@ let fill_in_block pool (block : Archive_lib.Processor.Block.t) : let%bind staking_epoch_ledger_hash_str = query_db ~f:(fun db -> - Sql.Snarked_ledger_hashes.run db staking_epoch_ledger_hash_id ) + Sql.Snarked_ledger_hashes.run db staking_epoch_ledger_hash_id) ~item:"staking epoch ledger hash" in let staking_epoch_ledger_hash = @@ -97,8 +97,9 @@ let fill_in_block pool (block : Archive_lib.Processor.Block.t) : ; global_slot ; global_slot_since_genesis ; timestamp - ; user_cmds= [] - ; internal_cmds= [] } + ; user_cmds = [] + ; internal_cmds = [] + } let fill_in_user_commands pool block_state_hash = let query_db ~item ~f = query_db pool ~item ~f in @@ -115,24 +116,23 @@ let fill_in_user_commands pool block_state_hash = let balance_of_id_opt id_opt ~item = Option.value_map id_opt ~default:(Deferred.return None) ~f:(fun id -> let%map balance = balance_of_id id ~item in - Some balance ) + Some balance) in let open Deferred.Let_syntax in let%bind block_id = query_db ~item:"blocks" ~f:(fun db -> - Processor.Block.find db ~state_hash:block_state_hash ) + Processor.Block.find db ~state_hash:block_state_hash) in let%bind user_command_ids_and_sequence_nos = query_db ~item:"user command id, sequence no" ~f:(fun db -> - Sql.Blocks_and_user_commands.run db ~block_id ) + Sql.Blocks_and_user_commands.run db ~block_id) in (* create extensional 
user command for each id, seq no *) Deferred.List.map user_command_ids_and_sequence_nos ~f:(fun (user_command_id, sequence_no) -> let%bind user_cmd = query_db ~item:"user commands" ~f:(fun db -> - Processor.User_command.Signed_command.load db ~id:user_command_id - ) + Processor.User_command.Signed_command.load db ~id:user_command_id) in let typ = user_cmd.typ in let%bind fee_payer = pk_of_id ~item:"fee payer" user_cmd.fee_payer_id in @@ -147,22 +147,21 @@ let fill_in_user_commands pool block_state_hash = let nonce = user_cmd.nonce |> Account.Nonce.of_int in let amount = Option.map user_cmd.amount ~f:(fun amt -> - Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64 ) + Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64) in let fee = user_cmd.fee |> Unsigned.UInt64.of_int64 |> Currency.Fee.of_uint64 in let valid_until = Option.map user_cmd.valid_until ~f:(fun valid -> - Unsigned.UInt32.of_int64 valid - |> Mina_numbers.Global_slot.of_uint32 ) + Unsigned.UInt32.of_int64 valid |> Mina_numbers.Global_slot.of_uint32) in let memo = user_cmd.memo |> Signed_command_memo.of_string in let hash = user_cmd.hash |> Transaction_hash.of_base58_check_exn in let%bind block_user_cmd = query_db ~item:"block user commands" ~f:(fun db -> Processor.Block_and_signed_command.load db ~block_id - ~user_command_id ) + ~user_command_id) in let status = block_user_cmd.status in let failure_reason = @@ -172,7 +171,7 @@ let fill_in_user_commands pool block_state_hash = s | Error err -> failwithf "Not a transaction status failure: %s, error: %s" s - err () ) + err ()) in let%bind source_balance = balance_of_id_opt block_user_cmd.source_balance_id @@ -181,7 +180,7 @@ let fill_in_user_commands pool block_state_hash = let fee_payer_account_creation_fee_paid = Option.map block_user_cmd.fee_payer_account_creation_fee_paid ~f:(fun amt -> - Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64 ) + Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64) in let%bind fee_payer_balance = 
balance_of_id block_user_cmd.fee_payer_balance_id @@ -190,7 +189,7 @@ let fill_in_user_commands pool block_state_hash = let receiver_account_creation_fee_paid = Option.map block_user_cmd.receiver_account_creation_fee_paid ~f:(fun amt -> - Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64 ) + Unsigned.UInt64.of_int64 amt |> Currency.Amount.of_uint64) in let%bind receiver_balance = balance_of_id_opt block_user_cmd.receiver_balance_id @@ -198,7 +197,7 @@ let fill_in_user_commands pool block_state_hash = in let created_token = Option.map block_user_cmd.created_token ~f:(fun tok -> - Unsigned.UInt64.of_int64 tok |> Token_id.of_uint64 ) + Unsigned.UInt64.of_int64 tok |> Token_id.of_uint64) in return { Extensional.User_command.sequence_no @@ -221,7 +220,8 @@ let fill_in_user_commands pool block_state_hash = ; fee_payer_balance ; receiver_account_creation_fee_paid ; receiver_balance - ; created_token } ) + ; created_token + }) let fill_in_internal_commands pool block_state_hash = let query_db ~item ~f = query_db pool ~item ~f in @@ -232,24 +232,26 @@ let fill_in_internal_commands pool block_state_hash = let open Deferred.Let_syntax in let%bind block_id = query_db ~item:"blocks" ~f:(fun db -> - Processor.Block.find db ~state_hash:block_state_hash ) + Processor.Block.find db ~state_hash:block_state_hash) in let%bind internal_cmd_info = query_db ~item: - "internal command id, global_slot, sequence no, secondary sequence \ - no, receiver_balance_id" ~f:(fun db -> - Sql.Blocks_and_internal_commands.run db ~block_id ) + "internal command id, global_slot, sequence no, secondary sequence no, \ + receiver_balance_id" ~f:(fun db -> + Sql.Blocks_and_internal_commands.run db ~block_id) in Deferred.List.map internal_cmd_info - ~f:(fun { internal_command_id - ; sequence_no - ; secondary_sequence_no - ; receiver_balance_id } + ~f:(fun + { internal_command_id + ; sequence_no + ; secondary_sequence_no + ; receiver_balance_id + } -> let%bind _pubkey, receiver_balance_int64 = 
query_db ~item:"receiver balance" ~f:(fun db -> - Processor.Balance.load db ~id:receiver_balance_id ) + Processor.Balance.load db ~id:receiver_balance_id) in let receiver_balance = Unsigned.UInt64.of_int64 receiver_balance_int64 @@ -258,7 +260,7 @@ let fill_in_internal_commands pool block_state_hash = (* pieces from the internal_commands table *) let%bind internal_cmd = query_db ~item:"blocks internal commands" ~f:(fun db -> - Processor.Internal_command.load db ~id:internal_command_id ) + Processor.Internal_command.load db ~id:internal_command_id) in let typ = internal_cmd.typ in let%bind receiver = pk_of_id ~item:"receiver" internal_cmd.receiver_id in @@ -277,27 +279,28 @@ let fill_in_internal_commands pool block_state_hash = ; receiver_balance ; fee ; token - ; hash } + ; hash + } in - return cmd ) + return cmd) let check_state_hash ~logger state_hash_opt = match state_hash_opt with | None -> () | Some state_hash -> ( - match State_hash.of_base58_check state_hash with - | Ok _ -> - () - | Error err -> - [%log error] "Error decoding state hash" - ~metadata: - [ ("state_hash", `String state_hash) - ; ("error", Error_json.error_to_yojson err) ] ; - Core.exit 1 ) + match State_hash.of_base58_check state_hash with + | Ok _ -> + () + | Error err -> + [%log error] "Error decoding state hash" + ~metadata: + [ ("state_hash", `String state_hash) + ; ("error", Error_json.error_to_yojson err) + ] ; + Core.exit 1 ) -let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () - = +let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () = ( match (start_state_hash_opt, end_state_hash_opt, all_blocks) with | None, None, true | None, Some _, false | Some _, Some _, false -> () @@ -314,7 +317,7 @@ let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () match Caqti_async.connect_pool ~max_size:128 archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", 
`String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -333,12 +336,12 @@ let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () let%map blocks = query_db pool ~f:(fun db -> - Sql.Subchain.start_from_unparented db ~end_state_hash ) + Sql.Subchain.start_from_unparented db ~end_state_hash) ~item:"blocks starting from unparented" in let end_block_found = List.exists blocks ~f:(fun block -> - String.equal block.state_hash end_state_hash ) + String.equal block.state_hash end_state_hash) in if not end_block_found then ( [%log error] @@ -354,16 +357,16 @@ let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () query_db pool ~f:(fun db -> Sql.Subchain.start_from_specified db ~start_state_hash - ~end_state_hash ) + ~end_state_hash) ~item:"blocks starting from specified" in let start_block_found = List.exists blocks ~f:(fun block -> - String.equal block.state_hash start_state_hash ) + String.equal block.state_hash start_state_hash) in let end_block_found = List.exists blocks ~f:(fun block -> - String.equal block.state_hash end_state_hash ) + String.equal block.state_hash end_state_hash) in if not (start_block_found && end_block_found) then ( [%log error] @@ -393,9 +396,9 @@ let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () let user_cmds = List.sort unsorted_user_cmds ~compare:(fun (cmd1 : Extensional.User_command.t) cmd2 -> - Int.compare cmd1.sequence_no cmd2.sequence_no ) + Int.compare cmd1.sequence_no cmd2.sequence_no) in - {block with user_cmds} ) + { block with user_cmds }) in [%log info] "Querying for internal commands in blocks" ; let%bind blocks_with_all_cmds = @@ -409,23 +412,22 @@ let main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks () ~compare:(fun (cmd1 : Extensional.Internal_command.t) cmd2 -> [%compare: int * int] (cmd1.sequence_no, cmd1.secondary_sequence_no) - (cmd2.sequence_no, cmd2.secondary_sequence_no) ) + 
(cmd2.sequence_no, cmd2.secondary_sequence_no)) in - {block with internal_cmds} ) + { block with internal_cmds }) in [%log info] "Writing blocks" ; let%map () = Deferred.List.iter blocks_with_all_cmds ~f:(fun block -> [%log info] "Writing block with $state_hash" - ~metadata:[("state_hash", State_hash.to_yojson block.state_hash)] ; - let output_file = - State_hash.to_string block.state_hash ^ ".json" - in + ~metadata: + [ ("state_hash", State_hash.to_yojson block.state_hash) ] ; + let output_file = State_hash.to_string block.state_hash ^ ".json" in Async_unix.Writer.with_file output_file ~f:(fun writer -> return (Async.fprintf writer "%s\n%!" ( Extensional.Block.to_yojson block - |> Yojson.Safe.pretty_to_string )) ) ) + |> Yojson.Safe.pretty_to_string )))) in () @@ -446,9 +448,9 @@ let () = and start_state_hash_opt = Param.flag "--start-state-hash" ~doc: - "State hash of the block that begins a chain (default: start \ - at the block closest to the end block without a parent, \ - possibly the genesis block)" + "State hash of the block that begins a chain (default: start at \ + the block closest to the end block without a parent, possibly \ + the genesis block)" Param.(optional string) and end_state_hash_opt = Param.flag "--end-state-hash" @@ -458,5 +460,4 @@ let () = Param.flag "--all-blocks" Param.no_arg ~doc:"Extract all blocks in the archive database" in - main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt - ~all_blocks))) + main ~archive_uri ~start_state_hash_opt ~end_state_hash_opt ~all_blocks))) diff --git a/src/app/extract_blocks/sql.ml b/src/app/extract_blocks/sql.ml index 4b0bf99d279..26cc999040d 100644 --- a/src/app/extract_blocks/sql.ml +++ b/src/app/extract_blocks/sql.ml @@ -36,8 +36,8 @@ module Subchain = struct Archive_lib.Processor.Block.typ (make_sql ~join_condition: - "b.id = chain.parent_id AND (chain.state_hash <> $2 OR \ - b.state_hash = $2)") + "b.id = chain.parent_id AND (chain.state_hash <> $2 OR b.state_hash \ + = $2)") let 
start_from_unparented (module Conn : Caqti_async.CONNECTION) ~end_state_hash = @@ -100,15 +100,16 @@ end module Blocks_and_internal_commands = struct type t = - { internal_command_id: int - ; sequence_no: int - ; secondary_sequence_no: int - ; receiver_balance_id: int } + { internal_command_id : int + ; sequence_no : int + ; secondary_sequence_no : int + ; receiver_balance_id : int + } [@@deriving hlist] let typ = let open Archive_lib.Processor.Caqti_type_spec in - let spec = Caqti_type.[int; int; int; int] in + let spec = Caqti_type.[ int; int; int; int ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in Caqti_type.custom ~encode ~decode (to_rep spec) diff --git a/src/app/generate_keypair/generate_keypair.ml b/src/app/generate_keypair/generate_keypair.ml index 498328d8609..cce54ad0b52 100644 --- a/src/app/generate_keypair/generate_keypair.ml +++ b/src/app/generate_keypair/generate_keypair.ml @@ -5,10 +5,10 @@ open Async let () = let is_version_cmd s = - List.mem ["version"; "-version"] s ~equal:String.equal + List.mem [ "version"; "-version" ] s ~equal:String.equal in - match (Sys.get_argv ()) with - | [|_generate_keypair_exe; version|] when is_version_cmd version -> + match Sys.get_argv () with + | [| _generate_keypair_exe; version |] when is_version_cmd version -> Mina_version.print_version () | _ -> Command.run Cli_lib.Commands.generate_keypair diff --git a/src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.ml b/src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.ml index fab3c864b29..5c11bf02d96 100644 --- a/src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.ml +++ b/src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.ml @@ -48,9 +48,9 @@ let slots_per_month_float = Float.of_int slots_per_month let valid_mina_amount amount = let is_num_string s = String.for_all s ~f:Char.is_digit in match String.split ~on:'.' 
amount with - | [whole] -> + | [ whole ] -> is_num_string whole - | [whole; decimal] when String.length decimal <= 9 -> + | [ whole; decimal ] when String.length decimal <= 9 -> is_num_string whole && is_num_string decimal | _ -> false @@ -67,7 +67,7 @@ let amount_geq_min_balance ~amount ~initial_min_balance = *) let generate_delegate_account ~logger delegatee_pk = [%log info] "Generating account for delegatee $delegatee" - ~metadata:[("delegatee", `String delegatee_pk)] ; + ~metadata:[ ("delegatee", `String delegatee_pk) ] ; let pk = Some delegatee_pk in let balance = Currency.Balance.zero in let timing = None in @@ -76,7 +76,8 @@ let generate_delegate_account ~logger delegatee_pk = pk ; balance ; timing - ; delegate } + ; delegate + } let generate_missing_delegate_accounts ~logger = (* for each delegate that doesn't have a corresponding account, @@ -85,7 +86,7 @@ let generate_missing_delegate_accounts ~logger = let delegates = String.Table.keys delegates_tbl in let missing_delegates = List.filter delegates ~f:(fun delegate -> - not (String.Table.mem accounts_tbl delegate) ) + not (String.Table.mem accounts_tbl delegate)) in let delegate_accounts = List.map missing_delegates ~f:(generate_delegate_account ~logger) @@ -96,7 +97,7 @@ let runtime_config_account ~logger ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months ~cliff_amount ~unlock_frequency ~unlock_amount ~delegatee_pk = [%log info] "Processing record for $wallet_pk" - ~metadata:[("wallet_pk", `String wallet_pk)] ; + ~metadata:[ ("wallet_pk", `String wallet_pk) ] ; let pk = Some wallet_pk in let balance = Currency.Balance.of_formatted_string amount in let initial_minimum_balance = @@ -136,7 +137,8 @@ let runtime_config_account ~logger ~wallet_pk ~amount ~initial_min_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } + ; vesting_increment + } in let delegate = (* 0 or empty string denotes "no delegation" *) @@ -146,7 +148,8 @@ let runtime_config_account ~logger ~wallet_pk 
~amount ~initial_min_balance pk ; balance ; timing - ; delegate } + ; delegate + } let account_of_tsv ~logger tsv = match String.split tsv ~on:'\t' with @@ -159,15 +162,15 @@ let account_of_tsv ~logger tsv = ; cliff_amount ; unlock_frequency ; unlock_amount - ; delegatee_pk ] -> + ; delegatee_pk + ] -> Some (runtime_config_account ~logger ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months ~cliff_amount ~unlock_frequency ~unlock_amount ~delegatee_pk) | _ -> (* should not occur, we've already validated the record *) - failwithf "TSV line does not contain expected number of fields: %s" tsv - () + failwithf "TSV line does not contain expected number of fields: %s" tsv () let validate_fields ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months ~cliff_amount ~unlock_frequency ~unlock_amount ~delegatee_pk = @@ -187,7 +190,7 @@ let validate_fields ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months in let valid_cliff_amount = valid_mina_amount cliff_amount in let valid_unlock_frequency = - List.mem ["0"; "1"] unlock_frequency ~equal:String.equal + List.mem [ "0"; "1" ] unlock_frequency ~equal:String.equal in let valid_unlock_amount = valid_mina_amount unlock_amount in let valid_delegatee_pk = @@ -222,12 +225,13 @@ let validate_fields ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months ; ("timing", valid_timing) ; ("unlock_frequency", valid_unlock_frequency) ; ("unlock_amount", valid_unlock_amount) - ; ("delegatee_pk", valid_delegatee_pk) ] + ; ("delegatee_pk", valid_delegatee_pk) + ] in let valid_str = "VALID" in let invalid_fields = List.map valid_field_descs ~f:(fun (field, valid) -> - if valid then valid_str else field ) + if valid then valid_str else field) |> List.filter ~f:(fun field -> not (String.equal field valid_str)) |> String.concat ~sep:"," in @@ -244,10 +248,10 @@ let validate_record tsv = ; cliff_amount ; unlock_frequency ; unlock_amount - ; delegatee_pk ] -> - validate_fields ~wallet_pk ~amount ~initial_min_balance - 
~cliff_time_months ~cliff_amount ~unlock_frequency ~unlock_amount - ~delegatee_pk + ; delegatee_pk + ] -> + validate_fields ~wallet_pk ~amount ~initial_min_balance ~cliff_time_months + ~cliff_amount ~unlock_frequency ~unlock_amount ~delegatee_pk | _ -> Some "TSV line does not contain expected number of fields" @@ -259,7 +263,7 @@ let main ~tsv_file ~output_file () = let validation_errors = In_channel.with_file tsv_file ~f:(fun in_channel -> [%log info] "Opened TSV file $tsv_file for validation" - ~metadata:[("tsv_file", `String tsv_file)] ; + ~metadata:[ ("tsv_file", `String tsv_file) ] ; let rec go num_accounts validation_errors = match In_channel.input_line in_channel with | Some line -> @@ -274,7 +278,8 @@ let main ~tsv_file ~output_file () = $invalid_fields" ~metadata: [ ("row", `Int (num_accounts + 2)) - ; ("invalid_fields", `String invalid_fields) ] ; + ; ("invalid_fields", `String invalid_fields) + ] ; true in go (num_accounts + 1) validation_errors @@ -283,7 +288,7 @@ let main ~tsv_file ~output_file () = in (* skip first line *) let _headers = In_channel.input_line in_channel in - go 0 false ) + go 0 false) in if validation_errors then ( [%log fatal] "Input has validation errors, exiting" ; @@ -293,7 +298,7 @@ let main ~tsv_file ~output_file () = let provided_accounts, num_accounts = In_channel.with_file tsv_file ~f:(fun in_channel -> [%log info] "Opened TSV file $tsv_file for translation" - ~metadata:[("tsv_file", `String tsv_file)] ; + ~metadata:[ ("tsv_file", `String tsv_file) ] ; let rec go accounts num_accounts = match In_channel.input_line in_channel with | Some line -> ( @@ -308,7 +313,7 @@ let main ~tsv_file ~output_file () = in (* skip first line *) let _headers = In_channel.input_line in_channel in - go [] 0 ) + go [] 0) in [%log info] "Processed %d records" num_accounts ; let generated_accounts, num_generated = @@ -324,7 +329,7 @@ let main ~tsv_file ~output_file () = List.iter jsons ~f:(fun json -> Out_channel.output_string out_channel 
(Yojson.Safe.pretty_to_string json) ; - Out_channel.newline out_channel ) ) ; + Out_channel.newline out_channel)) ; return () let () = diff --git a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml index 5205263797a..a37fdc85da3 100644 --- a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml +++ b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml @@ -9,7 +9,7 @@ let main ~archive_uri () = match Caqti_async.connect_pool ~max_size:128 archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -23,7 +23,7 @@ let main ~archive_uri () = return blocks | Error msg -> [%log error] "Error getting missing blocks" - ~metadata:[("error", `String (Caqti_error.show msg))] ; + ~metadata:[ ("error", `String (Caqti_error.show msg)) ] ; exit 1 in List.iter missing_blocks ~f:(fun (block_id, state_hash, parent_hash) -> @@ -31,7 +31,8 @@ let main ~archive_uri () = ~metadata: [ ("block_id", `Int block_id) ; ("state_hash", `String state_hash) - ; ("parent_hash", `String parent_hash) ] ) ; + ; ("parent_hash", `String parent_hash) + ]) ; () let () = diff --git a/src/app/missing_blocks_auditor/sql.ml b/src/app/missing_blocks_auditor/sql.ml index c0deec31dff..a9df5523680 100644 --- a/src/app/missing_blocks_auditor/sql.ml +++ b/src/app/missing_blocks_auditor/sql.ml @@ -11,6 +11,5 @@ module Unparented_blocks = struct WHERE parent_id IS NULL |} - let run (module Conn : Caqti_async.CONNECTION) () = - Conn.collect_list query () + let run (module Conn : Caqti_async.CONNECTION) () = Conn.collect_list query () end diff --git a/src/app/patch_archive_test/patch_archive_test.ml b/src/app/patch_archive_test/patch_archive_test.ml index 9a0060f1e5f..804e6c5b949 100644 --- a/src/app/patch_archive_test/patch_archive_test.ml +++ 
b/src/app/patch_archive_test/patch_archive_test.ml @@ -35,12 +35,12 @@ let complete_prog_path prog_path = else Sys.getcwd () ^/ prog_path let extract_blocks ~uri ~working_dir ~extract_blocks_path = - let args = ["--archive-uri"; Uri.to_string uri; "--all-blocks"] in + let args = [ "--archive-uri"; Uri.to_string uri; "--all-blocks" ] in let prog = complete_prog_path extract_blocks_path in Process.run_lines_exn ~working_dir ~prog ~args () let archive_blocks ~uri ~archive_blocks_path ~block_kind files = - let args = ["--archive-uri"; Uri.to_string uri; block_kind] @ files in + let args = [ "--archive-uri"; Uri.to_string uri; block_kind ] @ files in Process.run_lines_exn ~prog:archive_blocks_path ~args () let compare_blocks ~logger ~original_blocks_dir ~copy_blocks_dir = @@ -57,7 +57,7 @@ let compare_blocks ~logger ~original_blocks_dir ~copy_blocks_dir = block | Error err -> failwithf "Could not parse extensional block in file %s, error: %s" - fn err () ) + fn err ()) in let%bind original_blocks = blocks_in_dir original_blocks_dir in let%bind copy_blocks = blocks_in_dir copy_blocks_dir in @@ -67,10 +67,11 @@ let compare_blocks ~logger ~original_blocks_dir ~copy_blocks_dir = let original_diff = diff_list original_blocks copy_blocks in [%log error] "Original database contains these blocks not in the copy" ~metadata: - [("blocks", `List (List.map original_diff ~f:(fun s -> `String s)))] ; + [ ("blocks", `List (List.map original_diff ~f:(fun s -> `String s))) ] ; let copy_diff = diff_list copy_blocks original_blocks in [%log error] "Copied database contains these blocks not in the original" - ~metadata:[("blocks", `List (List.map copy_diff ~f:(fun s -> `String s)))] ; + ~metadata: + [ ("blocks", `List (List.map copy_diff ~f:(fun s -> `String s))) ] ; Core_kernel.exit 1 ) ; [%log info] "After patching, original and copied databases contain the same set of \ @@ -81,12 +82,11 @@ let compare_blocks ~logger ~original_blocks_dir ~copy_blocks_dir = String.Set.fold original_blocks 
~init:false ~f:(fun acc block_file -> let original_block = get_block (original_blocks_dir ^/ block_file) in let copied_block = get_block (copy_blocks_dir ^/ block_file) in - if - not (Archive_lib.Extensional.Block.equal original_block copied_block) + if not (Archive_lib.Extensional.Block.equal original_block copied_block) then ( [%log error] "Original, copied blocks differ in file %s" block_file ; true ) - else acc ) + else acc) in if found_difference then ( [%log fatal] @@ -127,7 +127,7 @@ let main ~archive_uri ~num_blocks_to_patch ~archive_blocks_path match Caqti_async.connect_pool ~max_size:128 archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -162,7 +162,7 @@ let main ~archive_uri ~num_blocks_to_patch ~archive_blocks_path match Caqti_async.connect_pool ~max_size:128 copy_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -186,9 +186,9 @@ let main ~archive_uri ~num_blocks_to_patch ~archive_blocks_path otherwise, we get a foreign key constraint violation *) [%log info] - "Removing parent references to block with state hash \ - $state_hash in copied database" - ~metadata:[("state_hash", `String state_hash)] ; + "Removing parent references to block with state hash $state_hash \ + in copied database" + ~metadata:[ ("state_hash", `String state_hash) ] ; let%bind id = query_db pool ~f:(fun db -> Sql.Block.run db ~state_hash) @@ -201,16 +201,14 @@ let main ~archive_uri ~num_blocks_to_patch ~archive_blocks_path in [%log info] "Deleting block with state hash $state_hash from copied database" - ~metadata:[("state_hash", `String state_hash)] ; + ~metadata:[ ("state_hash", `String state_hash) ] ; query_db pool ~f:(fun db -> 
Sql.Block.run_delete db ~state_hash) - ~item:"state hash of block to delete" ) + ~item:"state hash of block to delete") in (* patch the copy with precomputed or extensional blocks, using the archive_blocks tool *) [%log info] "Patching the copy with supplied blocks" ; - let block_kind = - if precomputed then "-precomputed" else "-extensional" - in + let block_kind = if precomputed then "-precomputed" else "-extensional" in let%bind _lines = archive_blocks ~uri:copy_uri ~archive_blocks_path ~block_kind files in @@ -262,24 +260,24 @@ let () = Param.(required string) and num_blocks_to_patch = Param.( - flag "--num-blocks-to-patch" ~aliases:["num-blocks-to-patch"] + flag "--num-blocks-to-patch" ~aliases:[ "num-blocks-to-patch" ] Param.(required int)) ~doc:"NN Number of blocks to remove and patch" and archive_blocks_path = Param.( - flag "--archive-blocks-path" ~aliases:["archive-blocks-path"] + flag "--archive-blocks-path" ~aliases:[ "archive-blocks-path" ] Param.(required string)) ~doc:"PATH Path to archive_blocks executable" and extract_blocks_path = Param.( - flag "--extract-blocks-path" ~aliases:["extract-blocks-path"] + flag "--extract-blocks-path" ~aliases:[ "extract-blocks-path" ] Param.(required string)) ~doc:"PATH Path to extract_blocks executable" and precomputed = - Param.(flag "--precomputed" ~aliases:["precomputed"] no_arg) + Param.(flag "--precomputed" ~aliases:[ "precomputed" ] no_arg) ~doc:"Blocks are in precomputed format" and extensional = - Param.(flag "--extensional" ~aliases:["extensional"] no_arg) + Param.(flag "--extensional" ~aliases:[ "extensional" ] no_arg) ~doc:"Blocks are in extensional format" and files = Param.anon Anons.(sequence ("FILES" %: Param.string)) in main ~archive_uri ~num_blocks_to_patch ~archive_blocks_path diff --git a/src/app/replayer/replayer.ml b/src/app/replayer/replayer.ml index c7b783c222f..fcd89921386 100644 --- a/src/app/replayer/replayer.ml +++ b/src/app/replayer/replayer.ml @@ -24,15 +24,17 @@ open Mina_base *) type 
input = - { target_epoch_ledgers_state_hash: State_hash.t - ; genesis_ledger: Runtime_config.Ledger.t } + { target_epoch_ledgers_state_hash : State_hash.t + ; genesis_ledger : Runtime_config.Ledger.t + } [@@deriving yojson] type output = - { target_epoch_ledgers_state_hash: State_hash.t - ; target_fork_state_hash: State_hash.t - ; target_genesis_ledger: Runtime_config.Ledger.t - ; target_epoch_data: Runtime_config.Epoch_data.t } + { target_epoch_ledgers_state_hash : State_hash.t + ; target_fork_state_hash : State_hash.t + ; target_genesis_ledger : Runtime_config.Ledger.t + ; target_epoch_data : Runtime_config.Epoch_data.t + } [@@deriving yojson] let error_count = ref 0 @@ -49,11 +51,11 @@ let create_output ~target_fork_state_hash ~target_epoch_ledgers_state_hash (input_genesis_ledger : Runtime_config.Ledger.t) = let create_ledger_as_list ledger = List.map (Ledger.to_list ledger) ~f:(fun acc -> - Genesis_ledger_helper.Accounts.Single.of_account acc None ) + Genesis_ledger_helper.Accounts.Single.of_account acc None) in let genesis_ledger_as_list = create_ledger_as_list ledger in let target_genesis_ledger = - {input_genesis_ledger with base= Accounts genesis_ledger_as_list} + { input_genesis_ledger with base = Accounts genesis_ledger_as_list } in let staking_epoch_ledger_as_list = create_ledger_as_list staking_epoch_ledger @@ -61,27 +63,27 @@ let create_output ~target_fork_state_hash ~target_epoch_ledgers_state_hash let next_epoch_ledger_as_list = create_ledger_as_list next_epoch_ledger in let target_staking_epoch_data : Runtime_config.Epoch_data.Data.t = let ledger = - {input_genesis_ledger with base= Accounts staking_epoch_ledger_as_list} + { input_genesis_ledger with base = Accounts staking_epoch_ledger_as_list } in - {ledger; seed= staking_seed} + { ledger; seed = staking_seed } in let target_next_epoch_data : Runtime_config.Epoch_data.Data.t = let ledger = - {input_genesis_ledger with base= Accounts next_epoch_ledger_as_list} + { input_genesis_ledger with base = 
Accounts next_epoch_ledger_as_list } in - {ledger; seed= next_seed} + { ledger; seed = next_seed } in let target_epoch_data : Runtime_config.Epoch_data.t = - {staking= target_staking_epoch_data; next= Some target_next_epoch_data} + { staking = target_staking_epoch_data; next = Some target_next_epoch_data } in { target_fork_state_hash ; target_epoch_ledgers_state_hash ; target_genesis_ledger - ; target_epoch_data } + ; target_epoch_data + } (* map from global slots to state hash, ledger hash pairs *) -let global_slot_hashes_tbl : (Int64.t, State_hash.t * Ledger_hash.t) Hashtbl.t - = +let global_slot_hashes_tbl : (Int64.t, State_hash.t * Ledger_hash.t) Hashtbl.t = Int64.Table.create () (* cache of account keys *) @@ -106,15 +108,15 @@ let pk_of_pk_id pool pk_id : Account.key Deferred.t = Caqti_async.Pool.use (fun db -> Sql.Public_key.run db pk_id) pool with | Ok (Some pk) -> ( - match Signature_lib.Public_key.Compressed.of_base58_check pk with - | Ok pk -> - Hashtbl.add_exn pk_tbl ~key:pk_id ~data:pk ; - pk - | Error err -> - Error.tag_arg err "Error decoding public key" - (("public_key", pk), ("id", pk_id)) - [%sexp_of: (string * string) * (string * int)] - |> Error.raise ) + match Signature_lib.Public_key.Compressed.of_base58_check pk with + | Ok pk -> + Hashtbl.add_exn pk_tbl ~key:pk_id ~data:pk ; + pk + | Error err -> + Error.tag_arg err "Error decoding public key" + (("public_key", pk), ("id", pk_id)) + [%sexp_of: (string * string) * (string * int)] + |> Error.raise ) | Ok None -> failwithf "Could not find public key with id %d" pk_id () | Error msg -> @@ -170,9 +172,9 @@ let epoch_data_of_id ~logger pool epoch_data_id = (fun db -> Sql.Epoch_data.get_epoch_data db epoch_data_id) pool with - | Ok {epoch_ledger_hash; epoch_data_seed} -> + | Ok { epoch_ledger_hash; epoch_data_seed } -> [%log info] "Found epoch data for id %d" epoch_data_id ; - ({epoch_ledger_hash; epoch_data_seed} : Sql.Epoch_data.epoch_data) + ({ epoch_ledger_hash; epoch_data_seed } : 
Sql.Epoch_data.epoch_data) | Error msg -> failwithf "Error retrieving epoch data for epoch data id %d, error: %s" epoch_data_id (Caqti_error.show msg) () @@ -187,7 +189,8 @@ let process_block_infos_of_state_hash ~logger pool state_hash ~f = [%log error] "Error getting block information for state hash" ~metadata: [ ("error", `String (Caqti_error.show msg)) - ; ("state_hash", `String state_hash) ] ; + ; ("state_hash", `String state_hash) + ] ; exit 1 let update_epoch_ledger ~logger ~name ~ledger ~epoch_ledger epoch_ledger_hash = @@ -222,7 +225,7 @@ let update_epoch_ledger ~logger ~name ~ledger ~epoch_ledger epoch_ledger_hash = [%sexp_of: (string * Signature_lib.Public_key.Compressed.t) * (string * Token_id.t)] - |> Error.raise ) ; + |> Error.raise) ; epoch_ledger ) else epoch_ledger @@ -238,7 +241,7 @@ let update_staking_epoch_data ~logger pool ~ledger ~last_block_id ~f:(fun db -> Sql.Epoch_data.get_staking_epoch_data_id db state_hash) ~item:"staking epoch id" in - let%map {epoch_ledger_hash; epoch_data_seed} = + let%map { epoch_ledger_hash; epoch_data_seed } = query_db pool ~f:(fun db -> Sql.Epoch_data.get_epoch_data db staking_epoch_id) ~item:"staking epoch data" @@ -261,7 +264,7 @@ let update_next_epoch_data ~logger pool ~ledger ~last_block_id ~f:(fun db -> Sql.Epoch_data.get_next_epoch_data_id db state_hash) ~item:"next epoch id" in - let%map {epoch_ledger_hash; epoch_data_seed} = + let%map { epoch_ledger_hash; epoch_data_seed } = query_db pool ~f:(fun db -> Sql.Epoch_data.get_epoch_data db next_epoch_id) ~item:"next epoch data" @@ -283,12 +286,11 @@ module Fee_transfer_key = struct include Hashable.Make (T) end -let fee_transfer_tbl : (Fee_transfer_key.t, Coinbase_fee_transfer.t) Hashtbl.t - = +let fee_transfer_tbl : (Fee_transfer_key.t, Coinbase_fee_transfer.t) Hashtbl.t = Fee_transfer_key.Table.create () -let cache_fee_transfer_via_coinbase pool - (internal_cmd : Sql.Internal_command.t) = +let cache_fee_transfer_via_coinbase pool (internal_cmd : 
Sql.Internal_command.t) + = match internal_cmd.type_ with | "fee_transfer_via_coinbase" -> let%map receiver_pk = pk_of_pk_id pool internal_cmd.receiver_id in @@ -316,14 +318,15 @@ let verify_balance ~logger ~pool ~ledger ~who ~balance_id ~pk_id ~token_int64 let actual_balance = match Ledger.location_of_account ledger account_id with | Some loc -> ( - match Ledger.get ledger loc with - | Some account -> - account.balance - | None -> - failwithf - "Could not find account in ledger for public key %s and token id %s" - (Signature_lib.Public_key.Compressed.to_base58_check pk) - (Token_id.to_string token) () ) + match Ledger.get ledger loc with + | Some account -> + account.balance + | None -> + failwithf + "Could not find account in ledger for public key %s and token id \ + %s" + (Signature_lib.Public_key.Compressed.to_base58_check pk) + (Token_id.to_string token) () ) | None -> failwithf "Could not get location of account for public key %s and token id %s" @@ -335,7 +338,8 @@ let verify_balance ~logger ~pool ~ledger ~who ~balance_id ~pk_id ~token_int64 ~metadata: [ ("who", `String who) ; ("claimed_balance", Currency.Balance.to_yojson claimed_balance) - ; ("actual_balance", Currency.Balance.to_yojson actual_balance) ] ; + ; ("actual_balance", Currency.Balance.to_yojson actual_balance) + ] ; if continue_on_error then incr error_count else Core_kernel.exit 1 ) let run_internal_command ~logger ~pool ~ledger (cmd : Sql.Internal_command.t) @@ -453,7 +457,7 @@ let apply_combined_fee_transfer ~logger ~pool ~ledger ~continue_on_error |> Error.raise let body_of_sql_user_cmd pool - ({type_; source_id; receiver_id; token= tok; amount; global_slot; _} : + ({ type_; source_id; receiver_id; token = tok; amount; global_slot; _ } : Sql.User_command.t) : Signed_command_payload.Body.t Deferred.t = let open Signed_command_payload.Body in let open Deferred.Let_syntax in @@ -471,31 +475,33 @@ let body_of_sql_user_cmd pool if Option.is_none amount then failwithf "Payment at global slot %Ld 
has NULL amount" global_slot () ; let amount = Option.value_exn amount in - Payment Payment_payload.Poly.{source_pk; receiver_pk; token_id; amount} + Payment Payment_payload.Poly.{ source_pk; receiver_pk; token_id; amount } | "delegation" -> Stake_delegation (Stake_delegation.Set_delegate - {delegator= source_pk; new_delegate= receiver_pk}) + { delegator = source_pk; new_delegate = receiver_pk }) | "create_token" -> Create_new_token - { New_token_payload.token_owner_pk= source_pk - ; disable_new_accounts= false } + { New_token_payload.token_owner_pk = source_pk + ; disable_new_accounts = false + } | "create_account" -> Create_token_account { New_account_payload.token_id - ; token_owner_pk= source_pk + ; token_owner_pk = source_pk ; receiver_pk - ; account_disabled= false } + ; account_disabled = false + } | "mint_tokens" -> if Option.is_none amount then - failwithf "Mint token at global slot %Ld has NULL amount" global_slot - () ; + failwithf "Mint token at global slot %Ld has NULL amount" global_slot () ; let amount = Option.value_exn amount in Mint_tokens { Minting_payload.token_id - ; token_owner_pk= source_pk + ; token_owner_pk = source_pk ; receiver_pk - ; amount } + ; amount + } | _ -> failwithf "Invalid user command type: %s" type_ () @@ -510,7 +516,7 @@ let run_user_command ~logger ~pool ~ledger (cmd : Sql.User_command.t) let memo = Signed_command_memo.of_string cmd.memo in let valid_until = Option.map cmd.valid_until ~f:(fun slot -> - Mina_numbers.Global_slot.of_uint32 @@ Unsigned.UInt32.of_int64 slot ) + Mina_numbers.Global_slot.of_uint32 @@ Unsigned.UInt32.of_int64 slot) in let payload = Signed_command_payload.create @@ -526,13 +532,13 @@ let run_user_command ~logger ~pool ~ledger (cmd : Sql.User_command.t) *) let signer = Signature_lib.Public_key.decompress_exn fee_payer_pk in let signed_cmd = - Signed_command.Poly.{payload; signer; signature= Signature.dummy} + Signed_command.Poly.{ payload; signer; signature = Signature.dummy } in (* the signature 
isn't checked when applying, the real signature was checked in the transaction SNARK, so deem the signature to be valid here *) - let (`If_this_is_used_it_should_have_a_comment_justifying_it - valid_signed_cmd) = + let (`If_this_is_used_it_should_have_a_comment_justifying_it valid_signed_cmd) + = Signed_command.to_valid_unsafe signed_cmd in let txn_global_slot = Unsigned.UInt32.of_int64 cmd.txn_global_slot in @@ -544,14 +550,14 @@ let run_user_command ~logger ~pool ~ledger (cmd : Sql.User_command.t) (* verify balances in database against current ledger *) let token_int64 = (* if the command is "create token", the token for the command is 0 (meaning unused), - and the balance is for source/receiver account using the new token - *) + and the balance is for source/receiver account using the new token + *) match (cmd.token, cmd.created_token) with | 0L, Some token -> token | n, Some m -> - failwithf "New token %Ld in user command with nonzero token %Ld" n - m () + failwithf "New token %Ld in user command with nonzero token %Ld" n m + () | _, None -> cmd.token in @@ -603,7 +609,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = match Caqti_async.connect_pool ~max_size:128 archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti pool for Postgresql" ; exit 1 | Ok pool -> @@ -636,23 +642,22 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = let%bind fork_state_hash = query_db pool ~f:(fun db -> - Sql.Parent_block.get_parent_state_hash db epoch_ledgers_state_hash - ) + Sql.Parent_block.get_parent_state_hash db epoch_ledgers_state_hash) ~item:"parent state hash of state hash" in [%log info] "Loading block information using target state hash" ; let%bind block_ids = process_block_infos_of_state_hash ~logger pool fork_state_hash ~f:(fun block_infos -> - let ids = List.map block_infos ~f:(fun {id; _} -> id) in 
+ let ids = List.map block_infos ~f:(fun { id; _ } -> id) in (* build mapping from global slots to state and ledger hashes *) List.iter block_infos - ~f:(fun {global_slot; state_hash; ledger_hash; _} -> + ~f:(fun { global_slot; state_hash; ledger_hash; _ } -> Hashtbl.add_exn global_slot_hashes_tbl ~key:global_slot ~data: ( State_hash.of_string state_hash - , Ledger_hash.of_string ledger_hash ) ) ; - return (Int.Set.of_list ids) ) + , Ledger_hash.of_string ledger_hash )) ; + return (Int.Set.of_list ids)) in (* check that genesis block is in chain to target hash assumption: genesis block occupies global slot 0 @@ -678,7 +683,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = return ids | Error msg -> [%log error] "Error getting user command ids" - ~metadata:[("error", `String (Caqti_error.show msg))] ; + ~metadata:[ ("error", `String (Caqti_error.show msg)) ] ; exit 1 in [%log info] "Loading internal command ids" ; @@ -692,7 +697,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = return ids | Error msg -> [%log error] "Error getting user command ids" - ~metadata:[("error", `String (Caqti_error.show msg))] ; + ~metadata:[ ("error", `String (Caqti_error.show msg)) ] ; exit 1 in [%log info] "Obtained %d user command ids and %d internal command ids" @@ -714,21 +719,21 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = internal_cmds | Error msg -> failwithf - "Error querying for internal commands with id %d, error %s" - id (Caqti_error.show msg) () ) + "Error querying for internal commands with id %d, error %s" id + (Caqti_error.show msg) ()) in let unsorted_internal_cmds = List.concat unsorted_internal_cmds_list in (* filter out internal commands in blocks not along chain from target state hash *) let filtered_internal_cmds = List.filter unsorted_internal_cmds ~f:(fun cmd -> - Int.Set.mem block_ids cmd.block_id ) + Int.Set.mem block_ids cmd.block_id) in let sorted_internal_cmds = List.sort 
filtered_internal_cmds ~compare:(fun ic1 ic2 -> let tuple (ic : Sql.Internal_command.t) = (ic.global_slot, ic.sequence_no, ic.secondary_sequence_no) in - [%compare: int64 * int * int] (tuple ic1) (tuple ic2) ) + [%compare: int64 * int * int] (tuple ic1) (tuple ic2)) in (* populate cache of fee transfer via coinbase items *) [%log info] "Populating fee transfer via coinbase cache" ; @@ -750,28 +755,28 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = | Error msg -> failwithf "Error querying for user commands with id %d, error %s" id - (Caqti_error.show msg) () ) + (Caqti_error.show msg) ()) in let unsorted_user_cmds = List.concat unsorted_user_cmds_list in (* filter out user commands in blocks not along chain from target state hash *) let filtered_user_cmds = List.filter unsorted_user_cmds ~f:(fun cmd -> - Int.Set.mem block_ids cmd.block_id ) + Int.Set.mem block_ids cmd.block_id) in let sorted_user_cmds = List.sort filtered_user_cmds ~compare:(fun uc1 uc2 -> let tuple (uc : Sql.User_command.t) = (uc.global_slot, uc.sequence_no) in - [%compare: int64 * int] (tuple uc1) (tuple uc2) ) + [%compare: int64 * int] (tuple uc1) (tuple uc2)) in [%log info] "Applying %d user commands and %d internal commands" (List.length sorted_user_cmds) (List.length sorted_internal_cmds) ; (* apply commands in global slot, sequence order *) let rec apply_commands (internal_cmds : Sql.Internal_command.t list) - (user_cmds : Sql.User_command.t list) ~last_global_slot - ~last_block_id ~staking_epoch_ledger ~next_epoch_ledger = + (user_cmds : Sql.User_command.t list) ~last_global_slot ~last_block_id + ~staking_epoch_ledger ~next_epoch_ledger = let%bind staking_epoch_ledger, staking_seed = update_staking_epoch_data ~logger pool ~last_block_id ~ledger ~staking_epoch_ledger @@ -789,7 +794,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = [%log info] "Applied all commands at global slot %Ld, got expected ledger \ hash" - ~metadata:[("ledger_hash", 
json_ledger_hash_of_ledger ledger)] + ~metadata:[ ("ledger_hash", json_ledger_hash_of_ledger ledger) ] last_global_slot else ( [%log error] @@ -798,7 +803,8 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = ~metadata: [ ("ledger_hash", json_ledger_hash_of_ledger ledger) ; ( "expected_ledger_hash" - , Ledger_hash.to_yojson expected_ledger_hash ) ] + , Ledger_hash.to_yojson expected_ledger_hash ) + ] last_global_slot ; if continue_on_error then incr error_count else Core_kernel.exit 1 ) in @@ -808,7 +814,8 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = in [%log info] ~metadata: - [("state_hash", `String (State_hash.to_base58_check state_hash))] + [ ("state_hash", `String (State_hash.to_base58_check state_hash)) + ] "Starting processing of commands in block with state_hash \ $state_hash at global slot %Ld" curr_global_slot @@ -840,8 +847,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = | _ -> log_on_slot_change ic.global_slot ; let%bind () = - run_internal_command ~logger ~pool ~ledger ~continue_on_error - ic + run_internal_command ~logger ~pool ~ledger ~continue_on_error ic in apply_commands ics user_cmds ~last_global_slot:ic.global_slot ~last_block_id:ic.block_id ~staking_epoch_ledger @@ -891,13 +897,13 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = in let genesis_block_id = match List.filter unparented_ids ~f:(Int.Set.mem block_ids) with - | [id] -> + | [ id ] -> id | _ -> failwith "Expected only the genesis block to have an unparented id" in [%log info] "At genesis, ledger hash" - ~metadata:[("ledger_hash", json_ledger_hash_of_ledger ledger)] ; + ~metadata:[ ("ledger_hash", json_ledger_hash_of_ledger ledger) ] ; let%bind staking_epoch_ledger, staking_seed, next_epoch_ledger, next_seed = apply_commands sorted_internal_cmds sorted_user_cmds @@ -906,7 +912,7 @@ let main ~input_file ~output_file ~archive_uri ~continue_on_error () = in if Int.equal !error_count 0 
then ( [%log info] "Writing output to $output_file" - ~metadata:[("output_file", `String output_file)] ; + ~metadata:[ ("output_file", `String output_file) ] ; let output = create_output ~target_epoch_ledgers_state_hash: diff --git a/src/app/replayer/sql.ml b/src/app/replayer/sql.ml index 96def27d93b..ba82c4cbd8b 100644 --- a/src/app/replayer/sql.ml +++ b/src/app/replayer/sql.ml @@ -4,12 +4,12 @@ open Core_kernel module Block_info = struct type t = - {id: int; global_slot: int64; state_hash: string; ledger_hash: string} + { id : int; global_slot : int64; state_hash : string; ledger_hash : string } [@@deriving hlist] let typ = let open Archive_lib.Processor.Caqti_type_spec in - let spec = Caqti_type.[int; int64; string; string] in + let spec = Caqti_type.[ int; int64; string; string ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in Caqti_type.custom ~encode ~decode (to_rep spec) @@ -97,26 +97,27 @@ end module User_command = struct type t = - { type_: string - ; fee_payer_id: int - ; source_id: int - ; receiver_id: int - ; fee: int64 - ; fee_token: int64 - ; token: int64 - ; amount: int64 option - ; valid_until: int64 option - ; memo: string - ; nonce: int64 - ; block_id: int - ; global_slot: int64 - ; txn_global_slot: int64 - ; sequence_no: int - ; status: string - ; created_token: int64 option - ; fee_payer_balance: int - ; source_balance: int option - ; receiver_balance: int option } + { type_ : string + ; fee_payer_id : int + ; source_id : int + ; receiver_id : int + ; fee : int64 + ; fee_token : int64 + ; token : int64 + ; amount : int64 option + ; valid_until : int64 option + ; memo : string + ; nonce : int64 + ; block_id : int + ; global_slot : int64 + ; txn_global_slot : int64 + ; sequence_no : int + ; status : string + ; created_token : int64 option + ; fee_payer_balance : int + ; source_balance : int option + ; receiver_balance : int option + } [@@deriving hlist] let typ = @@ -142,7 +143,8 
@@ module User_command = struct ; option int64 ; int ; option int - ; option int ] + ; option int + ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in @@ -186,22 +188,23 @@ end module Internal_command = struct type t = - { type_: string - ; receiver_id: int - ; receiver_balance: int - ; fee: int64 - ; token: int64 - ; block_id: int - ; global_slot: int64 - ; txn_global_slot: int64 - ; sequence_no: int - ; secondary_sequence_no: int } + { type_ : string + ; receiver_id : int + ; receiver_balance : int + ; fee : int64 + ; token : int64 + ; block_id : int + ; global_slot : int64 + ; txn_global_slot : int64 + ; sequence_no : int + ; secondary_sequence_no : int + } [@@deriving hlist] let typ = let open Archive_lib.Processor.Caqti_type_spec in let spec = - Caqti_type.[string; int; int; int64; int64; int; int64; int64; int; int] + Caqti_type.[ string; int; int; int64; int64; int; int64; int64; int; int ] in let encode t = Ok (hlist_to_tuple spec (to_hlist t)) in let decode t = Ok (of_hlist (tuple_to_hlist spec t)) in @@ -247,12 +250,12 @@ module Public_key = struct end module Epoch_data = struct - type epoch_data = {epoch_ledger_hash: string; epoch_data_seed: string} + type epoch_data = { epoch_ledger_hash : string; epoch_data_seed : string } let epoch_data_typ = let encode t = Ok (t.epoch_ledger_hash, t.epoch_data_seed) in let decode (epoch_ledger_hash, epoch_data_seed) = - Ok {epoch_ledger_hash; epoch_data_seed} + Ok { epoch_ledger_hash; epoch_data_seed } in let rep = Caqti_type.(tup2 string string) in Caqti_type.custom ~encode ~decode rep @@ -293,8 +296,7 @@ module Epoch_data = struct WHERE state_hash = ? 
|sql} - let get_next_epoch_data_id (module Conn : Caqti_async.CONNECTION) state_hash - = + let get_next_epoch_data_id (module Conn : Caqti_async.CONNECTION) state_hash = Conn.find query_next_epoch_data_id state_hash end diff --git a/src/app/swap_bad_balances/swap_bad_balances.ml b/src/app/swap_bad_balances/swap_bad_balances.ml index 8785d666a3f..adfa03f8dfa 100644 --- a/src/app/swap_bad_balances/swap_bad_balances.ml +++ b/src/app/swap_bad_balances/swap_bad_balances.ml @@ -17,7 +17,7 @@ let main ~archive_uri ~state_hash ~sequence_no () = match Caqti_async.connect_pool archive_uri with | Error e -> [%log fatal] - ~metadata:[("error", `String (Caqti_error.show e))] + ~metadata:[ ("error", `String (Caqti_error.show e)) ] "Failed to create a Caqti connection to Postgresql" ; exit 1 | Ok pool -> @@ -27,7 +27,7 @@ let main ~archive_uri ~state_hash ~sequence_no () = query_db ~f:(fun db -> Sql.Receiver_balances.run_ids_from_fee_transfer db state_hash - sequence_no ) + sequence_no) ~item:"receiver balance ids" in if List.length receiver_balance_ids <> 2 then ( @@ -36,7 +36,7 @@ let main ~archive_uri ~state_hash ~sequence_no () = Core_kernel.exit 1 ) ; let balance_1_id, balance_2_id = match receiver_balance_ids with - | [id1; id2] -> + | [ id1; id2 ] -> (id1, id2) | _ -> failwith "Wrong number of balance ids" @@ -56,12 +56,13 @@ let main ~archive_uri ~state_hash ~sequence_no () = Unsigned.UInt64.of_int64 bal_int64 |> Currency.Balance.of_uint64 |> Currency.Balance.to_yojson in - `Assoc [("public_key_id", `Int pk_id); ("balance", bal_json)] + `Assoc [ ("public_key_id", `Int pk_id); ("balance", bal_json) ] in [%log info] "Found balances to be swapped" ~metadata: [ ("balance_1", balance_to_yojson balance_1) - ; ("balance_2", balance_to_yojson balance_2) ] ; + ; ("balance_2", balance_to_yojson balance_2) + ] ; let balance_1_swapped, balance_2_swapped = match (balance_1, balance_2) with | (pk1, bal1), (pk2, bal2) -> @@ -70,14 +71,14 @@ let main ~archive_uri ~state_hash ~sequence_no 
() = let%bind new_balance_id_1 = query_db ~f:(fun db -> - Sql.Receiver_balances.add_if_doesn't_exist db balance_1_swapped ) + Sql.Receiver_balances.add_if_doesn't_exist db balance_1_swapped) ~item:"receiver balance 1 swapped" in [%log info] "New balance id for balance 1: %d" new_balance_id_1 ; let%bind new_balance_id_2 = query_db ~f:(fun db -> - Sql.Receiver_balances.add_if_doesn't_exist db balance_2_swapped ) + Sql.Receiver_balances.add_if_doesn't_exist db balance_2_swapped) ~item:"receiver balance 2 swapped" in [%log info] "New balance id for balance 2: %d" new_balance_id_2 ; @@ -86,7 +87,7 @@ let main ~archive_uri ~state_hash ~sequence_no () = query_db ~f:(fun db -> Sql.Receiver_balances.swap_in_new_balance db state_hash sequence_no - balance_1_id new_balance_id_1 ) + balance_1_id new_balance_id_1) ~item:"balance 1 swap" in [%log info] "Swapping in new balance 2" ; @@ -94,7 +95,7 @@ let main ~archive_uri ~state_hash ~sequence_no () = query_db ~f:(fun db -> Sql.Receiver_balances.swap_in_new_balance db state_hash sequence_no - balance_2_id new_balance_id_2 ) + balance_2_id new_balance_id_2) ~item:"balance 2 swap" in Deferred.unit @@ -105,19 +106,19 @@ let () = (let open Let_syntax in async ~summary:"Swap bad balances for combined fee transfers" (let%map archive_uri = - Param.flag "--archive-uri" ~aliases:["archive-uri"] + Param.flag "--archive-uri" ~aliases:[ "archive-uri" ] ~doc: "URI URI for connecting to the archive database (e.g., \ postgres://$USER@localhost:5432/archiver)" Param.(required string) and state_hash = - Param.(flag "--state-hash" ~aliases:["state-hash"]) + Param.(flag "--state-hash" ~aliases:[ "state-hash" ]) ~doc: - "STATE-HASH State hash of the block containing the combined \ - fee transfer" + "STATE-HASH State hash of the block containing the combined fee \ + transfer" Param.(required string) and sequence_no = - Param.(flag "--sequence-no" ~aliases:["sequence-no"]) + Param.(flag "--sequence-no" ~aliases:[ "sequence-no" ]) ~doc:"NN Sequence 
number of the two fee transfers" Param.(required int) in diff --git a/src/app/test_executive/archive_node_test.ml b/src/app/test_executive/archive_node_test.ml index 0c7134badd6..9d4498ea7f2 100644 --- a/src/app/test_executive/archive_node_test.ml +++ b/src/app/test_executive/archive_node_test.ml @@ -17,15 +17,17 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let config = let open Test_config in { default with - requires_graphql= true (* a few block producers, where few = 4 *) - ; block_producers= - [ {balance= "4000"; timing= Untimed} - ; {balance= "9000"; timing= Untimed} - ; {balance= "8000"; timing= Untimed} - ; {balance= "17000"; timing= Untimed} ] - ; num_archive_nodes= 1 - ; num_snark_workers= 0 - ; log_precomputed_blocks= true } + requires_graphql = true (* a few block producers, where few = 4 *) + ; block_producers = + [ { balance = "4000"; timing = Untimed } + ; { balance = "9000"; timing = Untimed } + ; { balance = "8000"; timing = Untimed } + ; { balance = "17000"; timing = Untimed } + ] + ; num_archive_nodes = 1 + ; num_snark_workers = 0 + ; log_precomputed_blocks = true + } (* number of minutes to let the network run, after initialization *) let runtime_min = 15. 
@@ -39,13 +41,11 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct [%log info] "archive node test: waiting for block producers to initialize" ; let%bind () = Malleable_error.List.iter block_producers ~f:(fun bp -> - wait_for t (Wait_condition.node_to_initialize bp) ) + wait_for t (Wait_condition.node_to_initialize bp)) in [%log info] "archive node test: running network for %0.1f minutes" runtime_min ; - let%bind.Async.Deferred () = - Async.after (Time.Span.of_min runtime_min) - in + let%bind.Async.Deferred () = Async.after (Time.Span.of_min runtime_min) in [%log info] "archive node test: done running network" ; let%bind () = Network.Node.dump_archive_data ~logger archive_node @@ -54,7 +54,7 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct [%log info] "archive node test: collecting block logs" ; let%map () = Malleable_error.List.iter block_producers ~f:(fun bp -> - Network.Node.dump_precomputed_blocks ~logger bp ) + Network.Node.dump_precomputed_blocks ~logger bp) in [%log info] "archive node test: succesfully completed" end diff --git a/src/app/test_executive/gossip_consistency.ml b/src/app/test_executive/gossip_consistency.ml index 06b189ce752..c1ddf5169a7 100644 --- a/src/app/test_executive/gossip_consistency.ml +++ b/src/app/test_executive/gossip_consistency.ml @@ -19,11 +19,13 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let n = 3 in let open Test_config in { default with - requires_graphql= true - ; block_producers= + requires_graphql = true + ; block_producers = List.init n ~f:(fun _ -> - {Block_producer.balance= block_producer_balance; timing= Untimed} - ) } + { Block_producer.balance = block_producer_balance + ; timing = Untimed + }) + } let wait_for_all_to_initialize ~logger network t = let open Malleable_error.Let_syntax in @@ -34,11 +36,11 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct [%log info] "gossip_consistency test: Block producer %d (of %d) initialized" (i + 1) n ; - () ) + ()) |> Malleable_error.all_unit - 
let send_payments ~logger ~sender_pub_key ~receiver_pub_key ~amount ~fee - ~node n = + let send_payments ~logger ~sender_pub_key ~receiver_pub_key ~amount ~fee ~node + n = let open Malleable_error.Let_syntax in let rec go n = if n = 0 then return () @@ -55,8 +57,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct in go n - let wait_for_payments ~logger ~dsl ~sender_pub_key ~receiver_pub_key ~amount - n = + let wait_for_payments ~logger ~dsl ~sender_pub_key ~receiver_pub_key ~amount n + = let open Malleable_error.Let_syntax in let rec go n = if n = 0 then return () @@ -65,8 +67,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let%bind () = let%map () = wait_for dsl - (Wait_condition.payment_to_be_included_in_frontier - ~sender_pub_key ~receiver_pub_key ~amount) + (Wait_condition.payment_to_be_included_in_frontier ~sender_pub_key + ~receiver_pub_key ~amount) in [%log info] "gossip_consistency test: payment #%d successfully included in \ @@ -103,8 +105,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct "gossip_consistency test: sending payments done. will now wait for \ payments" ; let%bind () = - wait_for_payments ~logger ~dsl:t ~sender_pub_key ~receiver_pub_key - ~amount num_payments + wait_for_payments ~logger ~dsl:t ~sender_pub_key ~receiver_pub_key ~amount + num_payments in [%log info] "gossip_consistency test: finished waiting for payments" ; let gossip_states = (network_state t).gossip_received in @@ -140,13 +142,13 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let `Seen_by_all inter, `Seen_by_some union = Gossip_state.stats Transactions_gossip (Map.data (network_state t).gossip_received) - ~exclusion_list:[Network.Node.id sender_bp] + ~exclusion_list:[ Network.Node.id sender_bp ] in [%log info] "gossip_consistency test: inter = %d; union = %d " inter union ; let ratio = if union = 0 then 1. else Float.of_int inter /. 
Float.of_int union (* Gossip_state.consistency_ratio Transactions_gossip - (Map.data (network_state t).gossip_received) *) + (Map.data (network_state t).gossip_received) *) in [%log info] "gossip_consistency test: consistency ratio = %f" ratio ; let threshold = 0.95 in @@ -155,8 +157,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let result = Malleable_error.soft_error_string ~value:() (Printf.sprintf - "consistency ratio = %f, which is less than threshold = %f" - ratio threshold) + "consistency ratio = %f, which is less than threshold = %f" ratio + threshold) in [%log error] "gossip_consistency test: TEST FAILURE. consistency ratio = %f, \ diff --git a/src/app/test_executive/payments_test.ml b/src/app/test_executive/payments_test.ml index 25571dd1b7e..fe6a82f6198 100644 --- a/src/app/test_executive/payments_test.ml +++ b/src/app/test_executive/payments_test.ml @@ -26,23 +26,27 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct ~vesting_increment : Mina_base.Account_timing.t = let open Currency in Timed - { initial_minimum_balance= Balance.of_int min_balance - ; cliff_time= Mina_numbers.Global_slot.of_int cliff_time - ; cliff_amount= Amount.of_int cliff_amount - ; vesting_period= Mina_numbers.Global_slot.of_int vesting_period - ; vesting_increment= Amount.of_int vesting_increment } + { initial_minimum_balance = Balance.of_int min_balance + ; cliff_time = Mina_numbers.Global_slot.of_int cliff_time + ; cliff_amount = Amount.of_int cliff_amount + ; vesting_period = Mina_numbers.Global_slot.of_int vesting_period + ; vesting_increment = Amount.of_int vesting_increment + } in { default with - requires_graphql= true - ; block_producers= - [ {balance= "4000"; timing= Untimed} - ; {balance= "3000"; timing= Untimed} - ; { balance= "1000" - ; timing= + requires_graphql = true + ; block_producers = + [ { balance = "4000"; timing = Untimed } + ; { balance = "3000"; timing = Untimed } + ; { balance = "1000" + ; timing = make_timing ~min_balance:100_000_000_000 
~cliff_time:4 ~cliff_amount:0 ~vesting_period:2 - ~vesting_increment:50_000_000_000 } ] - ; num_snark_workers= 0 } + ~vesting_increment:50_000_000_000 + } + ] + ; num_snark_workers = 0 + } let run network t = let open Network in @@ -55,7 +59,7 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct Malleable_error.List.iter block_producer_nodes ~f:(Fn.compose (wait_for t) Wait_condition.node_to_initialize) in - let[@warning "-8"] [untimed_node_a; untimed_node_b; timed_node_a] = + let[@warning "-8"] [ untimed_node_a; untimed_node_b; timed_node_a ] = block_producer_nodes in let%bind () = diff --git a/src/app/test_executive/reliability_test.ml b/src/app/test_executive/reliability_test.ml index 041a572dd2a..d262570880b 100644 --- a/src/app/test_executive/reliability_test.ml +++ b/src/app/test_executive/reliability_test.ml @@ -18,17 +18,19 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let open Test_config in let open Test_config.Block_producer in { default with - requires_graphql= true - ; block_producers= - [ {balance= "1000"; timing= Untimed} - ; {balance= "1000"; timing= Untimed} - ; {balance= "1000"; timing= Untimed} ] - ; num_snark_workers= 0 } + requires_graphql = true + ; block_producers = + [ { balance = "1000"; timing = Untimed } + ; { balance = "1000"; timing = Untimed } + ; { balance = "1000"; timing = Untimed } + ] + ; num_snark_workers = 0 + } let check_common_prefixes ~number_of_blocks:n ~logger chains = let recent_chains = List.map chains ~f:(fun chain -> - List.take (List.rev chain) n |> Hash_set.of_list (module String) ) + List.take (List.rev chain) n |> Hash_set.of_list (module String)) in let common_prefixes = List.fold ~f:Hash_set.inter @@ -41,8 +43,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct Malleable_error.soft_error ~value:() (Error.of_string (sprintf - "Chains don't have any common prefixes among their most \ - recent %d blocks" + "Chains don't have any common prefixes among their most recent \ + %d blocks" n)) in [%log 
error] @@ -82,14 +84,14 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct in Malleable_error.ok_if_true (List.mem connected_peers p ~equal:String.equal) - ~error_type:`Hard ~error ) + ~error_type:`Hard ~error) let check_peers ~logger nodes = let open Malleable_error.Let_syntax in let%bind nodes_and_responses = Malleable_error.List.map nodes ~f:(fun node -> let%map response = Network.Node.must_get_peer_id ~logger node in - (node, response) ) + (node, response)) in let nodes_by_peer_id = nodes_and_responses @@ -98,7 +100,7 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct in Malleable_error.List.iter nodes_and_responses ~f:(fun (_, (peer_id, connected_peers)) -> - check_peer_connectivity ~nodes_by_peer_id ~peer_id ~connected_peers ) + check_peer_connectivity ~nodes_by_peer_id ~peer_id ~connected_peers) let run network t = let open Network in @@ -107,12 +109,12 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct (* TEMP: until we fix the seed graphql port, we will only check peers for block producers *) (* let all_nodes = Network.all_nodes network in *) let all_nodes = Network.block_producers network in - let[@warning "-8"] [node_a; node_b; node_c] = + let[@warning "-8"] [ node_a; node_b; node_c ] = Network.block_producers network in (* TODO: let%bind () = wait_for t (Wait_condition.nodes_to_initialize [node_a; node_b; node_c]) in *) let%bind () = - Malleable_error.List.iter [node_a; node_b; node_c] + Malleable_error.List.iter [ node_a; node_b; node_c ] ~f:(Fn.compose (wait_for t) Wait_condition.node_to_initialize) in let%bind () = @@ -130,7 +132,7 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct let%bind () = Node.start ~fresh_state:true node_c in let%bind () = wait_for t (Wait_condition.node_to_initialize node_c) in wait_for t - ( Wait_condition.nodes_to_synchronize [node_a; node_b; node_c] + ( Wait_condition.nodes_to_synchronize [ node_a; node_b; node_c ] |> Wait_condition.with_timeouts ~hard_timeout: (Network_time_span.Literal diff --git 
a/src/app/test_executive/test_executive.ml b/src/app/test_executive/test_executive.ml index b6f9e03ba00..6469f808d05 100644 --- a/src/app/test_executive/test_executive.ml +++ b/src/app/test_executive/test_executive.ml @@ -10,8 +10,8 @@ type engine = string * (module Intf.Engine.S) module Make_test_inputs (Engine : Intf.Engine.S) () : Intf.Test.Inputs_intf - with type Engine.Network_config.Cli_inputs.t = - Engine.Network_config.Cli_inputs.t = struct + with type Engine.Network_config.Cli_inputs.t = + Engine.Network_config.Cli_inputs.t = struct module Engine = Engine module Dsl = Dsl.Make (Engine) () @@ -25,18 +25,19 @@ type test_inputs_with_cli_inputs = -> test_inputs_with_cli_inputs type inputs = - { test_inputs: test_inputs_with_cli_inputs - ; test: test - ; coda_image: string - ; archive_image: string - ; debug: bool } + { test_inputs : test_inputs_with_cli_inputs + ; test : test + ; coda_image : string + ; archive_image : string + ; debug : bool + } -let validate_inputs {coda_image; _} = +let validate_inputs { coda_image; _ } = if String.is_empty coda_image then failwith "Coda image cannot be an empt string" let engines : engine list = - [("cloud", (module Integration_test_cloud_engine : Intf.Engine.S))] + [ ("cloud", (module Integration_test_cloud_engine : Intf.Engine.S)) ] let tests : test list = [ ("reliability", (module Reliability_test.Make : Intf.Test.Functor_intf)) @@ -107,14 +108,13 @@ let report_test_errors ~log_error_set ~internal_error_set = "=== Log %ss ===\n" log_type ; Error_accumulator.iter_contexts log_errors ~f:(fun node_id log_errors -> color_eprintf Bash_colors.light_magenta " %s:\n" node_id ; - List.iter log_errors ~f:(fun (severity, {error_message; _}) -> + List.iter log_errors ~f:(fun (severity, { error_message; _ }) -> color_eprintf (color_of_severity severity) " [%s] %s\n" (Time.to_string error_message.timestamp) - (Yojson.Safe.to_string (Logger.Message.to_yojson error_message)) - ) ; - Print.eprintf "\n" ) + (Yojson.Safe.to_string 
(Logger.Message.to_yojson error_message))) ; + Print.eprintf "\n") in (* check invariants *) if List.length log_errors.from_current_context > 0 then @@ -134,20 +134,20 @@ let report_test_errors ~log_error_set ~internal_error_set = print_category_header (max_severity_of_list (List.map errors ~f:fst)) "%s" context ; - List.iter errors ~f:(fun (severity, {occurrence_time; error}) -> + List.iter errors ~f:(fun (severity, { occurrence_time; error }) -> color_eprintf (color_of_severity severity) " [%s] %s\n" (Time.to_string occurrence_time) - (Error.to_string_hum error) ) ) ; + (Error.to_string_hum error))) ; (* report non-contextualized internal errors *) List.iter internal_errors.from_current_context - ~f:(fun (severity, {occurrence_time; error}) -> + ~f:(fun (severity, { occurrence_time; error }) -> color_eprintf (color_of_severity severity) "[%s] %s\n" (Time.to_string occurrence_time) - (Error.to_string_hum error) ) ; + (Error.to_string_hum error)) ; (* determine if test is passed/failed and exit accordingly *) let test_failed = match (log_errors_severity, internal_errors_severity) with @@ -164,8 +164,7 @@ let report_test_errors ~log_error_set ~internal_error_set = "The test has failed. 
See the above errors for details.\n\n" ; false ) else ( - color_eprintf Bash_colors.green - "The test has completed successfully.\n\n" ; + color_eprintf Bash_colors.green "The test has completed successfully.\n\n" ; true ) in let%bind () = Writer.(flushed (Lazy.force stderr)) in @@ -175,8 +174,8 @@ let report_test_errors ~log_error_set ~internal_error_set = let dispatch_cleanup ~logger ~pause_cleanup_func ~network_cleanup_func ~log_engine_cleanup_func ~lift_accumulated_errors_func ~net_manager_ref - ~log_engine_ref ~network_state_writer_ref ~cleanup_deferred_ref - ~exit_reason ~test_result : unit Deferred.t = + ~log_engine_ref ~network_state_writer_ref ~cleanup_deferred_ref ~exit_reason + ~test_result : unit Deferred.t = let cleanup () : unit Deferred.t = let%bind log_engine_cleanup_result = Option.value_map !log_engine_ref @@ -185,13 +184,11 @@ let dispatch_cleanup ~logger ~pause_cleanup_func ~network_cleanup_func in Option.value_map !network_state_writer_ref ~default:() ~f:Broadcast_pipe.Writer.close ; - let%bind test_error_set = - Malleable_error.lift_error_set_unit test_result - in + let%bind test_error_set = Malleable_error.lift_error_set_unit test_result in let log_error_set = lift_accumulated_errors_func () in let internal_error_set = let open Test_error.Set in - combine [test_error_set; of_hard_or_error log_engine_cleanup_result] + combine [ test_error_set; of_hard_or_error log_engine_cleanup_result ] in let%bind test_was_successful = report_test_errors ~log_error_set ~internal_error_set @@ -206,13 +203,13 @@ let dispatch_cleanup ~logger ~pause_cleanup_func ~network_cleanup_func match !cleanup_deferred_ref with | Some deferred -> [%log error] - "additional call to cleanup testnet while already cleaning up \ - (reason: $reason)" - ~metadata:[("reason", `String exit_reason)] ; + "additional call to cleanup testnet while already cleaning up (reason: \ + $reason)" + ~metadata:[ ("reason", `String exit_reason) ] ; deferred | None -> [%log info] "cleaning up 
testnet (reason: $reason)" - ~metadata:[("reason", `String exit_reason)] ; + ~metadata:[ ("reason", `String exit_reason) ] ; let deferred = cleanup () in cleanup_deferred_ref := Some deferred ; deferred @@ -225,8 +222,7 @@ let main inputs = let open Test_inputs in let test_name, (module Test) = inputs.test in let (module T) = - (module Test (Test_inputs) - : Intf.Test.S + (module Test (Test_inputs) : Intf.Test.S with type network = Engine.Network.t and type node = Engine.Network.Node.t and type dsl = Dsl.t ) @@ -243,11 +239,12 @@ let main inputs = *) let logger = Logger.create () in let images = - { Test_config.Container_images.coda= inputs.coda_image - ; archive_node= inputs.archive_image - ; user_agent= "codaprotocol/coda-user-agent:0.1.5" - ; bots= "codaprotocol/coda-bots:0.0.13-beta-1" - ; points= "codaprotocol/coda-points-hack:32b.4" } + { Test_config.Container_images.coda = inputs.coda_image + ; archive_node = inputs.archive_image + ; user_agent = "codaprotocol/coda-user-agent:0.1.5" + ; bots = "codaprotocol/coda-bots:0.0.13-beta-1" + ; points = "codaprotocol/coda-points-hack:32b.4" + } in let network_config = Engine.Network_config.expand ~logger ~test_name ~cli_inputs @@ -284,7 +281,7 @@ let main inputs = in don't_wait_for (f_dispatch_cleanup ~exit_reason:"signal received" - ~test_result:(Malleable_error.hard_error error)) ) ; + ~test_result:(Malleable_error.hard_error error))) ; let%bind monitor_test_result = let on_fatal_error message = don't_wait_for @@ -328,7 +325,7 @@ let main inputs = Deferred.bind init_result ~f:Malleable_error.or_hard_error in let%bind () = Engine.Network.initialize ~logger network in - T.run network dsl ) + T.run network dsl) in let exit_reason, test_result = match monitor_test_result with @@ -364,7 +361,7 @@ let coda_image_arg = Arg.( required & opt (some string) None - & info ["coda-image"] ~env ~docv:"CODA_IMAGE" ~doc) + & info [ "coda-image" ] ~env ~docv:"CODA_IMAGE" ~doc) let archive_image_arg = let doc = "Identifier of the 
archive node docker image to test." in @@ -372,14 +369,14 @@ let archive_image_arg = Arg.( value ( opt string "unused" - & info ["archive-image"] ~env ~docv:"ARCHIVE_IMAGE" ~doc )) + & info [ "archive-image" ] ~env ~docv:"ARCHIVE_IMAGE" ~doc )) let debug_arg = let doc = "Enable debug mode. On failure, the test executive will pause for user \ input before destroying the network it deployed." in - Arg.(value & flag & info ["debug"; "d"] ~doc) + Arg.(value & flag & info [ "debug"; "d" ] ~doc) let help_term = Term.(ret @@ const (`Help (`Plain, None))) @@ -397,7 +394,7 @@ let engine_cmd ((engine_name, (module Engine)) : engine) = in let inputs_term = let cons_inputs test_inputs test coda_image archive_image debug = - {test_inputs; test; coda_image; archive_image; debug} + { test_inputs; test; coda_image; archive_image; debug } in Term.( const cons_inputs $ test_inputs_with_cli_inputs_arg $ test_arg @@ -419,4 +416,4 @@ let default_cmd = (* TODO: move required args to positions instead of flags, or provide reasonable defaults to make them optional *) let () = let engine_cmds = List.map engines ~f:engine_cmd in - Term.(exit @@ eval_choice default_cmd (engine_cmds @ [help_cmd])) + Term.(exit @@ eval_choice default_cmd (engine_cmds @ [ help_cmd ])) diff --git a/src/app/validate_keypair/validate_keypair.ml b/src/app/validate_keypair/validate_keypair.ml index 40d74d5b3e1..40ad87c5b96 100644 --- a/src/app/validate_keypair/validate_keypair.ml +++ b/src/app/validate_keypair/validate_keypair.ml @@ -5,10 +5,10 @@ open Async let () = let is_version_cmd s = - List.mem ["version"; "-version"] s ~equal:String.equal + List.mem [ "version"; "-version" ] s ~equal:String.equal in match Sys.get_argv () with - | [|_generate_keypair_exe; version|] when is_version_cmd version -> + | [| _generate_keypair_exe; version |] when is_version_cmd version -> Mina_version.print_version () | _ -> Command.run Cli_lib.Commands.validate_keypair diff --git a/src/lib/allocation_functor/make.ml 
b/src/lib/allocation_functor/make.ml index d502bcd3b34..3475e870404 100644 --- a/src/lib/allocation_functor/make.ml +++ b/src/lib/allocation_functor/make.ml @@ -19,9 +19,10 @@ module Partial = struct let bin_writer_t = M.bin_writer_t - let bin_reader_t = {read= bin_read_t; vtag_read= __bin_read_t__} + let bin_reader_t = { read = bin_read_t; vtag_read = __bin_read_t__ } - let bin_t = {shape= bin_shape_t; writer= bin_writer_t; reader= bin_reader_t} + let bin_t = + { shape = bin_shape_t; writer = bin_writer_t; reader = bin_reader_t } end module Sexp (M : Intf.Input.Sexp_intf) : @@ -64,8 +65,8 @@ end module Bin_io_and_sexp (M : Intf.Input.Bin_io_and_sexp_intf) : Intf.Output.Bin_io_and_sexp_intf - with type t = M.t - and type 'a creator := 'a M.creator = struct + with type t = M.t + and type 'a creator := 'a M.creator = struct include Basic (M) include Partial.Bin_io (M) include Partial.Sexp (M) @@ -80,8 +81,8 @@ end module Bin_io_and_yojson (M : Intf.Input.Bin_io_and_yojson_intf) : Intf.Output.Bin_io_and_yojson_intf - with type t = M.t - and type 'a creator := 'a M.creator = struct + with type t = M.t + and type 'a creator := 'a M.creator = struct include Basic (M) include Partial.Bin_io (M) include Partial.Yojson (M) @@ -100,8 +101,8 @@ module Versioned_v1 = struct module Basic_intf (M : Intf.Input.Versioned_v1.Basic_intf) : sig include Intf.Output.Versioned_v1.Basic_intf - with type Stable.V1.t = M.Stable.V1.t - and type 'a Stable.V1.creator = 'a M.Stable.V1.creator + with type Stable.V1.t = M.Stable.V1.t + and type 'a Stable.V1.creator = 'a M.Stable.V1.creator end = struct module Stable = struct module V1 = struct @@ -129,8 +130,8 @@ module Versioned_v1 = struct module Sexp (M : Intf.Input.Versioned_v1.Sexp_intf) : sig include Intf.Output.Versioned_v1.Sexp_intf - with type Stable.V1.t = M.Stable.V1.t - and type 'a Stable.V1.creator = 'a M.Stable.V1.creator + with type Stable.V1.t = M.Stable.V1.t + and type 'a Stable.V1.creator = 'a M.Stable.V1.creator end = struct 
module Stable = struct module V1 = struct @@ -158,8 +159,8 @@ module Versioned_v1 = struct module Yojson (M : Intf.Input.Versioned_v1.Yojson_intf) : sig include Intf.Output.Versioned_v1.Yojson_intf - with type Stable.V1.t = M.Stable.V1.t - and type 'a Stable.V1.creator = 'a M.Stable.V1.creator + with type Stable.V1.t = M.Stable.V1.t + and type 'a Stable.V1.creator = 'a M.Stable.V1.creator end = struct module Stable = struct module V1 = struct @@ -188,8 +189,8 @@ module Versioned_v1 = struct (M : Intf.Input.Versioned_v1.Full_compare_eq_hash_intf) : sig include Intf.Output.Versioned_v1.Full_compare_eq_hash_intf - with type Stable.V1.t = M.Stable.V1.t - and type 'a Stable.V1.creator = 'a M.Stable.V1.creator + with type Stable.V1.t = M.Stable.V1.t + and type 'a Stable.V1.creator = 'a M.Stable.V1.creator end = struct module Stable = struct module V1 = struct @@ -233,8 +234,8 @@ module Versioned_v1 = struct module Full (M : Intf.Input.Versioned_v1.Full_intf) : sig include Intf.Output.Versioned_v1.Full_intf - with type Stable.V1.t = M.Stable.V1.t - and type 'a Stable.V1.creator = 'a M.Stable.V1.creator + with type Stable.V1.t = M.Stable.V1.t + and type 'a Stable.V1.creator = 'a M.Stable.V1.creator end = struct module Stable = struct module V1 = struct diff --git a/src/lib/allocation_functor/table.ml b/src/lib/allocation_functor/table.ml index bbb167ebc07..0c0ce321ca4 100644 --- a/src/lib/allocation_functor/table.ml +++ b/src/lib/allocation_functor/table.ml @@ -3,19 +3,19 @@ open Core_kernel (** immutable, serializable statistics derived from allocation data *) module Allocation_statistics = struct (* times represented in ms *) - type quartiles = {q1: float; q2: float; q3: float; q4: float} + type quartiles = { q1 : float; q2 : float; q3 : float; q4 : float } [@@deriving yojson] - let make_quartiles n = {q1= n; q2= n; q3= n; q4= n} + let make_quartiles n = { q1 = n; q2 = n; q3 = n; q4 = n } let empty_quartiles = make_quartiles 0.0 - type t = {count: int; lifetimes: 
quartiles} [@@deriving yojson] + type t = { count : int; lifetimes : quartiles } [@@deriving yojson] - let write_metrics {count; lifetimes} object_id = + let write_metrics { count; lifetimes } object_id = let open Mina_metrics in let open Mina_metrics.Object_lifetime_statistics in - let {q1; q2; q3; q4} = lifetimes in + let { q1; q2; q3; q4 } = lifetimes in let q x = lifetime_quartile_ms ~name:object_id ~quartile:x in Gauge.set (live_count ~name:object_id) (Int.to_float count) ; Gauge.set (q `Q1) q1 ; @@ -33,12 +33,14 @@ module Allocation_data = struct (* indexed queue data structure would be more effecient here, but keeping this simple for now *) type t = - { allocation_times: (allocation_id * Time.t) Queue.t - ; mutable next_allocation_id: allocation_id } + { allocation_times : (allocation_id * Time.t) Queue.t + ; mutable next_allocation_id : allocation_id + } let create () = - { allocation_times= Queue.create () - ; next_allocation_id= initial_allocation_id } + { allocation_times = Queue.create () + ; next_allocation_id = initial_allocation_id + } let register_allocation data = let id = data.next_allocation_id in @@ -50,7 +52,7 @@ module Allocation_data = struct let unregister_allocation data id = Queue.filter_inplace data.allocation_times ~f:(fun (id', _) -> id = id') - let compute_statistics {allocation_times; _} = + let compute_statistics { allocation_times; _ } = let now = Time.now () in let count = Queue.length allocation_times in let lifetime_ms_of_time time = Time.Span.to_ms (Time.diff now time) in @@ -59,7 +61,7 @@ module Allocation_data = struct in let mean_indices max_len = let m = max_len - 1 in - if m mod 2 = 0 then [m / 2] else [m / 2; (m / 2) + 1] + if m mod 2 = 0 then [ m / 2 ] else [ m / 2; (m / 2) + 1 ] in let mean offset length = let indices = @@ -67,7 +69,7 @@ module Allocation_data = struct in let sum = List.fold_left indices ~init:0.0 ~f:(fun acc i -> - acc +. get_lifetime_ms (count - 1 - (i + offset)) ) + acc +. 
get_lifetime_ms (count - 1 - (i + offset))) in sum /. Int.to_float (List.length indices) in @@ -83,15 +85,15 @@ module Allocation_data = struct let q3_offset = if count mod 2 = 0 then 0 else 1 in let q3 = mean ((count / 2) + q3_offset) (count / 2) in let q4 = get_lifetime_ms 0 in - Allocation_statistics.{q1; q2; q3; q4} + Allocation_statistics.{ q1; q2; q3; q4 } in - Allocation_statistics.{count; lifetimes} + Allocation_statistics.{ count; lifetimes } let compute_statistics t = try compute_statistics t with _ -> Allocation_statistics. - {count= 0; lifetimes= Allocation_statistics.make_quartiles 0.} + { count = 0; lifetimes = Allocation_statistics.make_quartiles 0. } let%test_module "Allocation_data unit tests" = ( module struct @@ -110,11 +112,12 @@ module Allocation_data = struct let now = Time.now () in (* ids do not need to be unique in this test *) let data = - { allocation_times= + { allocation_times = Queue.of_list @@ List.map (List.rev time_offsets) ~f:(fun offset -> - (0, Time.sub now (Time.Span.of_ms offset)) ) - ; next_allocation_id= 0 } + (0, Time.sub now (Time.Span.of_ms offset))) + ; next_allocation_id = 0 + } in let stats = compute_statistics data in [%test_eq: int] stats.count (List.length time_offsets) ; @@ -124,37 +127,37 @@ module Allocation_data = struct [%test_eq: robust_float] stats.lifetimes.q4 expected_quartiles.q4 let%test_unit "quartiles of empty list" = - run_test [] {q1= 0.0; q2= 0.0; q3= 0.0; q4= 0.0} + run_test [] { q1 = 0.0; q2 = 0.0; q3 = 0.0; q4 = 0.0 } let%test_unit "quartiles of singleton list" = - run_test [1.0] {q1= 1.0; q2= 1.0; q3= 1.0; q4= 1.0} + run_test [ 1.0 ] { q1 = 1.0; q2 = 1.0; q3 = 1.0; q4 = 1.0 } let%test_unit "quartiles of 2 element list" = - run_test [1.0; 2.0] {q1= 1.0; q2= 1.5; q3= 2.0; q4= 2.0} + run_test [ 1.0; 2.0 ] { q1 = 1.0; q2 = 1.5; q3 = 2.0; q4 = 2.0 } let%test_unit "quartiles of 3 element list" = - run_test [1.0; 2.0; 3.0] {q1= 1.0; q2= 2.0; q3= 3.0; q4= 3.0} + run_test [ 1.0; 2.0; 3.0 ] { q1 = 1.0; q2 = 
2.0; q3 = 3.0; q4 = 3.0 } let%test_unit "quartiles of even list (> 3)" = run_test - [1.0; 2.0; 3.0; 4.0; 5.0; 6.0] - {q1= 2.0; q2= 3.5; q3= 5.0; q4= 6.0} + [ 1.0; 2.0; 3.0; 4.0; 5.0; 6.0 ] + { q1 = 2.0; q2 = 3.5; q3 = 5.0; q4 = 6.0 } let%test_unit "quartiles of odd list with even split (> 3)" = run_test - [1.0; 2.0; 3.0; 4.0; 5.0; 6.0; 7.0] - {q1= 2.0; q2= 4.0; q3= 6.0; q4= 7.0} + [ 1.0; 2.0; 3.0; 4.0; 5.0; 6.0; 7.0 ] + { q1 = 2.0; q2 = 4.0; q3 = 6.0; q4 = 7.0 } let%test_unit "quartiles of odd list with odd split (> 3)" = run_test - [1.0; 2.0; 3.0; 4.0; 5.0; 6.0; 7.0; 8.0; 9.0] - {q1= 2.5; q2= 5.0; q3= 7.5; q4= 9.0} + [ 1.0; 2.0; 3.0; 4.0; 5.0; 6.0; 7.0; 8.0; 9.0 ] + { q1 = 2.5; q2 = 5.0; q3 = 7.5; q4 = 9.0 } end ) end (** correlation of allocation data and derived statistics *) module Allocation_info = struct - type t = {statistics: Allocation_statistics.t; data: Allocation_data.t} + type t = { statistics : Allocation_statistics.t; data : Allocation_data.t } end let table = String.Table.create () @@ -162,7 +165,7 @@ let table = String.Table.create () let capture object_id = let open Allocation_info in let info_opt = String.Table.find table object_id in - let data_opt = Option.map info_opt ~f:(fun {data; _} -> data) in + let data_opt = Option.map info_opt ~f:(fun { data; _ } -> data) in let data = Lazy.( force @@ -172,11 +175,10 @@ let capture object_id = in let allocation_id = Allocation_data.register_allocation data in let statistics = Allocation_data.compute_statistics data in - String.Table.set table ~key:object_id ~data:{data; statistics} ; + String.Table.set table ~key:object_id ~data:{ data; statistics } ; Allocation_statistics.write_metrics statistics object_id ; Mina_metrics.( - Counter.inc_one - (Object_lifetime_statistics.allocated_count ~name:object_id)) ; + Counter.inc_one (Object_lifetime_statistics.allocated_count ~name:object_id)) ; allocation_id (* release is currently O(n), where n = number of active allocations for this object type; this can be 
improved by implementing indexed queues (with decent random delete computational complexity) in ocaml *) @@ -185,11 +187,10 @@ let release ~object_id ~allocation_id = let info = String.Table.find_exn table object_id in Allocation_data.unregister_allocation info.data allocation_id ; let statistics = Allocation_data.compute_statistics info.data in - String.Table.set table ~key:object_id ~data:{info with statistics} ; + String.Table.set table ~key:object_id ~data:{ info with statistics } ; Allocation_statistics.write_metrics statistics object_id ; Mina_metrics.( - Counter.inc_one - (Object_lifetime_statistics.collected_count ~name:object_id)) + Counter.inc_one (Object_lifetime_statistics.collected_count ~name:object_id)) let attach_finalizer object_id obj = let allocation_id = capture object_id in @@ -200,7 +201,7 @@ let dump () = let open Allocation_info in let entries = String.Table.to_alist table - |> List.Assoc.map ~f:(fun {statistics; _} -> - Allocation_statistics.to_yojson statistics ) + |> List.Assoc.map ~f:(fun { statistics; _ } -> + Allocation_statistics.to_yojson statistics) in `Assoc entries diff --git a/src/lib/base58_check/base58_check.ml b/src/lib/base58_check/base58_check.ml index 481833c123f..738a7e4f912 100644 --- a/src/lib/base58_check/base58_check.ml +++ b/src/lib/base58_check/base58_check.ml @@ -14,8 +14,7 @@ exception Invalid_base58_character of string (* same as Bitcoin alphabet *) let mina_alphabet = - B58.make_alphabet - "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + B58.make_alphabet "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" let version_len = 1 @@ -125,8 +124,8 @@ let%test_module "base58check tests" = let%test "longer_string" = test_roundtrip "Someday, I wish upon a star, wake up where the clouds are far behind \ - me, where trouble melts like lemon drops, High above the chimney \ - top, that's where you'll find me" + me, where trouble melts like lemon drops, High above the chimney top, \ + that's where 
you'll find me" let%test "invalid checksum" = try diff --git a/src/lib/base58_check/base58_check.mli b/src/lib/base58_check/base58_check.mli index a460ed09848..91548669465 100644 --- a/src/lib/base58_check/base58_check.mli +++ b/src/lib/base58_check/base58_check.mli @@ -28,6 +28,7 @@ end) : sig (** decode Base58Check result into payload *) val decode : string -> string Or_error.t -end [@@warning "-67"] +end +[@@warning "-67"] module Version_bytes : module type of Version_bytes diff --git a/src/lib/best_tip_prover/best_tip_prover.ml b/src/lib/best_tip_prover/best_tip_prover.ml index d054f6badd4..005774f975d 100644 --- a/src/lib/best_tip_prover/best_tip_prover.ml +++ b/src/lib/best_tip_prover/best_tip_prover.ml @@ -10,7 +10,7 @@ end module Make (Inputs : Inputs_intf) : Mina_intf.Best_tip_prover_intf - with type transition_frontier := Inputs.Transition_frontier.t = struct + with type transition_frontier := Inputs.Transition_frontier.t = struct open Inputs module Merkle_list_prover = Merkle_list_prover.Make_ident (struct @@ -41,7 +41,7 @@ module Make (Inputs : Inputs_intf) : let hash acc body_hash = Protocol_state.hash_abstract ~hash_body:Fn.id - {previous_state_hash= acc; body= body_hash} + { previous_state_hash = acc; body = body_hash } end) let prove ~logger frontier = @@ -78,13 +78,14 @@ module Make (Inputs : Inputs_intf) : [%log debug] ~metadata: [ ( "merkle_list" - , `List (List.map ~f:State_body_hash.to_yojson merkle_list) ) ] + , `List (List.map ~f:State_body_hash.to_yojson merkle_list) ) + ] "Best tip prover produced a merkle list of $merkle_list" ; Proof_carrying_data. 
- { data= best_tip - ; proof= - ( merkle_list - , root |> External_transition.Validation.forget_validation ) } + { data = best_tip + ; proof = + (merkle_list, root |> External_transition.Validation.forget_validation) + } let validate_proof ~verifier transition_with_hash = let open Deferred.Result.Monad_infix in @@ -96,16 +97,15 @@ module Make (Inputs : Inputs_intf) : `This_transition_was_generated_internally |> skip_protocol_versions_validation `This_transition_has_valid_protocol_versions - |> (fun x -> validate_proofs ~verifier [x] >>| List.hd_exn) + |> (fun x -> validate_proofs ~verifier [ x ] >>| List.hd_exn) >>= Fn.compose Deferred.Result.return (skip_delta_transition_chain_validation `This_transition_was_not_received_via_gossip) |> Deferred.map - ~f: - (Result.map_error ~f:(Fn.const (Error.of_string "invalid proof")))) + ~f:(Result.map_error ~f:(Fn.const (Error.of_string "invalid proof")))) let verify ~verifier ~genesis_constants ~precomputed_values - {Proof_carrying_data.data= best_tip; proof= merkle_list, root} = + { Proof_carrying_data.data = best_tip; proof = merkle_list, root } = let open Deferred.Or_error.Let_syntax in let merkle_list_length = List.length merkle_list in let max_length = Transition_frontier.global_max_length genesis_constants in diff --git a/src/lib/bignum_bigint/bignum_bigint.ml b/src/lib/bignum_bigint/bignum_bigint.ml index 3f9566e88c2..e460ac70b33 100644 --- a/src/lib/bignum_bigint/bignum_bigint.ml +++ b/src/lib/bignum_bigint/bignum_bigint.ml @@ -3,11 +3,11 @@ include Bigint let of_bool (b : bool) : t = if b then one else zero -let of_bit_fold_lsb ({fold} : bool Fold_lib.Fold.t) : t = +let of_bit_fold_lsb ({ fold } : bool Fold_lib.Fold.t) : t = fold ~init:(0, zero) ~f:(fun (i, acc) b -> - (Int.(i + 1), bit_or (shift_left (of_bool b) i) acc) ) + (Int.(i + 1), bit_or (shift_left (of_bool b) i) acc)) |> snd let of_bits_lsb : bool list -> t = List.foldi ~init:zero ~f:(fun i acc b -> - bit_or (shift_left (of_bool b) i) acc ) + bit_or 
(shift_left (of_bool b) i) acc) diff --git a/src/lib/blake2/blake2.ml b/src/lib/blake2/blake2.ml index ff1414d1dda..5cfc0f7d422 100644 --- a/src/lib/blake2/blake2.ml +++ b/src/lib/blake2/blake2.ml @@ -43,8 +43,7 @@ module Make () = struct end end] - [%%define_locally - T1.(to_raw_string, digest_string, to_hex)] + [%%define_locally T1.(to_raw_string, digest_string, to_hex)] (* do not create bin_io serialization *) include Hashable.Make (T1) @@ -69,7 +68,7 @@ module Make () = struct ~f:(fun i -> let c = Char.to_int s.[i / 8] in let j = i mod 8 in - Int.((c lsr j) land 1 = 1) ) + Int.((c lsr j) land 1 = 1)) end include Make () @@ -86,9 +85,9 @@ let%test "serialization test V1" = let%test_unit "bits_to_string" = [%test_eq: string] - (bits_to_string [|true; false|]) - (String.of_char_list [Char.of_int_exn 1]) + (bits_to_string [| true; false |]) + (String.of_char_list [ Char.of_int_exn 1 ]) let%test_unit "string to bits" = Quickcheck.test ~trials:5 String.quickcheck_generator ~f:(fun s -> - [%test_eq: string] s (bits_to_string (string_to_bits s)) ) + [%test_eq: string] s (bits_to_string (string_to_bits s))) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index 5fb8e8eff54..2b9bc0cc4f0 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -10,7 +10,7 @@ open Otp_lib module Time = Block_time type Structured_log_events.t += Block_produced - [@@deriving register_event {msg= "Successfully produced a new block"}] + [@@deriving register_event { msg = "Successfully produced a new block" }] module Singleton_supervisor : sig type ('data, 'a) t @@ -23,10 +23,11 @@ module Singleton_supervisor : sig val dispatch : ('data, 'a) t -> 'data -> ('a, unit) Interruptible.t end = struct type ('data, 'a) t = - { mutable task: (unit Ivar.t * ('a, unit) Interruptible.t) option - ; f: unit Ivar.t -> 'data -> ('a, unit) Interruptible.t } + { mutable task : (unit Ivar.t * ('a, unit) Interruptible.t) 
option + ; f : unit Ivar.t -> 'data -> ('a, unit) Interruptible.t + } - let create ~task = {task= None; f= task} + let create ~task = { task = None; f = task } let cancel t = match t.task with @@ -64,7 +65,7 @@ let lift_sync f = (Deferred.create (fun ivar -> if Ivar.is_full ivar then [%log' error (Logger.create ())] "Ivar.fill bug is here!" ; - Ivar.fill ivar (f ()) )) + Ivar.fill ivar (f ()))) module Singleton_scheduler : sig type t @@ -75,10 +76,11 @@ module Singleton_scheduler : sig val schedule : t -> Time.t -> f:(unit -> unit) -> unit end = struct type t = - { mutable timeout: unit Time.Timeout.t option - ; time_controller: Time.Controller.t } + { mutable timeout : unit Time.Timeout.t option + ; time_controller : Time.Controller.t + } - let create time_controller = {time_controller; timeout= None} + let create time_controller = { time_controller; timeout = None } let cancel t = match t.timeout with @@ -104,7 +106,7 @@ end = struct let timeout = Time.Timeout.create t.time_controller wait_span ~f:(fun _ -> t.timeout <- None ; - f () ) + f ()) in t.timeout <- Some timeout end @@ -146,9 +148,9 @@ let generate_next_state ~constraint_constants ~previous_protocol_state ~coinbase_receiver ~logger ~current_state_view:previous_state_view ~transactions_by_fee:transactions ~get_completed_work - ~log_block_creation ~supercharge_coinbase ) + ~log_block_creation ~supercharge_coinbase) |> Result.map_error ~f:(fun err -> - Staged_ledger.Staged_ledger_error.Pre_diff err ) + Staged_ledger.Staged_ledger_error.Pre_diff err) in match (diff, block_reward_threshold) with | Ok d, Some threshold -> @@ -165,8 +167,10 @@ let generate_next_state ~constraint_constants ~previous_protocol_state $threshold, creating empty block" ~metadata: [ ("threshold", Currency.Amount.to_yojson threshold) - ; ("reward", Currency.Amount.to_yojson net_return) ] ; - Ok Staged_ledger_diff.With_valid_signatures_and_proofs.empty_diff ) + ; ("reward", Currency.Amount.to_yojson net_return) + ] ; + Ok 
Staged_ledger_diff.With_valid_signatures_and_proofs.empty_diff + ) | _ -> diff in @@ -196,7 +200,7 @@ let generate_next_state ~constraint_constants ~previous_protocol_state , pending_coinbase_update ) | Error (Staged_ledger.Staged_ledger_error.Unexpected e) -> [%log error] "Failed to apply the diff: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] ; None | Error e -> ( match diff with @@ -204,18 +208,18 @@ let generate_next_state ~constraint_constants ~previous_protocol_state [%log error] ~metadata: [ ( "error" - , `String (Staged_ledger.Staged_ledger_error.to_string e) - ) + , `String (Staged_ledger.Staged_ledger_error.to_string e) ) ; ( "diff" , Staged_ledger_diff.With_valid_signatures_and_proofs - .to_yojson diff ) ] + .to_yojson diff ) + ] "Error applying the diff $diff: $error" | Error e -> [%log error] "Error building the diff: $error" ~metadata: [ ( "error" - , `String (Staged_ledger.Staged_ledger_error.to_string e) - ) ] ) ; + , `String (Staged_ledger.Staged_ledger_error.to_string e) ) + ] ) ; None) in match res with @@ -236,8 +240,7 @@ let generate_next_state ~constraint_constants ~previous_protocol_state let next_ledger_hash = Option.value_map ledger_proof_opt ~f:(fun (proof, _) -> - Ledger_proof.statement proof |> Ledger_proof.statement_target - ) + Ledger_proof.statement proof |> Ledger_proof.statement_target) ~default:previous_ledger_hash in let snarked_next_available_token = @@ -255,18 +258,18 @@ let generate_next_state ~constraint_constants ~previous_protocol_state let supply_increase = Option.value_map ledger_proof_opt ~f:(fun (proof, _) -> - (Ledger_proof.statement proof).supply_increase ) + (Ledger_proof.statement proof).supply_increase) ~default:Currency.Amount.zero in let blockchain_state = (* We use the time of the beginning of the slot because if things - are slower than expected, we may have entered the next slot and - putting the **current** timestamp rather than the 
expected one - will screw things up. + are slower than expected, we may have entered the next slot and + putting the **current** timestamp rather than the expected one + will screw things up. - [generate_transition] will log an error if the [current_time] - has a different slot from the [scheduled_time] - *) + [generate_transition] will log an error if the [current_time] + has a different slot from the [scheduled_time] + *) Blockchain_state.create_value ~timestamp:scheduled_time ~snarked_ledger_hash:next_ledger_hash ~genesis_ledger_hash ~snarked_next_available_token @@ -280,9 +283,8 @@ let generate_next_state ~constraint_constants ~previous_protocol_state Consensus_state_hooks.generate_transition ~previous_protocol_state ~blockchain_state ~current_time ~block_data ~supercharge_coinbase - ~snarked_ledger_hash:previous_ledger_hash - ~genesis_ledger_hash ~supply_increase ~logger - ~constraint_constants ) ) + ~snarked_ledger_hash:previous_ledger_hash ~genesis_ledger_hash + ~supply_increase ~logger ~constraint_constants)) in lift_sync (fun () -> measure "making Snark and Internal transitions" (fun () -> @@ -302,20 +304,22 @@ let generate_next_state ~constraint_constants ~previous_protocol_state (Option.map ledger_proof_opt ~f:(fun (proof, _) -> proof)) in let witness = - { Pending_coinbase_witness.pending_coinbases= + { Pending_coinbase_witness.pending_coinbases = Staged_ledger.pending_coinbase_collection staged_ledger - ; is_new_stack } + ; is_new_stack + } in - Some (protocol_state, internal_transition, witness) ) ) + Some (protocol_state, internal_transition, witness))) module Precomputed_block = struct type t = External_transition.Precomputed_block.t = - { scheduled_time: Time.t - ; protocol_state: Protocol_state.value - ; protocol_state_proof: Proof.t - ; staged_ledger_diff: Staged_ledger_diff.t - ; delta_transition_chain_proof: - Frozen_ledger_hash.t * Frozen_ledger_hash.t list } + { scheduled_time : Time.t + ; protocol_state : Protocol_state.value + ; 
protocol_state_proof : Proof.t + ; staged_ledger_diff : Staged_ledger_diff.t + ; delta_transition_chain_proof : + Frozen_ledger_hash.t * Frozen_ledger_hash.t list + } let sexp_of_t = External_transition.Precomputed_block.sexp_of_t @@ -360,7 +364,8 @@ let handle_block_production_errors ~logger ~rejected_blocks_logger , Internal_transition.to_yojson internal_transition ) ; ( "pending_coinbase_witness" , Pending_coinbase_witness.to_yojson pending_coinbase_witness ) - ; time_metadata ] + ; time_metadata + ] in [%log error] ~metadata msg ; [%log' debug rejected_blocks_logger] ~metadata msg ; @@ -374,15 +379,16 @@ let handle_block_production_errors ~logger ~rejected_blocks_logger in let metadata = [ ("expected", state_yojson previous_protocol_state) - ; ("got", state_yojson protocol_state) ] + ; ("got", state_yojson protocol_state) + ] in [%log warn] ~metadata msg ; [%log' debug rejected_blocks_logger] - ~metadata:([time_metadata; state_metadata] @ metadata) + ~metadata:([ time_metadata; state_metadata ] @ metadata) msg ; return () | Error `Already_in_frontier -> - let metadata = [time_metadata; state_metadata] in + let metadata = [ time_metadata; state_metadata ] in [%log error] ~metadata "%sproduced transition is already in frontier" transition_error_msg_prefix ; [%log' debug rejected_blocks_logger] @@ -390,7 +396,7 @@ let handle_block_production_errors ~logger ~rejected_blocks_logger transition_error_msg_prefix ; return () | Error `Not_selected_over_frontier_root -> - let metadata = [time_metadata; state_metadata] in + let metadata = [ time_metadata; state_metadata ] in [%log warn] ~metadata "%sproduced transition is not selected over the root of transition \ frontier.%s" @@ -402,7 +408,7 @@ let handle_block_production_errors ~logger ~rejected_blocks_logger transition_error_msg_prefix transition_reason_for_failure ; return () | Error `Parent_missing_from_frontier -> - let metadata = [time_metadata; state_metadata] in + let metadata = [ time_metadata; state_metadata ] 
in [%log warn] ~metadata "%sparent of produced transition is missing from the frontier.%s" transition_error_msg_prefix transition_reason_for_failure ; @@ -418,18 +424,19 @@ let handle_block_production_errors ~logger ~rejected_blocks_logger | Error (`Invalid_staged_ledger_diff (e, staged_ledger_diff)) -> (* Unexpected errors from staged_ledger are captured in `Fatal_error - *) + *) let msg : (_, unit, string, unit) format4 = "Unable to build breadcrumb from produced transition due to invalid \ staged ledger diff: $error" in let metadata = [ ("error", Error_json.error_to_yojson e) - ; ("diff", Staged_ledger_diff.to_yojson staged_ledger_diff) ] + ; ("diff", Staged_ledger_diff.to_yojson staged_ledger_diff) + ] in [%log error] ~metadata msg ; [%log' debug rejected_blocks_logger] - ~metadata:([time_metadata; state_metadata] @ metadata) + ~metadata:([ time_metadata; state_metadata ] @ metadata) msg ; return () @@ -439,14 +446,14 @@ let time ~logger ~time_controller label f = let%map x = f () in let span = Time.diff (Time.now time_controller) t0 in [%log info] - ~metadata:[("time", `Int (Time.Span.to_ms span |> Int64.to_int_exn))] + ~metadata:[ ("time", `Int (Time.Span.to_ms span |> Int64.to_int_exn)) ] !"%s: $time %!" 
label ; x let run ~logger ~prover ~verifier ~trust_system ~get_completed_work - ~transaction_resource_pool ~time_controller ~keypairs - ~consensus_local_state ~coinbase_receiver ~frontier_reader - ~transition_writer ~set_next_producer_timing ~log_block_creation + ~transaction_resource_pool ~time_controller ~keypairs ~consensus_local_state + ~coinbase_receiver ~frontier_reader ~transition_writer + ~set_next_producer_timing ~log_block_creation ~(precomputed_values : Precomputed_values.t) ~block_reward_threshold = trace "block_producer" (fun () -> let constraint_constants = precomputed_values.constraint_constants in @@ -464,7 +471,8 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work "Generating genesis proof ($attempts_remaining / $max_attempts)" ~metadata: [ ("attempts_remaining", `Int retries) - ; ("max_attempts", `Int max_num_retries) ] ; + ; ("max_attempts", `Int max_num_retries) + ] ; match%bind Prover.create_genesis_block prover (Genesis_proof.to_inputs precomputed_values) @@ -474,7 +482,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work return (Ok res) | Error err -> [%log error] "Failed to generate genesis breadcrumb: $error" - ~metadata:[("error", Error_json.error_to_yojson err)] ; + ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; if retries > 0 then go (retries - 1) else ( Ivar.fill genesis_breadcrumb_ivar (Error err) ; @@ -504,7 +512,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work let crumb = Transition_frontier.best_tip frontier in let start = Time.now time_controller in [%log info] - ~metadata:[("breadcrumb", Breadcrumb.to_yojson crumb)] + ~metadata:[ ("breadcrumb", Breadcrumb.to_yojson crumb) ] "Producing new block with parent $breadcrumb%!" 
; let previous_transition = Breadcrumb.validated_transition crumb @@ -532,7 +540,8 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work [%log error] "Aborting block production: cannot generate a genesis \ proof" ; - Interruptible.lift (Deferred.never ()) (Deferred.return ()) ) + Interruptible.lift (Deferred.never ()) (Deferred.return ()) + ) else return (External_transition.protocol_state_proof @@ -564,15 +573,15 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work -> let protocol_state_hash = Protocol_state.hash protocol_state in let consensus_state_with_hash = - { With_hash.hash= protocol_state_hash - ; data= Protocol_state.consensus_state protocol_state } + { With_hash.hash = protocol_state_hash + ; data = Protocol_state.consensus_state protocol_state + } in Debug_assert.debug_assert (fun () -> - [%test_result: [`Take | `Keep]] + [%test_result: [ `Take | `Keep ]] (Consensus.Hooks.select ~constants:consensus_constants ~existing: - (With_hash.map - ~f:External_transition.consensus_state + (With_hash.map ~f:External_transition.consensus_state previous_transition) ~candidate:consensus_state_with_hash ~logger) ~expect:`Take @@ -583,7 +592,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work Transition_frontier.root frontier |> Breadcrumb.consensus_state_with_hash in - [%test_result: [`Take | `Keep]] + [%test_result: [ `Take | `Keep ]] (Consensus.Hooks.select ~existing:root_consensus_state_with_hash ~constants:consensus_constants @@ -591,7 +600,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work ~expect:`Take ~message: "newly generated consensus states should be selected \ - over the tf root" ) ; + over the tf root") ; Interruptible.uninterruptible (let open Deferred.Let_syntax in let emit_breadcrumb () = @@ -604,17 +613,16 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work ~prev_state:previous_protocol_state ~prev_state_proof:previous_protocol_state_proof 
~next_state:protocol_state internal_transition - pending_coinbase_witness ) + pending_coinbase_witness) |> Deferred.Result.map_error ~f:(fun err -> `Prover_error ( err , ( previous_protocol_state_proof , internal_transition - , pending_coinbase_witness ) ) ) ) + , pending_coinbase_witness ) ))) in let staged_ledger_diff = - Internal_transition.staged_ledger_diff - internal_transition + Internal_transition.staged_ledger_diff internal_transition in let previous_state_hash = Protocol_state.hash previous_protocol_state @@ -629,14 +637,15 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work let%bind transition = let open Result.Let_syntax in External_transition.Validation.wrap - { With_hash.hash= protocol_state_hash - ; data= + { With_hash.hash = protocol_state_hash + ; data = External_transition.create ~protocol_state ~protocol_state_proof ~staged_ledger_diff ~validation_callback: (Mina_net2.Validation_callback .create_without_expiration ()) - ~delta_transition_chain_proof () } + ~delta_transition_chain_proof () + } |> External_transition.skip_time_received_validation `This_transition_was_not_received_via_gossip |> External_transition.skip_protocol_versions_validation @@ -662,11 +671,11 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work let%bind breadcrumb = time ~logger ~time_controller "Build breadcrumb on produced block" (fun () -> - Breadcrumb.build ~logger ~precomputed_values - ~verifier ~trust_system ~parent:crumb ~transition + Breadcrumb.build ~logger ~precomputed_values ~verifier + ~trust_system ~parent:crumb ~transition ~sender:None (* Consider skipping `All here *) ~skip_staged_ledger_verification:`Proofs - ~transition_receipt_time () ) + ~transition_receipt_time ()) |> Deferred.Result.map_error ~f:(function | `Invalid_staged_ledger_diff e -> `Invalid_staged_ledger_diff @@ -677,14 +686,15 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work | `Not_selected_over_frontier_root | 
`Parent_missing_from_frontier | `Prover_error _ ) as err -> - err ) + err) in [%str_log info] ~metadata: - [("breadcrumb", Breadcrumb.to_yojson breadcrumb)] + [ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ] Block_produced ; let metadata = - [("state_hash", State_hash.to_yojson protocol_state_hash)] + [ ("state_hash", State_hash.to_yojson protocol_state_hash) + ] in Mina_metrics.( Counter.inc_one Block_producer.blocks_produced) ; @@ -702,18 +712,19 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work ; Deferred.choice ( Time.Timeout.create time_controller (* We allow up to 20 seconds for the transition - to make its way from the transition_writer to - the frontier. - This value is chosen to be reasonably - generous. In theory, this should not take - terribly long. But long cycles do happen in - our system, and with medium curves those long - cycles can be substantial. - *) + to make its way from the transition_writer to + the frontier. + This value is chosen to be reasonably + generous. In theory, this should not take + terribly long. But long cycles do happen in + our system, and with medium curves those long + cycles can be substantial. + *) (Time.Span.of_ms 20000L) ~f:(Fn.const ()) |> Time.Timeout.to_deferred ) - (Fn.const (Ok `Timed_out)) ] + (Fn.const (Ok `Timed_out)) + ] >>= function | `Transition_accepted -> [%log info] ~metadata @@ -722,8 +733,8 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work return () | `Timed_out -> (* FIXME #3167: this should be fatal, and more - importantly, shouldn't happen. - *) + importantly, shouldn't happen. + *) let msg : (_, unit, string, unit) format4 = "Timed out waiting for generated transition \ $state_hash to enter transition frontier. \ @@ -731,13 +742,10 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work mean your CPU is overloaded. Consider disabling \ `-run-snark-worker` if it's configured." 
in - let span = - Time.diff (Time.now time_controller) start - in + let span = Time.diff (Time.now time_controller) start in let metadata = [ ( "time" - , `Int (Time.Span.to_ms span |> Int64.to_int_exn) - ) + , `Int (Time.Span.to_ms span |> Int64.to_int_exn) ) ; ( "protocol_state" , Protocol_state.Value.to_yojson protocol_state ) ] @@ -749,9 +757,9 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work in let%bind res = emit_breadcrumb () in let span = Time.diff (Time.now time_controller) start in - handle_block_production_errors ~logger - ~rejected_blocks_logger ~time_taken:span - ~previous_protocol_state ~protocol_state res) ) + handle_block_production_errors ~logger ~rejected_blocks_logger + ~time_taken:span ~previous_protocol_state ~protocol_state + res) ) in let production_supervisor = Singleton_supervisor.create ~task:produce in let scheduler = Singleton_scheduler.create time_controller in @@ -762,7 +770,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work match Agent.get keypairs with | keypairs, `Different -> (* Perform block production key swap since we have new - keypairs *) + keypairs *) Consensus.Data.Local_state.block_production_keys_swap ~constants:consensus_constants consensus_local_state ( Keypair.And_compressed_pk.Set.to_list keypairs @@ -817,10 +825,9 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work ~constraint_constants ~constants:consensus_constants (time_to_ms now) consensus_state ~local_state:consensus_local_state ~keypairs - ~coinbase_receiver:!coinbase_receiver ~logger ) + ~coinbase_receiver:!coinbase_receiver ~logger) in - set_next_producer_timing next_producer_timing - consensus_state ; + set_next_producer_timing next_producer_timing consensus_state ; match next_producer_timing with | `Check_again time -> Singleton_scheduler.schedule scheduler (time_of_ms time) @@ -840,13 +847,13 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work let scheduled_time = time_of_ms 
time in don't_wait_for ((* Attempt to generate a genesis proof in the slot - immediately before we'll actually need it, so that - it isn't limiting our block production time in the - won slot. - This also allows non-genesis blocks to be received - in the meantime and alleviate the need to produce - one at all, if this won't have block height 1. - *) + immediately before we'll actually need it, so that + it isn't limiting our block production time in the + won slot. + This also allows non-genesis blocks to be received + in the meantime and alleviate the need to produce + one at all, if this won't have block height 1. + *) let scheduled_genesis_time = time_of_ms Int64.( @@ -870,8 +877,8 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work production_supervisor (scheduled_time, data, winner_pk)) ~f:check_next_block_timing - : (unit, unit) Interruptible.t ) ) ; - Deferred.return ()) ) + : (unit, unit) Interruptible.t )) ; + Deferred.return ())) in let start () = (* Schedule to wake up immediately on the next tick of the producer loop @@ -884,7 +891,7 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work * *) Agent.on_update keypairs ~f:(fun _new_keypairs -> Singleton_scheduler.schedule scheduler (Time.now time_controller) - ~f:check_next_block_timing ) ; + ~f:check_next_block_timing) ; check_next_block_timing () in let genesis_state_timestamp = @@ -904,8 +911,8 @@ let run ~logger ~prover ~verifier ~trust_system ~get_completed_work milliseconds before starting block producer" ; ignore ( Time.Timeout.create time_controller time_till_genesis ~f:(fun _ -> - start () ) - : unit Time.Timeout.t ) ) + start ()) + : unit Time.Timeout.t )) let run_precomputed ~logger ~verifier ~trust_system ~time_controller ~frontier_reader ~transition_writer ~precomputed_blocks @@ -920,15 +927,17 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller let start = Time.now time_controller in let module Breadcrumb = Transition_frontier.Breadcrumb 
in let produce - { Precomputed_block.scheduled_time= _ + { Precomputed_block.scheduled_time = _ ; protocol_state ; protocol_state_proof ; staged_ledger_diff - ; delta_transition_chain_proof } = + ; delta_transition_chain_proof + } = let protocol_state_hash = Protocol_state.hash protocol_state in let consensus_state_with_hash = - { With_hash.hash= protocol_state_hash - ; data= Protocol_state.consensus_state protocol_state } + { With_hash.hash = protocol_state_hash + ; data = Protocol_state.consensus_state protocol_state + } in match Broadcast_pipe.Reader.peek frontier_reader with | None -> @@ -942,7 +951,7 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller in let crumb = Transition_frontier.best_tip frontier in [%log trace] - ~metadata:[("breadcrumb", Breadcrumb.to_yojson crumb)] + ~metadata:[ ("breadcrumb", Breadcrumb.to_yojson crumb) ] "Emitting precomputed block with parent $breadcrumb%!" ; let previous_transition = Breadcrumb.validated_transition crumb @@ -953,7 +962,7 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller (With_hash.data previous_transition) in Debug_assert.debug_assert (fun () -> - [%test_result: [`Take | `Keep]] + [%test_result: [ `Take | `Keep ]] (Consensus.Hooks.select ~constants:consensus_constants ~existing: (With_hash.map ~f:External_transition.consensus_state @@ -967,14 +976,14 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller Transition_frontier.root frontier |> Breadcrumb.consensus_state_with_hash in - [%test_result: [`Take | `Keep]] + [%test_result: [ `Take | `Keep ]] (Consensus.Hooks.select ~existing:root_consensus_state_with_hash ~constants:consensus_constants ~candidate:consensus_state_with_hash ~logger) ~expect:`Take ~message: "newly generated consensus states should be selected over the \ - tf root" ) ; + tf root") ; let emit_breadcrumb () = let open Deferred.Result.Let_syntax in let previous_protocol_state_hash = @@ -983,14 +992,15 @@ let run_precomputed ~logger 
~verifier ~trust_system ~time_controller let%bind transition = let open Result.Let_syntax in External_transition.Validation.wrap - { With_hash.hash= protocol_state_hash - ; data= + { With_hash.hash = protocol_state_hash + ; data = External_transition.create ~protocol_state ~protocol_state_proof ~staged_ledger_diff ~validation_callback: (Mina_net2.Validation_callback.create_without_expiration ()) - ~delta_transition_chain_proof () } + ~delta_transition_chain_proof () + } |> External_transition.skip_time_received_validation `This_transition_was_not_received_via_gossip |> External_transition.skip_protocol_versions_validation @@ -1024,13 +1034,13 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller | `Invalid_staged_ledger_hash _ | `Not_selected_over_frontier_root | `Parent_missing_from_frontier ) as err -> - err ) ) + err)) in [%str_log trace] - ~metadata:[("breadcrumb", Breadcrumb.to_yojson breadcrumb)] + ~metadata:[ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ] Block_produced ; let metadata = - [("state_hash", State_hash.to_yojson protocol_state_hash)] + [ ("state_hash", State_hash.to_yojson protocol_state_hash) ] in Mina_metrics.(Counter.inc_one Block_producer.blocks_produced) ; let%bind.Async.Deferred () = @@ -1047,17 +1057,18 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller ( Time.Timeout.create time_controller (Time.Span.of_ms 20000L) ~f:(Fn.const ()) |> Time.Timeout.to_deferred ) - (Fn.const (Ok `Timed_out)) ] + (Fn.const (Ok `Timed_out)) + ] >>= function | `Transition_accepted -> [%log info] ~metadata - "Generated transition $state_hash was accepted into \ - transition frontier" ; + "Generated transition $state_hash was accepted into transition \ + frontier" ; return () | `Timed_out -> (* FIXME #3167: this should be fatal, and more importantly, - shouldn't happen. - *) + shouldn't happen. + *) [%log fatal] ~metadata "Timed out waiting for generated transition $state_hash to \ enter transition frontier. 
Continuing to produce new blocks \ @@ -1081,27 +1092,28 @@ let run_precomputed ~logger ~verifier ~trust_system ~time_controller in emit_next_block precomputed_blocks | Some _transition_frontier -> ( - match Sequence.next precomputed_blocks with - | Some (precomputed_block, precomputed_blocks) -> - let new_time_offset = - Core_kernel.Time.diff (Core_kernel.Time.now ()) - (Block_time.to_time - precomputed_block.Precomputed_block.scheduled_time) - in - [%log info] - "Changing time offset from $old_time_offset to $new_time_offset" - ~metadata: - [ ( "old_time_offset" - , `String - (Core_kernel.Time.Span.to_string_hum - (Block_time.Controller.get_time_offset ~logger)) ) - ; ( "new_time_offset" - , `String (Core_kernel.Time.Span.to_string_hum new_time_offset) - ) ] ; - Block_time.Controller.set_time_offset new_time_offset ; - let%bind () = produce precomputed_block in - emit_next_block precomputed_blocks - | None -> - return () ) + match Sequence.next precomputed_blocks with + | Some (precomputed_block, precomputed_blocks) -> + let new_time_offset = + Core_kernel.Time.diff (Core_kernel.Time.now ()) + (Block_time.to_time + precomputed_block.Precomputed_block.scheduled_time) + in + [%log info] + "Changing time offset from $old_time_offset to $new_time_offset" + ~metadata: + [ ( "old_time_offset" + , `String + (Core_kernel.Time.Span.to_string_hum + (Block_time.Controller.get_time_offset ~logger)) ) + ; ( "new_time_offset" + , `String + (Core_kernel.Time.Span.to_string_hum new_time_offset) ) + ] ; + Block_time.Controller.set_time_offset new_time_offset ; + let%bind () = produce precomputed_block in + emit_next_block precomputed_blocks + | None -> + return () ) in emit_next_block precomputed_blocks diff --git a/src/lib/block_time/block_time.ml b/src/lib/block_time/block_time.ml index 683f474e713..e6b2c4988bf 100644 --- a/src/lib/block_time/block_time.ml +++ b/src/lib/block_time/block_time.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open 
Core_kernel open Snark_params @@ -32,8 +31,7 @@ module Time = struct let zero = UInt64.zero module Controller = struct - [%%if - time_offsets] + [%%if time_offsets] type t = unit -> Time.Span.t [@@deriving sexp] @@ -194,8 +192,7 @@ module Time = struct Time.of_span_since_epoch (Time.Span.of_ms (Int64.to_float (UInt64.to_int64 t))) - [%%if - time_offsets] + [%%if time_offsets] let now offset = of_time (Time.sub (Time.now ()) (offset ())) @@ -236,8 +233,7 @@ module Time = struct let to_string = Fn.compose Int64.to_string to_int64 - [%%if - time_offsets] + [%%if time_offsets] let to_string_system_time (offset : Controller.t) (t : t) : string = let t2 : t = diff --git a/src/lib/block_time/block_time.mli b/src/lib/block_time/block_time.mli index 0748dd22f12..585d41b5c1c 100644 --- a/src/lib/block_time/block_time.mli +++ b/src/lib/block_time/block_time.mli @@ -61,9 +61,9 @@ module Time : sig include Tick.Snarkable.Bits.Faithful - with type Unpacked.value = t - and type Packed.value = t - and type Packed.var = private Tick.Field.Var.t + with type Unpacked.value = t + and type Packed.value = t + and type Packed.var = private Tick.Field.Var.t module Checked : sig open Snark_params.Tick @@ -99,8 +99,8 @@ module Time : sig include Tick.Snarkable.Bits.Faithful - with type Unpacked.value = t - and type Packed.value = t + with type Unpacked.value = t + and type Packed.value = t val to_time_ns_span : t -> Core.Time_ns.Span.t @@ -175,29 +175,28 @@ end include module type of Time with type t = Time.t -module Timeout : - sig - type 'a t +module Timeout : sig + type 'a t - type time + type time - val create : Controller.t -> Span.t -> f:(time -> 'a) -> 'a t + val create : Controller.t -> Span.t -> f:(time -> 'a) -> 'a t - val to_deferred : 'a t -> 'a Async_kernel.Deferred.t + val to_deferred : 'a t -> 'a Async_kernel.Deferred.t - val peek : 'a t -> 'a option + val peek : 'a t -> 'a option - val cancel : Controller.t -> 'a t -> 'a -> unit + val cancel : Controller.t -> 'a t -> 'a -> 
unit - val remaining_time : 'a t -> Span.t + val remaining_time : 'a t -> Span.t - val await : - timeout_duration:Span.t - -> Controller.t - -> 'a Deferred.t - -> [`Ok of 'a | `Timeout] Deferred.t + val await : + timeout_duration:Span.t + -> Controller.t + -> 'a Deferred.t + -> [ `Ok of 'a | `Timeout ] Deferred.t - val await_exn : - timeout_duration:Span.t -> Controller.t -> 'a Deferred.t -> 'a Deferred.t - end - with type time := t + val await_exn : + timeout_duration:Span.t -> Controller.t -> 'a Deferred.t -> 'a Deferred.t +end +with type time := t diff --git a/src/lib/blockchain_snark/blockchain.ml b/src/lib/blockchain_snark/blockchain.ml index 95bd548e429..6309df61991 100644 --- a/src/lib/blockchain_snark/blockchain.ml +++ b/src/lib/blockchain_snark/blockchain.ml @@ -10,7 +10,7 @@ module Raw_versioned__ = struct module Stable = struct module V1 = struct type t = - {state: Protocol_state.Value.Stable.V1.t; proof: Proof.Stable.V1.t} + { state : Protocol_state.Value.Stable.V1.t; proof : Proof.Stable.V1.t } [@@deriving fields, sexp, yojson] let to_latest = Fn.id @@ -19,15 +19,14 @@ module Raw_versioned__ = struct let map_creator c ~f ~state ~proof = f (c ~state ~proof) - let create ~state ~proof = {state; proof} + let create ~state ~proof = { state; proof } end end] end include Allocation_functor.Make.Versioned_v1.Full (Raw_versioned__) -[%%define_locally -Raw_versioned__.(state, proof)] +[%%define_locally Raw_versioned__.(state, proof)] include ( Stable.Latest : @@ -36,5 +35,4 @@ include ( end with type t := t ) -[%%define_locally -Stable.Latest.(create)] +[%%define_locally Stable.Latest.(create)] diff --git a/src/lib/blockchain_snark/blockchain_snark_state.ml b/src/lib/blockchain_snark/blockchain_snark_state.ml index 27254350db2..0e50130bda1 100644 --- a/src/lib/blockchain_snark/blockchain_snark_state.ml +++ b/src/lib/blockchain_snark/blockchain_snark_state.ml @@ -15,12 +15,14 @@ end module Witness = struct type t = - {prev_state: Protocol_state.Value.t; 
transition: Snark_transition.Value.t} + { prev_state : Protocol_state.Value.t + ; transition : Snark_transition.Value.t + } end -let blockchain_handler on_unhandled {Witness.prev_state; transition} = +let blockchain_handler on_unhandled { Witness.prev_state; transition } = let open Snarky_backendless.Request in - fun (With {request; respond} as r) -> + fun (With { request; respond } as r) -> let k x = respond (Provide x) in match request with | Prev_state -> @@ -34,8 +36,8 @@ let wrap_handler h w = match h with | None -> blockchain_handler - (fun (Snarky_backendless.Request.With {respond; _}) -> - respond Unhandled ) + (fun (Snarky_backendless.Request.With { respond; _ }) -> + respond Unhandled) w | Some h -> (* TODO: Clean up the handler composition interface. *) @@ -70,8 +72,8 @@ let%snarkydef step ~(logger : Logger.t) ~(constraint_constants : Genesis_constants.Constraint_constants.t) Hlist.HlistId. [ previous_state_hash - ; (txn_snark : Transaction_snark.Statement.With_sok.Checked.t) ] - new_state_hash : (_, _) Tick.Checked.t = + ; (txn_snark : Transaction_snark.Statement.With_sok.Checked.t) + ] new_state_hash : (_, _) Tick.Checked.t = let%bind transition = with_label __LOC__ (exists Snark_transition.typ ~request:(As_prover.return Transition)) @@ -121,14 +123,14 @@ let%snarkydef step ~(logger : Logger.t) in let%bind previous_state_hash = match constraint_constants.fork with - | Some {previous_state_hash= fork_prev; _} -> + | Some { previous_state_hash = fork_prev; _ } -> State_hash.if_ is_base_case ~then_:(State_hash.var_of_t fork_prev) ~else_:t.previous_state_hash | None -> Checked.return t.previous_state_hash in - let t = {t with previous_state_hash} in + let t = { t with previous_state_hash } in let%map () = let%bind h, _ = Protocol_state.hash_checked t in with_label __LOC__ (State_hash.assert_equal h new_state_hash) @@ -193,7 +195,8 @@ let%snarkydef step ~(logger : Logger.t) |> Blockchain_state.snarked_next_available_token ) ; Token_id.Checked.equal 
txn_snark.next_available_token_after ( transition |> Snark_transition.blockchain_state - |> Blockchain_state.snarked_next_available_token ) ] + |> Blockchain_state.snarked_next_available_token ) + ] >>= Boolean.all in let%bind nothing_changed = @@ -205,7 +208,8 @@ let%snarkydef step ~(logger : Logger.t) [ ledger_hash_didn't_change ; supply_increase_is_zero ; no_coinbases_popped - ; next_available_token_didn't_change ] + ; next_available_token_didn't_change + ] in let%bind correct_coinbase_status = let new_root = @@ -216,11 +220,11 @@ let%snarkydef step ~(logger : Logger.t) Pending_coinbase.Hash.equal_var new_pending_coinbase_hash new_root in let%bind () = - Boolean.Assert.any [txn_snark_input_correct; nothing_changed] + Boolean.Assert.any [ txn_snark_input_correct; nothing_changed ] in let transaction_snark_should_verifiy = Boolean.not nothing_changed in let%bind result = - Boolean.all [updated_consensus_state; correct_coinbase_status] + Boolean.all [ updated_consensus_state; correct_coinbase_status ] in let%map () = as_prover @@ -249,7 +253,8 @@ let%snarkydef step ~(logger : Logger.t) ; ("updated_consensus_state", `Bool updated_consensus_state) ; ("correct_coinbase_status", `Bool correct_coinbase_status) ; ("result", `Bool result) - ; ("no_coinbases_popped", `Bool no_coinbases_popped) ])) + ; ("no_coinbases_popped", `Bool no_coinbases_popped) + ])) in (transaction_snark_should_verifiy, result) in @@ -267,11 +272,11 @@ let%snarkydef step ~(logger : Logger.t) | Full -> Boolean.not is_base_case in - let%map () = Boolean.Assert.any [is_base_case; success] in + let%map () = Boolean.Assert.any [ is_base_case; success ] in (prev_should_verify, txn_snark_should_verify) -let check w ?handler ~proof_level ~constraint_constants txn_snark - new_state_hash : unit Or_error.t = +let check w ?handler ~proof_level ~constraint_constants txn_snark new_state_hash + : unit Or_error.t = let open Tick in check (Fn.flip handle (wrap_handler handler w) @@ -285,23 +290,23 @@ let check w 
?handler ~proof_level ~constraint_constants txn_snark ~compute:(As_prover.return txn_snark) in step ~proof_level ~constraint_constants ~logger:(Logger.create ()) - [prev; txn_snark] curr)) + [ prev; txn_snark ] curr)) () let rule ~proof_level ~constraint_constants transaction_snark self : _ Pickles.Inductive_rule.t = - { identifier= "step" - ; prevs= [self; transaction_snark] - ; main= - (fun [x1; x2] x -> + { identifier = "step" + ; prevs = [ self; transaction_snark ] + ; main = + (fun [ x1; x2 ] x -> let b1, b2 = Run.run_checked (step ~proof_level ~constraint_constants ~logger:(Logger.create ()) - [x1; x2] x) + [ x1; x2 ] x) in - [b1; b2] ) - ; main_value= - (fun [prev; (txn : Transaction_snark.Statement.With_sok.t)] curr -> + [ b1; b2 ]) + ; main_value = + (fun [ prev; (txn : Transaction_snark.Statement.With_sok.t) ] curr -> let lh t = Protocol_state.blockchain_state t |> Blockchain_state.snarked_ledger_hash @@ -315,24 +320,28 @@ let rule ~proof_level ~constraint_constants transaction_snark self : txn.Transaction_snark.Statement.supply_increase ; Pending_coinbase.Stack.equal txn.pending_coinbase_stack_state.source - txn.pending_coinbase_stack_state.target ] - |> not ] ) } + txn.pending_coinbase_stack_state.target + ] + |> not + ]) + } module Statement = struct type t = Protocol_state.Value.t - let to_field_elements (t : t) : Tick.Field.t array = [|Protocol_state.hash t|] + let to_field_elements (t : t) : Tick.Field.t array = + [| Protocol_state.hash t |] end module Statement_var = struct type t = State_hash.var - let to_field_elements (t : t) = [|State_hash.var_to_hash_packed t|] + let to_field_elements (t : t) = [| State_hash.var_to_hash_packed t |] end let typ = Typ.transport State_hash.typ ~there:Protocol_state.hash ~back:(fun _ -> - failwith "cannot unhash" ) + failwith "cannot unhash") type tag = (State_hash.var, Protocol_state.value, Nat.N2.n, Nat.N1.n) Pickles.Tag.t @@ -340,8 +349,8 @@ type tag = module type S = sig module Proof : Pickles.Proof_intf - 
with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t - and type statement = Protocol_state.Value.t + with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t + and type statement = Protocol_state.Value.t val tag : tag @@ -351,8 +360,7 @@ module type S = sig val step : Witness.t - -> ( Protocol_state.Value.t - * (Transaction_snark.Statement.With_sok.t * unit) + -> ( Protocol_state.Value.t * (Transaction_snark.Statement.With_sok.t * unit) , N2.n * (N2.n * unit) , N1.n * (N2.n * unit) , Protocol_state.Value.t @@ -374,11 +382,12 @@ let constraint_system_digests ~proof_level ~constraint_constants () = let%bind x2 = exists Transaction_snark.Statement.With_sok.typ in let%map _ = step ~proof_level ~constraint_constants ~logger:(Logger.create ()) - [x1; x2] x + [ x1; x2 ] x in () in - Tick.constraint_system ~exposing:[Mina_base.State_hash.typ] main) ) ] + Tick.constraint_system ~exposing:[ Mina_base.State_hash.typ ] main) ) + ] module Make (T : sig val tag : Transaction_snark.tag @@ -389,7 +398,7 @@ module Make (T : sig end) : S = struct open T - let tag, cache_handle, p, Pickles.Provers.[step] = + let tag, cache_handle, p, Pickles.Provers.[ step ] = Pickles.compile ~cache:Cache_dir.cache (module Statement_var) (module Statement) @@ -401,7 +410,7 @@ end) : S = struct (Genesis_constants.Constraint_constants.to_snark_keys_header constraint_constants) ~choices:(fun ~self -> - [rule ~proof_level ~constraint_constants T.tag self] ) + [ rule ~proof_level ~constraint_constants T.tag self ]) let step = with_handler step diff --git a/src/lib/blockchain_snark/blockchain_snark_state.mli b/src/lib/blockchain_snark/blockchain_snark_state.mli index 38e2a3a6f3d..67b9a220d83 100644 --- a/src/lib/blockchain_snark/blockchain_snark_state.mli +++ b/src/lib/blockchain_snark/blockchain_snark_state.mli @@ -5,7 +5,9 @@ open Pickles_types module Witness : sig type t = - {prev_state: Protocol_state.Value.t; transition: Snark_transition.Value.t} + { prev_state : Protocol_state.Value.t + ; transition : 
Snark_transition.Value.t + } end type tag = @@ -18,8 +20,9 @@ val verify : val check : Witness.t - -> ?handler:( Snarky_backendless.Request.request - -> Snarky_backendless.Request.response) + -> ?handler: + ( Snarky_backendless.Request.request + -> Snarky_backendless.Request.response) -> proof_level:Genesis_constants.Proof_level.t -> constraint_constants:Genesis_constants.Constraint_constants.t -> Transaction_snark.Statement.With_sok.t @@ -29,8 +32,8 @@ val check : module type S = sig module Proof : Pickles.Proof_intf - with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t - and type statement = Protocol_state.Value.t + with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t + and type statement = Protocol_state.Value.t val tag : tag @@ -40,8 +43,7 @@ module type S = sig val step : Witness.t - -> ( Protocol_state.Value.t - * (Transaction_snark.Statement.With_sok.t * unit) + -> ( Protocol_state.Value.t * (Transaction_snark.Statement.With_sok.t * unit) , N2.n * (N2.n * unit) , N1.n * (N2.n * unit) , Protocol_state.Value.t @@ -57,7 +59,8 @@ module Make (T : sig val constraint_constants : Genesis_constants.Constraint_constants.t val proof_level : Genesis_constants.Proof_level.t -end) : S [@@warning "-67"] +end) : S +[@@warning "-67"] val constraint_system_digests : proof_level:Genesis_constants.Proof_level.t diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml index 4f876f1122e..1d1205bf78c 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.ml +++ b/src/lib/bootstrap_controller/bootstrap_controller.ml @@ -9,17 +9,18 @@ open Mina_transition open Network_peer type Structured_log_events.t += Bootstrap_complete - [@@deriving register_event {msg= "Bootstrap state: complete."}] + [@@deriving register_event { msg = "Bootstrap state: complete." 
}] type t = - { logger: Logger.t - ; trust_system: Trust_system.t - ; consensus_constants: Consensus.Constants.t - ; verifier: Verifier.t - ; precomputed_values: Precomputed_values.t - ; mutable best_seen_transition: External_transition.Initial_validated.t - ; mutable current_root: External_transition.Initial_validated.t - ; network: Mina_networking.t } + { logger : Logger.t + ; trust_system : Trust_system.t + ; consensus_constants : Consensus.Constants.t + ; verifier : Verifier.t + ; precomputed_values : Precomputed_values.t + ; mutable best_seen_transition : External_transition.Initial_validated.t + ; mutable current_root : External_transition.Initial_validated.t + ; network : Mina_networking.t + } type time = Time.Span.t @@ -35,12 +36,13 @@ let opt_time_to_yojson = function `Null type bootstrap_cycle_stats = - { cycle_result: string - ; sync_ledger_time: time - ; staged_ledger_data_download_time: time - ; staged_ledger_construction_time: opt_time - ; local_state_sync_required: bool - ; local_state_sync_time: opt_time } + { cycle_result : string + ; sync_ledger_time : time + ; staged_ledger_data_download_time : time + ; staged_ledger_construction_time : opt_time + ; local_state_sync_required : bool + ; local_state_sync_time : opt_time + } [@@deriving to_yojson] let time_deferred deferred = @@ -55,7 +57,8 @@ let worth_getting_root t candidate = ~logger: (Logger.extend t.logger [ ( "selection_context" - , `String "Bootstrap_controller.worth_getting_root" ) ]) + , `String "Bootstrap_controller.worth_getting_root" ) + ]) ~existing: ( t.best_seen_transition |> External_transition.Validation.forget_validation_with_hash @@ -69,7 +72,7 @@ let received_bad_proof t host e = ( Violated_protocol , Some ( "Bad ancestor proof: $error" - , [("error", Error_json.error_to_yojson e)] ) )) + , [ ("error", Error_json.error_to_yojson e) ] ) )) let done_syncing_root root_sync_ledger = Option.is_some (Sync_ledger.Db.peek_valid_tree root_sync_ledger) @@ -78,8 +81,8 @@ let should_sync 
~root_sync_ledger t candidate_state = (not @@ done_syncing_root root_sync_ledger) && worth_getting_root t candidate_state -let start_sync_job_with_peer ~sender ~root_sync_ledger t peer_best_tip - peer_root = +let start_sync_job_with_peer ~sender ~root_sync_ledger t peer_best_tip peer_root + = let%bind () = Trust_system.( record t.trust_system t.logger sender @@ -130,7 +133,7 @@ let on_transition t ~sender ~root_sync_ledger ~genesis_constants with | Error e -> [%log' error t.logger] - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] !"Could not get the proof of the root transition from the network: \ $error" ; Deferred.return `Ignored @@ -144,8 +147,7 @@ let on_transition t ~sender ~root_sync_ledger ~genesis_constants | Ok (`Root root, `Best_tip best_tip) -> if done_syncing_root root_sync_ledger then return `Ignored else - start_sync_job_with_peer ~sender ~root_sync_ledger t best_tip - root + start_sync_job_with_peer ~sender ~root_sync_ledger t best_tip root | Error e -> return (received_bad_proof t sender e |> Fn.const `Ignored) ) @@ -175,11 +177,12 @@ let sync_ledger t ~preferred ~root_sync_ledger ~transition_graph ~metadata: [ ("state_hash", State_hash.to_yojson (With_hash.hash transition)) ; ( "external_transition" - , External_transition.to_yojson (With_hash.data transition) ) ] ; + , External_transition.to_yojson (With_hash.data transition) ) + ] ; Deferred.ignore_m @@ on_transition t ~sender ~root_sync_ledger ~genesis_constants transition ) - else Deferred.unit ) + else Deferred.unit) let external_transition_compare consensus_constants = Comparable.lift @@ -192,7 +195,7 @@ let external_transition_compare consensus_constants = @@ Consensus.Hooks.select ~constants:consensus_constants ~existing ~candidate ~logger:(Logger.null ()) then -1 - else 1 ) + else 1) ~f:(With_hash.map ~f:External_transition.consensus_state) (* We conditionally ask other peers for their best tip. 
This is for testing @@ -208,8 +211,7 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state let constraint_constants = precomputed_values.constraint_constants in let rec loop previous_cycles = let sync_ledger_reader, sync_ledger_writer = - create ~name:"sync ledger pipe" - (Buffered (`Capacity 50, `Overflow Crash)) + create ~name:"sync ledger pipe" (Buffered (`Capacity 50, `Overflow Crash)) in don't_wait_for (transfer_while_writer_alive transition_reader sync_ledger_writer @@ -221,14 +223,15 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state in let t = { network - ; consensus_constants= + ; consensus_constants = Precomputed_values.consensus_constants precomputed_values ; logger ; trust_system ; verifier ; precomputed_values - ; best_seen_transition= initial_root_transition - ; current_root= initial_root_transition } + ; best_seen_transition = initial_root_transition + ; current_root = initial_root_transition + } in let transition_graph = Transition_cache.create () in let temp_persistent_root_instance = @@ -253,12 +256,12 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state | Local -> None | Remote r -> - Some r ) ) + Some r) ) ~root_sync_ledger ~transition_graph ~sync_ledger_reader ~genesis_constants) ; (* We ignore the resulting ledger returned here since it will always - * be the same as the ledger we started with because we are syncing - * a db ledger. *) + * be the same as the ledger we started with because we are syncing + * a db ledger. 
*) let%map _, data = Sync_ledger.Db.valid_tree root_sync_ledger in Sync_ledger.Db.destroy root_sync_ledger ; data) @@ -275,8 +278,7 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state match staged_ledger_data_download_result with | Error err -> Deferred.return (staged_ledger_data_download_time, None, Error err) - | Ok - (scan_state, expected_merkle_root, pending_coinbases, protocol_states) + | Ok (scan_state, expected_merkle_root, pending_coinbases, protocol_states) -> ( let%map staged_ledger_construction_result = let open Deferred.Or_error.Let_syntax in @@ -302,12 +304,12 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state (`Staged_ledger_already_materialized received_staged_ledger_hash) |> Result.map_error ~f:(fun _ -> - Error.of_string "received faulty scan state from peer" ) + Error.of_string "received faulty scan state from peer") |> Deferred.return in let%bind protocol_states = - Staged_ledger.Scan_state.check_required_protocol_states - scan_state ~protocol_states + Staged_ledger.Scan_state.check_required_protocol_states scan_state + ~protocol_states |> Deferred.return in let protocol_states_map = @@ -320,21 +322,22 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state [%log error] ~metadata: [ ("new_root", State_hash.to_yojson new_state_hash) - ; ("state_hash", State_hash.to_yojson hash) ] + ; ("state_hash", State_hash.to_yojson hash) + ] "Protocol state (for scan state transactions) for \ $state_hash not found when bootstrapping to the new root \ $new_root" ; Or_error.errorf !"Protocol state (for scan state transactions) for \ - %{sexp:State_hash.t} not found when bootstrapping to \ - the new root %{sexp:State_hash.t}" + %{sexp:State_hash.t} not found when bootstrapping to the \ + new root %{sexp:State_hash.t}" hash new_state_hash | Some protocol_state -> Ok protocol_state in (* Construct the staged ledger before constructing the transition * frontier in order to verify the scan state we 
received. - * TODO: reorganize the code to avoid doing this twice (#3480) *) + * TODO: reorganize the code to avoid doing this twice (#3480) *) let open Deferred.Let_syntax in let%map staged_ledger_construction_time, construction_result = time_deferred @@ -353,10 +356,7 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state Result.map result ~f: (const - ( scan_state - , pending_coinbases - , new_root - , protocol_states ))) + (scan_state, pending_coinbases, new_root, protocol_states))) in Ok (staged_ledger_construction_time, construction_result) in @@ -387,18 +387,20 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state [ ("error", Error_json.error_to_yojson e) ; ("state_hash", State_hash.to_yojson hash) ; ( "expected_staged_ledger_hash" - , Staged_ledger_hash.to_yojson expected_staged_ledger_hash ) ] + , Staged_ledger_hash.to_yojson expected_staged_ledger_hash ) + ] "Failed to find scan state for the transition with hash $state_hash \ from the peer or received faulty scan state: $error. 
Retry \ bootstrap" ; Writer.close sync_ledger_writer ; let this_cycle = - { cycle_result= "failed to download and construct scan state" + { cycle_result = "failed to download and construct scan state" ; sync_ledger_time ; staged_ledger_data_download_time ; staged_ledger_construction_time - ; local_state_sync_required= false - ; local_state_sync_time= None } + ; local_state_sync_required = false + ; local_state_sync_time = None + } in loop (this_cycle :: previous_cycles) | Ok (scan_state, pending_coinbase, new_root, protocol_states) -> ( @@ -430,7 +432,8 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state consensus_local_state ) ; ( "consensus_state" , Consensus.Data.Consensus_state.Value.to_yojson - consensus_state ) ] + consensus_state ) + ] "Not synchronizing consensus local state" ; Deferred.return (false, Or_error.return ()) | Some sync_jobs -> @@ -443,13 +446,14 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state let%map peers = Mina_networking.random_peers t.network n in - sender :: peers ) + sender :: peers) ~query_peer: - { Consensus.Hooks.Rpcs.query= + { Consensus.Hooks.Rpcs.query = (fun peer rpc query -> Mina_networking.( query_peer t.network peer.peer_id - (Rpcs.Consensus_rpc rpc) query) ) } + (Rpcs.Consensus_rpc rpc) query)) + } ~ledger_depth: precomputed_values.constraint_constants.ledger_depth sync_jobs @@ -459,16 +463,17 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state match local_state_sync_result with | Error e -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] "Local state sync failed: $error. 
Retry bootstrap" ; Writer.close sync_ledger_writer ; let this_cycle = - { cycle_result= "failed to synchronize local state" + { cycle_result = "failed to synchronize local state" ; sync_ledger_time ; staged_ledger_data_download_time ; staged_ledger_construction_time ; local_state_sync_required - ; local_state_sync_time= Some local_state_sync_time } + ; local_state_sync_time = Some local_state_sync_time + } in loop (this_cycle :: previous_cycles) | Ok () -> @@ -487,7 +492,7 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state Transition_frontier.Persistent_root.( with_instance_exn persistent_root ~f:(fun instance -> Instance.set_root_state_hash instance - (External_transition.Validated.state_hash new_root) )) ; + (External_transition.Validated.state_hash new_root))) ; let%map new_frontier = let fail msg = failwith @@ -515,7 +520,8 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state let logger = Logger.extend logger [ ( "context" - , `String "Filter collected transitions in bootstrap" ) ] + , `String "Filter collected transitions in bootstrap" ) + ] in let root_consensus_state = Transition_frontier.( @@ -534,7 +540,7 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state ~candidate: (With_hash.map ~f:External_transition.consensus_state transition) - ~logger ) + ~logger) in [%log debug] "Sorting filtered transitions by consensus state" ~metadata:[] ; @@ -549,12 +555,13 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state (external_transition_compare t.consensus_constants)) in let this_cycle = - { cycle_result= "success" + { cycle_result = "success" ; sync_ledger_time ; staged_ledger_data_download_time ; staged_ledger_construction_time ; local_state_sync_required - ; local_state_sync_time= Some local_state_sync_time } + ; local_state_sync_time = Some local_state_sync_time + } in ( this_cycle :: previous_cycles , (new_frontier, sorted_filtered_collected_transitions) ) ) @@ -564,10 
+571,10 @@ let run ~logger ~trust_system ~verifier ~network ~consensus_local_state ~metadata: [ ("time_elapsed", time_to_yojson time_elapsed) ; ( "bootstrap_stats" - , `List (List.map ~f:bootstrap_cycle_stats_to_yojson cycles) ) ] ; + , `List (List.map ~f:bootstrap_cycle_stats_to_yojson cycles) ) + ] ; Mina_metrics.( - Gauge.set Bootstrap.bootstrap_time_ms - Core.Time.(Span.to_ms @@ time_elapsed)) ; + Gauge.set Bootstrap.bootstrap_time_ms Core.Time.(Span.to_ms @@ time_elapsed)) ; result let%test_module "Bootstrap_controller tests" = @@ -591,15 +598,14 @@ let%test_module "Bootstrap_controller tests" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) module Genesis_ledger = (val precomputed_values.genesis_ledger) let downcast_transition ~sender transition = let transition = transition - |> External_transition.Validation - .reset_frontier_dependencies_validation + |> External_transition.Validation.reset_frontier_dependencies_validation |> External_transition.Validation.reset_staged_ledger_diff_validation in Envelope.Incoming.wrap ~data:transition @@ -612,19 +618,19 @@ let%test_module "Bootstrap_controller tests" = let make_non_running_bootstrap ~genesis_root ~network = let transition = genesis_root - |> External_transition.Validation - .reset_frontier_dependencies_validation + |> External_transition.Validation.reset_frontier_dependencies_validation |> External_transition.Validation.reset_staged_ledger_diff_validation in { logger - ; consensus_constants= + ; consensus_constants = Precomputed_values.consensus_constants precomputed_values ; trust_system ; verifier ; precomputed_values - ; best_seen_transition= transition - ; current_root= transition - ; network } + ; best_seen_transition = transition + ; current_root = transition + ; network + } let%test_unit "Bootstrap controller 
caches all transitions it is passed \ through the transition_reader" = @@ -635,7 +641,7 @@ let%test_module "Bootstrap_controller tests" = let%bind fake_network = Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length - [fresh_peer; fresh_peer]) + [ fresh_peer; fresh_peer ]) in let%map make_branch = Transition_frontier.Breadcrumb.For_tests.gen_seq ~precomputed_values @@ -643,14 +649,14 @@ let%test_module "Bootstrap_controller tests" = ~accounts_with_secret_keys:(Lazy.force Genesis_ledger.accounts) branch_size in - let [me; _] = fake_network.peer_networks in + let [ me; _ ] = fake_network.peer_networks in let branch = Async.Thread_safe.block_on_async_exn (fun () -> - make_branch (Transition_frontier.root me.state.frontier) ) + make_branch (Transition_frontier.root me.state.frontier)) in (fake_network, branch)) ~f:(fun (fake_network, branch) -> - let [me; other] = fake_network.peer_networks in + let [ me; other ] = fake_network.peer_networks in let genesis_root = Transition_frontier.( Breadcrumb.validated_transition @@ root me.state.frontier) @@ -676,10 +682,10 @@ let%test_module "Bootstrap_controller tests" = let%bind () = Deferred.List.iter branch ~f:(fun breadcrumb -> Strict_pipe.Writer.write sync_ledger_writer - (downcast_breadcrumb ~sender:other.peer breadcrumb) ) + (downcast_breadcrumb ~sender:other.peer breadcrumb)) in Strict_pipe.Writer.close sync_ledger_writer ; - sync_deferred ) ; + sync_deferred) ; let expected_transitions = List.map branch ~f: @@ -692,8 +698,8 @@ let%test_module "Bootstrap_controller tests" = |> List.map ~f: (Fn.compose - External_transition.Validation - .forget_validation_with_hash Envelope.Incoming.data) + External_transition.Validation.forget_validation_with_hash + Envelope.Incoming.data) in let module E = struct module T = struct @@ -709,7 +715,7 @@ let%test_module "Bootstrap_controller tests" = end in [%test_result: E.Set.t] (E.Set.of_list saved_transitions) - ~expect:(E.Set.of_list expected_transitions) ) + 
~expect:(E.Set.of_list expected_transitions)) let run_bootstrap ~timeout_duration ~my_net ~transition_reader = let open Fake_network in @@ -749,7 +755,7 @@ let%test_module "Bootstrap_controller tests" = ignore ( List.fold_result ~init:root incoming_transitions ~f:(fun max_acc incoming_transition -> - let With_hash.{data= transition; _}, _ = + let With_hash.{ data = transition; _ }, _ = Envelope.Incoming.data incoming_transition in let open Result.Let_syntax in @@ -761,7 +767,7 @@ let%test_module "Bootstrap_controller tests" = (Error.of_string "The blocks are not sorted in increasing order") in - transition ) + transition) |> Or_error.ok_exn : External_transition.t ) @@ -771,9 +777,10 @@ let%test_module "Bootstrap_controller tests" = gen ~precomputed_values ~verifier ~max_frontier_length [ fresh_peer ; peer_with_branch - ~frontier_branch_size:((max_frontier_length * 2) + 2) ]) + ~frontier_branch_size:((max_frontier_length * 2) + 2) + ]) ~f:(fun fake_network -> - let [my_net; peer_net] = fake_network.peer_networks in + let [ my_net; peer_net ] = fake_network.peer_networks in let transition_reader, transition_writer = Pipe_lib.Strict_pipe.create ~name:(__MODULE__ ^ __LOC__) (Buffered (`Capacity 10, `Overflow Drop_head)) @@ -789,7 +796,7 @@ let%test_module "Bootstrap_controller tests" = Async.Thread_safe.block_on_async_exn (fun () -> run_bootstrap ~timeout_duration:(Block_time.Span.of_ms 30_000L) - ~my_net ~transition_reader ) + ~my_net ~transition_reader) in assert_transitions_increasingly_sorted ~root:(Transition_frontier.root new_frontier) @@ -799,8 +806,8 @@ let%test_module "Bootstrap_controller tests" = @@ Transition_frontier.root_snarked_ledger new_frontier ) ~expect: ( Ledger.Db.merkle_root - @@ Transition_frontier.root_snarked_ledger - peer_net.state.frontier ) ) + @@ Transition_frontier.root_snarked_ledger peer_net.state.frontier + )) let%test_unit "reconstruct staged_ledgers using \ of_scan_state_and_snarked_ledger" = @@ -824,9 +831,7 @@ let%test_module 
"Bootstrap_controller tests" = in let scan_state = Staged_ledger.scan_state staged_ledger in let get_state hash = - match - Transition_frontier.find_protocol_state frontier hash - with + match Transition_frontier.find_protocol_state frontier hash with | Some protocol_state -> Ok protocol_state | None -> @@ -839,16 +844,16 @@ let%test_module "Bootstrap_controller tests" = Staged_ledger.pending_coinbase_collection staged_ledger in let%map actual_staged_ledger = - Staged_ledger - .of_scan_state_pending_coinbases_and_snarked_ledger ~scan_state - ~logger ~verifier ~constraint_constants ~snarked_ledger - ~expected_merkle_root ~pending_coinbases ~get_state + Staged_ledger.of_scan_state_pending_coinbases_and_snarked_ledger + ~scan_state ~logger ~verifier ~constraint_constants + ~snarked_ledger ~expected_merkle_root ~pending_coinbases + ~get_state |> Deferred.Or_error.ok_exn in assert ( Staged_ledger_hash.equal (Staged_ledger.hash staged_ledger) - (Staged_ledger.hash actual_staged_ledger) ) ) ) + (Staged_ledger.hash actual_staged_ledger) ))) (* let%test_unit "if we see a new transition that is better than the \ diff --git a/src/lib/bootstrap_controller/bootstrap_controller.mli b/src/lib/bootstrap_controller/bootstrap_controller.mli index e11054a3f76..8bf3605c292 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.mli +++ b/src/lib/bootstrap_controller/bootstrap_controller.mli @@ -3,8 +3,7 @@ open Mina_transition open Pipe_lib open Network_peer -type Structured_log_events.t += Bootstrap_complete - [@@deriving register_event] +type Structured_log_events.t += Bootstrap_complete [@@deriving register_event] val run : logger:Logger.t @@ -12,17 +11,16 @@ val run : -> verifier:Verifier.t -> network:Mina_networking.t -> consensus_local_state:Consensus.Data.Local_state.t - -> transition_reader:External_transition.Initial_validated.t - Envelope.Incoming.t - Strict_pipe.Reader.t - -> best_seen_transition:External_transition.Initial_validated.t - Envelope.Incoming.t - option 
+ -> transition_reader: + External_transition.Initial_validated.t Envelope.Incoming.t + Strict_pipe.Reader.t + -> best_seen_transition: + External_transition.Initial_validated.t Envelope.Incoming.t option -> persistent_root:Transition_frontier.Persistent_root.t -> persistent_frontier:Transition_frontier.Persistent_frontier.t -> initial_root_transition:External_transition.Validated.t -> precomputed_values:Precomputed_values.t - -> catchup_mode:[`Normal | `Super] + -> catchup_mode:[ `Normal | `Super ] -> ( Transition_frontier.t * External_transition.Initial_validated.t Envelope.Incoming.t list ) Deferred.t diff --git a/src/lib/bootstrap_controller/transition_cache.ml b/src/lib/bootstrap_controller/transition_cache.ml index 06e5aeb0efd..3d593b91edf 100644 --- a/src/lib/bootstrap_controller/transition_cache.ml +++ b/src/lib/bootstrap_controller/transition_cache.ml @@ -15,7 +15,7 @@ let create () = State_hash.Table.create () let add (t : t) ~parent new_child = State_hash.Table.update t parent ~f:(function | None -> - [new_child] + [ new_child ] | Some children -> if List.mem children new_child ~equal:(fun e1 e2 -> @@ -23,9 +23,9 @@ let add (t : t) ~parent new_child = ( Envelope.Incoming.data e1 |> External_transition.Initial_validated.state_hash ) ( Envelope.Incoming.data e2 - |> External_transition.Initial_validated.state_hash ) ) + |> External_transition.Initial_validated.state_hash )) then children - else new_child :: children ) + else new_child :: children) let data t = let collected_transitions = State_hash.Table.data t |> List.concat in diff --git a/src/lib/bootstrap_controller/transition_cache.mli b/src/lib/bootstrap_controller/transition_cache.mli index 69460c6de6c..5b63c7b1494 100644 --- a/src/lib/bootstrap_controller/transition_cache.mli +++ b/src/lib/bootstrap_controller/transition_cache.mli @@ -12,5 +12,4 @@ val add : -> External_transition.Initial_validated.t Envelope.Incoming.t -> unit -val data : - t -> External_transition.Initial_validated.t 
Envelope.Incoming.t list +val data : t -> External_transition.Initial_validated.t Envelope.Incoming.t list diff --git a/src/lib/bowe_gabizon_hash/bowe_gabizon_hash.ml b/src/lib/bowe_gabizon_hash/bowe_gabizon_hash.ml index f4ea4c85740..456212afa6c 100644 --- a/src/lib/bowe_gabizon_hash/bowe_gabizon_hash.ml +++ b/src/lib/bowe_gabizon_hash/bowe_gabizon_hash.ml @@ -5,15 +5,20 @@ module Make (Inputs : Inputs_intf.S) = struct let g1 g = let x, y = G1.to_affine_exn g in - [|x; y|] + [| x; y |] let g2 g = let x, y = G2.to_affine_exn g in - Array.of_list (List.concat_map ~f:Fqe.to_list [x; y]) + Array.of_list (List.concat_map ~f:Fqe.to_list [ x; y ]) let hash ?message ~a ~b ~c ~delta_prime = hash (Array.concat - [g1 a; g2 b; g1 c; g2 delta_prime; Option.value ~default:[||] message]) + [ g1 a + ; g2 b + ; g1 c + ; g2 delta_prime + ; Option.value ~default:[||] message + ]) |> group_map |> G1.of_affine end diff --git a/src/lib/cache_dir/cache_dir.ml b/src/lib/cache_dir/cache_dir.ml index 931dc04ae7d..1767b421a6f 100644 --- a/src/lib/cache_dir/cache_dir.ml +++ b/src/lib/cache_dir/cache_dir.ml @@ -22,13 +22,14 @@ let brew_install_path = "/usr/local/var/coda" let cache = - let dir d w = Key_cache.Spec.On_disk {directory= d; should_write= w} in + let dir d w = Key_cache.Spec.On_disk { directory = d; should_write = w } in [ dir manual_install_path false ; dir brew_install_path false ; dir s3_install_path false ; dir autogen_path true ; Key_cache.Spec.S3 - {bucket_prefix= s3_keys_bucket_prefix; install_path= s3_install_path} ] + { bucket_prefix = s3_keys_bucket_prefix; install_path = s3_install_path } + ] let env_path = match Sys.getenv "CODA_KEYS_PATH" with @@ -43,7 +44,8 @@ let possible_paths base = ; brew_install_path ; s3_install_path ; autogen_path - ; manual_install_path ] ~f:(fun d -> d ^/ base) + ; manual_install_path + ] ~f:(fun d -> d ^/ base) let load_from_s3 s3_bucket_prefix s3_install_path ~logger = Deferred.map ~f:Result.join @@ -53,7 +55,8 @@ let load_from_s3 
s3_bucket_prefix s3_install_path ~logger = [%log trace] "Downloading file from S3" ~metadata: [ ("url", `String uri_string) - ; ("local_file_path", `String file_path) ] ; + ; ("local_file_path", `String file_path) + ] ; let%map _result = Process.run_exn ~prog:"curl" ~args: @@ -62,16 +65,18 @@ let load_from_s3 s3_bucket_prefix s3_install_path ~logger = ; "--show-error" ; "-o" ; file_path - ; uri_string ] + ; uri_string + ] () in [%log trace] "Download finished" ~metadata: [ ("url", `String uri_string) - ; ("local_file_path", `String file_path) ] ; + ; ("local_file_path", `String file_path) + ] ; Result.return () in Deferred.List.map ~f:each_uri (List.zip_exn s3_bucket_prefix s3_install_path) - |> Deferred.map ~f:Result.all_unit ) + |> Deferred.map ~f:Result.all_unit) |> Deferred.Result.map_error ~f:Error.of_exn diff --git a/src/lib/cache_lib/cache_lib.ml b/src/lib/cache_lib/cache_lib.ml index dffd768c470..557b3f16e46 100644 --- a/src/lib/cache_lib/cache_lib.ml +++ b/src/lib/cache_lib/cache_lib.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel module Intf = Intf @@ -7,8 +6,7 @@ module Intf = Intf include Impl.Make (struct let msg = sprintf "cached item was not consumed (cache name = \"%s\")" - [%%if - cache_exceptions] + [%%if cache_exceptions] let handle_unconsumed_cache_item ~logger:_ ~cache_name = let open Error in @@ -18,7 +16,7 @@ include Impl.Make (struct let handle_unconsumed_cache_item ~logger ~cache_name = [%log error] "Unconsumed item in cache: $cache" - ~metadata:[("cache", `String (msg cache_name))] + ~metadata:[ ("cache", `String (msg cache_name)) ] [%%endif] end) diff --git a/src/lib/cache_lib/impl.ml b/src/lib/cache_lib/impl.ml index 201cab0f9fc..aa2d1d8a314 100644 --- a/src/lib/cache_lib/impl.ml +++ b/src/lib/cache_lib/impl.ml @@ -13,24 +13,26 @@ module Make (Inputs : Inputs_intf) : Intf.Main.S = struct val logger : _ t -> Logger.t - val remove : 'elt t -> [`Consumed | `Unconsumed | `Failure] -> 'elt -> 
unit + val remove : + 'elt t -> [ `Consumed | `Unconsumed | `Failure ] -> 'elt -> unit end = struct type 'a t = - { name: string - ; on_add: 'a -> unit - ; on_remove: [`Consumed | `Unconsumed | `Failure] -> 'a -> unit - ; set: ('a, 'a Intf.final_state) Hashtbl.t - ; logger: Logger.t } + { name : string + ; on_add : 'a -> unit + ; on_remove : [ `Consumed | `Unconsumed | `Failure ] -> 'a -> unit + ; set : ('a, 'a Intf.final_state) Hashtbl.t + ; logger : Logger.t + } - let name {name; _} = name + let name { name; _ } = name - let logger {logger; _} = logger + let logger { logger; _ } = logger let create (type elt) ~name ~logger ~on_add ~on_remove (module Elt : Hashtbl.Key_plain with type t = elt) : elt t = let set = Hashtbl.create ~growth_allowed:true ?size:None (module Elt) in - let logger = Logger.extend logger [("cache", `String name)] in - {name; on_add; on_remove; set; logger} + let logger = Logger.extend logger [ ("cache", `String name) ] in + { name; on_add; on_remove; set; logger } let final_state t x = Hashtbl.find t.set x @@ -50,22 +52,23 @@ module Make (Inputs : Inputs_intf) : Intf.Main.S = struct and Cached : sig include Intf.Cached.S - val create : - 'elt Cache.t -> 'elt -> 'elt Intf.final_state -> ('elt, 'elt) t + val create : 'elt Cache.t -> 'elt -> 'elt Intf.final_state -> ('elt, 'elt) t end = struct type (_, _) t = | Base : - { data: 'a - ; cache: 'a Cache.t - ; mutable transformed: bool - ; final_state: 'a Intf.final_state } + { data : 'a + ; cache : 'a Cache.t + ; mutable transformed : bool + ; final_state : 'a Intf.final_state + } -> ('a, 'a) t | Derivative : - { original: 'a - ; mutant: 'b - ; cache: 'a Cache.t - ; mutable transformed: bool - ; final_state: 'a Intf.final_state } + { original : 'a + ; mutant : 'b + ; cache : 'a Cache.t + ; mutable transformed : bool + ; final_state : 'a Intf.final_state + } -> ('b, 'a) t | Pure : 'a -> ('a, _) t @@ -158,11 +161,11 @@ module Make (Inputs : Inputs_intf) : Intf.Main.S = struct let cache = cache t in 
Cache.remove cache `Unconsumed (original t) ; Inputs.handle_unconsumed_cache_item ~logger:(Cache.logger cache) - ~cache_name:(Cache.name cache) ) ) ; + ~cache_name:(Cache.name cache) )) ; t let create cache data final_state = - attach_finalizer (Base {data; cache; transformed= false; final_state}) + attach_finalizer (Base { data; cache; transformed = false; final_state }) let assert_not_consumed t msg = let open Error in @@ -189,11 +192,12 @@ module Make (Inputs : Inputs_intf) : Intf.Main.S = struct mark_transformed t ; attach_finalizer (Derivative - { original= original t - ; mutant= f (value t) - ; cache= cache t - ; transformed= false - ; final_state= final_state t }) + { original = original t + ; mutant = f (value t) + ; cache = cache t + ; transformed = false + ; final_state = final_state t + }) let invalidate_with_failure (type a b) (t : (a, b) t) : a = assert_not_finalized t "Cached item has already been finalized" ; @@ -227,17 +231,17 @@ module Make (Inputs : Inputs_intf) : Intf.Main.S = struct module Transmuter_cache : Intf.Transmuter_cache.F - with module Cached := Cached - and module Cache := Cache = struct + with module Cached := Cached + and module Cache := Cache = struct module Make (Transmuter : Intf.Transmuter.S) (Registry : Intf.Registry.S with type element := Transmuter.Target.t) (Name : Intf.Constant.S with type t := string) : Intf.Transmuter_cache.S - with module Cached := Cached - and module Cache := Cache - and type source = Transmuter.Source.t - and type target = Transmuter.Target.t = struct + with module Cached := Cached + and module Cache := Cache + and type source = Transmuter.Source.t + and type target = Transmuter.Target.t = struct type source = Transmuter.Source.t type target = Transmuter.Target.t @@ -289,9 +293,9 @@ let%test_module "cache_lib test instance" = with_cache ~logger ~f:(fun cache -> with_item ~f:(fun data -> let x = Cache.register_exn cache data in - ignore (Cached.invalidate_with_success x : string) ) ; + ignore 
(Cached.invalidate_with_success x : string)) ; Gc.full_major () ; - assert (!dropped_cache_items = 0) ) + assert (!dropped_cache_items = 0)) let%test_unit "cached objects are garbage collected independently of caches" = @@ -299,21 +303,18 @@ let%test_module "cache_lib test instance" = let logger = Logger.null () in with_cache ~logger ~f:(fun cache -> with_item ~f:(fun data -> - ignore (Cache.register_exn cache data : (string, string) Cached.t) - ) ; + ignore (Cache.register_exn cache data : (string, string) Cached.t)) ; Gc.full_major () ; - assert (!dropped_cache_items = 1) ) + assert (!dropped_cache_items = 1)) - let%test_unit "cached objects are garbage collected independently of data" - = + let%test_unit "cached objects are garbage collected independently of data" = setup () ; let logger = Logger.null () in with_item ~f:(fun data -> with_cache ~logger ~f:(fun cache -> - ignore (Cache.register_exn cache data : (string, string) Cached.t) - ) ; + ignore (Cache.register_exn cache data : (string, string) Cached.t)) ; Gc.full_major () ; - assert (!dropped_cache_items = 1) ) + assert (!dropped_cache_items = 1)) let%test_unit "cached objects are not unexpectedly garbage collected" = setup () ; @@ -323,12 +324,12 @@ let%test_module "cache_lib test instance" = let cached = Cache.register_exn cache data in Gc.full_major () ; assert (!dropped_cache_items = 0) ; - ignore (Cached.invalidate_with_success cached : string) ) ) ; + ignore (Cached.invalidate_with_success cached : string))) ; Gc.full_major () ; assert (!dropped_cache_items = 0) - let%test_unit "garbage collection of derived cached objects do not \ - trigger unconsumption handler for parents" = + let%test_unit "garbage collection of derived cached objects do not trigger \ + unconsumption handler for parents" = setup () ; let logger = Logger.null () in with_cache ~logger ~f:(fun cache -> @@ -337,9 +338,9 @@ let%test_module "cache_lib test instance" = ( Cache.register_exn cache data |> Cached.transform ~f:(Fn.const 5) 
|> Cached.transform ~f:(Fn.const ()) - : (unit, string) Cached.t ) ) ; + : (unit, string) Cached.t )) ; Gc.full_major () ; - assert (!dropped_cache_items = 1) ) + assert (!dropped_cache_items = 1)) let%test_unit "properly invalidated derived cached objects do not trigger \ any unconsumption handler calls" = @@ -350,9 +351,9 @@ let%test_module "cache_lib test instance" = Cache.register_exn cache data |> Cached.transform ~f:(Fn.const 5) |> Cached.transform ~f:(Fn.const ()) - |> Cached.invalidate_with_success ) ; + |> Cached.invalidate_with_success) ; Gc.full_major () ; - assert (!dropped_cache_items = 0) ) + assert (!dropped_cache_items = 0)) let%test_unit "invalidate original cached object would also remove the \ derived cached object" = @@ -366,12 +367,11 @@ let%test_module "cache_lib test instance" = |> Cached.transform ~f:(Fn.const 5) |> Cached.transform ~f:(Fn.const ()) in - ignore (Cached.invalidate_with_success src : string) ) ; + ignore (Cached.invalidate_with_success src : string)) ; Gc.full_major () ; - assert (!dropped_cache_items = 0) ) + assert (!dropped_cache_items = 0)) - let%test_unit "deriving a cached object inhabits its parent's final_state" - = + let%test_unit "deriving a cached object inhabits its parent's final_state" = setup () ; with_cache ~logger:(Logger.null ()) ~f:(fun cache -> with_item ~f:(fun data -> @@ -379,5 +379,5 @@ let%test_module "cache_lib test instance" = let der = Cached.transform src ~f:(Fn.const 5) in let src_final_state = Cached.final_state src in let der_final_state = Cached.final_state der in - assert (Ivar.equal src_final_state der_final_state) ) ) + assert (Ivar.equal src_final_state der_final_state))) end ) diff --git a/src/lib/cache_lib/intf.ml b/src/lib/cache_lib/intf.ml index ce96e69286d..265a6e3b1d3 100644 --- a/src/lib/cache_lib/intf.ml +++ b/src/lib/cache_lib/intf.ml @@ -1,7 +1,7 @@ open Async_kernel open Core_kernel -type 'a final_state = [`Failed | `Success of 'a] Ivar.t +type 'a final_state = [ `Failed | 
`Success of 'a ] Ivar.t (** [Constant.S] is a helper signature for passing constant values * to functors. @@ -24,7 +24,7 @@ module Registry = struct val element_added : element -> unit val element_removed : - [`Consumed | `Unconsumed | `Failure] -> element -> unit + [ `Consumed | `Unconsumed | `Failure ] -> element -> unit end end @@ -111,7 +111,7 @@ module Cache = struct name:string -> logger:Logger.t -> on_add:('elt -> unit) - -> on_remove:([`Consumed | `Unconsumed | `Failure] -> 'elt -> unit) + -> on_remove:([ `Consumed | `Unconsumed | `Failure ] -> 'elt -> unit) -> (module Hashtbl.Key_plain with type t = 'elt) -> 'elt t @@ -184,11 +184,11 @@ module Transmuter_cache = struct (Registry : Registry.S with type element := Transmuter.Target.t) (Name : Constant.S with type t := string) : S - with module Cached := Cached - and module Cache := Cache - and type source = Transmuter.Source.t - and type target = Transmuter.Target.t - [@@warning "-67"] + with module Cached := Cached + and module Cache := Cache + and type source = Transmuter.Source.t + and type target = Transmuter.Target.t + [@@warning "-67"] end end diff --git a/src/lib/child_processes/child_processes.ml b/src/lib/child_processes/child_processes.ml index d4cb38a7ad1..8d006c8e80e 100644 --- a/src/lib/child_processes/child_processes.ml +++ b/src/lib/child_processes/child_processes.ml @@ -8,18 +8,19 @@ module Termination = Termination exception Child_died type t = - { process: Process.t - ; stdout_pipe: string Strict_pipe.Reader.t - ; stderr_pipe: string Strict_pipe.Reader.t - ; stdin: Writer.t - ; terminated_ivar: Unix.Exit_or_signal.t Or_error.t Ivar.t - ; mutable killing: bool - ; mutable termination_response: + { process : Process.t + ; stdout_pipe : string Strict_pipe.Reader.t + ; stderr_pipe : string Strict_pipe.Reader.t + ; stdin : Writer.t + ; terminated_ivar : Unix.Exit_or_signal.t Or_error.t Ivar.t + ; mutable killing : bool + ; mutable termination_response : [ `Always_raise | `Raise_on_failure | 
`Handler of killed:bool -> Unix.Exit_or_signal.t Or_error.t -> unit Deferred.t - | `Ignore ] } + | `Ignore ] + } let stdout_lines : t -> string Strict_pipe.Reader.t = fun t -> t.stdout_pipe @@ -41,11 +42,7 @@ let keep_trying : | [] -> return e | x :: xs -> ( - match%bind f x with - | Ok r -> - return (Ok r) - | Error e -> - go (Error e) xs ) + match%bind f x with Ok r -> return (Ok r) | Error e -> go (Error e) xs ) in go (Or_error.error_string "empty input") xs @@ -75,7 +72,7 @@ let get_project_root () = let get_mina_binary () = let open Async in let open Deferred.Or_error.Let_syntax in - let%bind os = Process.run ~prog:"uname" ~args:["-s"] () in + let%bind os = Process.run ~prog:"uname" ~args:[ "-s" ] () in if String.equal os "Darwin\n" then let open Ctypes in let ns_get_executable_path = @@ -123,8 +120,7 @@ let maybe_kill_and_unlock : string -> Filename.t -> Logger.t -> unit Deferred.t pid_str ; Deferred.unit | `Ok -> ( - [%log debug] "Successfully sent TERM signal to %s (%s)" name - pid_str ; + [%log debug] "Successfully sent TERM signal to %s (%s)" name pid_str ; let%bind () = after (Time.Span.of_sec 0.5) in match Signal.send Signal.kill (`Pid pid) with | `No_such_process -> @@ -151,16 +147,17 @@ let maybe_kill_and_unlock : string -> Filename.t -> Logger.t -> unit Deferred.t name ~metadata: [ ("childPid", `Int (Pid.to_int pid)) - ; ("exn", `String (Exn.to_string exn)) ] ; + ; ("exn", `String (Exn.to_string exn)) + ] ; Deferred.unit ) ) | `Unknown | `No -> [%log debug] "No PID file for %s" name ; Deferred.unit type output_handling = - [`Log of Logger.Level.t | `Don't_log] - * [`Pipe | `No_pipe] - * [`Keep_empty | `Filter_empty] + [ `Log of Logger.Level.t | `Don't_log ] + * [ `Pipe | `No_pipe ] + * [ `Keep_empty | `Filter_empty ] (* Given a Reader.t coming from a process output, optionally log the lines coming from it and return a strict pipe that will get the lines if the @@ -185,7 +182,7 @@ let reader_to_strict_pipe_with_logging : Strict_pipe.Writer.write 
master_w line | `Filter_empty -> if not (String.equal line "") then - Strict_pipe.Writer.write master_w line ) + Strict_pipe.Writer.write master_w line) >>= fun () -> Strict_pipe.Writer.close master_w ; Deferred.unit ) ; @@ -196,32 +193,31 @@ let reader_to_strict_pipe_with_logging : | `Log level -> ( let simple_log_msg = lazy - { Logger.Message.timestamp= Time.now () + { Logger.Message.timestamp = Time.now () ; level - ; source= + ; source = Some (Logger.Source.create ~module_:__MODULE__ ~location:__LOC__) - ; message= "Output from process $child_name: $line" - ; metadata= + ; message = "Output from process $child_name: $line" + ; metadata = String.Map.set ~key:"child_name" ~data:(`String name) (String.Map.set ~key:"line" ~data:(`String line) (Logger.metadata logger)) - ; event_id= None } + ; event_id = None + } in - match - Option.try_with (fun () -> Yojson.Safe.from_string line) - with + match Option.try_with (fun () -> Yojson.Safe.from_string line) with | Some json -> ( - match Logger.Message.of_yojson json with - | Ok msg -> - Logger.raw logger msg - | Error _err -> - Logger.raw logger (Lazy.force simple_log_msg) ) + match Logger.Message.of_yojson json with + | Ok msg -> + Logger.raw logger msg + | Error _err -> + Logger.raw logger (Lazy.force simple_log_msg) ) | None -> Logger.raw logger (Lazy.force simple_log_msg) ) | `Don't_log -> - () )) ; + ())) ; (* Ideally we'd close the pipe, but you can't do that with a reader, so we iterate over it and drop everything. Since Strict_pipe enforces a single reader this is safe. 
*) @@ -241,13 +237,12 @@ let start_custom : -> args:string list -> stdout:output_handling -> stderr:output_handling - -> termination:[ `Always_raise - | `Raise_on_failure - | `Handler of - killed:bool - -> Unix.Exit_or_signal.t Or_error.t - -> unit Deferred.t - | `Ignore ] + -> termination: + [ `Always_raise + | `Raise_on_failure + | `Handler of + killed:bool -> Unix.Exit_or_signal.t Or_error.t -> unit Deferred.t + | `Ignore ] -> t Deferred.Or_error.t = fun ~logger ~name ~git_root_relative_path ~conf_dir ~args ~stdout ~stderr ~termination -> @@ -259,7 +254,7 @@ let start_custom : Deferred.Or_error.return () | _ -> Deferred.Or_error.errorf "Config directory %s does not exist" - conf_dir ) + conf_dir) in let lock_path = conf_dir ^/ name ^ ".lock" in let%bind () = @@ -269,7 +264,8 @@ let start_custom : [%log debug] "Starting custom child process $name with args $args" ~metadata: [ ("name", `String name) - ; ("args", `List (List.map args ~f:(fun a -> `String a))) ] ; + ; ("args", `List (List.map args ~f:(fun a -> `String a))) + ] ; let%bind mina_binary_path = get_mina_binary () in let relative_to_root = get_project_root () @@ -282,14 +278,16 @@ let start_custom : ; relative_to_root ; Some (Filename.dirname mina_binary_path ^/ name) ; Some ("mina-" ^ name) - ; Some ("coda-" ^ name) ]) + ; Some ("coda-" ^ name) + ]) ~f:(fun prog -> Process.create ~stdin:"" ~prog ~args ()) in [%log info] "Custom child process $name started with pid $pid" ~metadata: [ ("name", `String name) ; ("args", `List (List.map args ~f:(fun a -> `String a))) - ; ("pid", `Int (Process.pid process |> Pid.to_int)) ] ; + ; ("pid", `Int (Process.pid process |> Pid.to_int)) + ] ; Termination.wait_for_process_log_errors ~logger process ~module_:__MODULE__ ~location:__LOC__ ~here:[%here] ; let%bind () = @@ -302,22 +300,23 @@ let start_custom : reader_to_strict_pipe_with_logging (Process.stdout process) (name ^ "-stdout") stdout (Logger.extend logger - [("process", `String name); ("handle", `String 
"stdout")]) + [ ("process", `String name); ("handle", `String "stdout") ]) in let stderr_pipe = reader_to_strict_pipe_with_logging (Process.stderr process) (name ^ "-stderr") stderr (Logger.extend logger - [("process", `String name); ("handle", `String "stderr")]) + [ ("process", `String name); ("handle", `String "stderr") ]) in let t = { process ; stdout_pipe ; stderr_pipe - ; stdin= Process.stdin process + ; stdin = Process.stdin process ; terminated_ivar - ; killing= false - ; termination_response= termination } + ; killing = false + ; termination_response = termination + } in don't_wait_for (let open Deferred.Let_syntax in @@ -341,7 +340,7 @@ let start_custom : Error_json.error_to_yojson err in [%log fatal] "Process died unexpectedly: $exit_or_signal" - ~metadata:[("exit_or_signal", exit_or_signal)] ; + ~metadata:[ ("exit_or_signal", exit_or_signal) ] ; raise Child_died in match (t.termination_response, termination_status) with @@ -385,8 +384,8 @@ let kill : t -> Unix.Exit_or_signal.t Deferred.Or_error.t = let register_process ?termination_expected (termination : Termination.t) (process : t) kind = - Termination.register_process ?termination_expected termination - process.process kind + Termination.register_process ?termination_expected termination process.process + kind let%test_module _ = ( module struct @@ -396,7 +395,7 @@ let%test_module _ = Async.Thread_safe.block_on_async_exn (fun () -> File_system.with_temp_dir (Filename.temp_dir_name ^/ "child-processes") - ~f ) + ~f) let name = "tester.sh" @@ -409,7 +408,7 @@ let%test_module _ = let open Deferred.Let_syntax in let%bind process = start_custom ~logger ~name ~git_root_relative_path ~conf_dir - ~args:["exit"] + ~args:[ "exit" ] ~stdout:(`Log Logger.Level.Debug, `Pipe, `Keep_empty) ~stderr:(`Log Logger.Level.Error, `No_pipe, `Keep_empty) ~termination:`Raise_on_failure @@ -418,7 +417,7 @@ let%test_module _ = let%bind () = Strict_pipe.Reader.iter (stdout_lines process) ~f:(fun line -> [%test_eq: string] 
"hello" line ; - Deferred.unit ) + Deferred.unit) in (* Pipe will be closed before the ivar is filled, so we need to wait a bit. *) @@ -426,14 +425,14 @@ let%test_module _ = [%test_eq: Unix.Exit_or_signal.t Or_error.t option] (Some (Ok (Ok ()))) (termination_status process) ; - Deferred.unit ) + Deferred.unit) let%test_unit "killing works" = async_with_temp_dir (fun conf_dir -> let open Deferred.Let_syntax in let%bind process = start_custom ~logger ~name ~git_root_relative_path ~conf_dir - ~args:["loop"] + ~args:[ "loop" ] ~stdout:(`Don't_log, `Pipe, `Keep_empty) ~stderr:(`Don't_log, `No_pipe, `Keep_empty) ~termination:`Always_raise @@ -470,14 +469,14 @@ let%test_module _ = [%test_eq: Unix.Exit_or_signal.t] exit_or_signal (Error (`Signal Signal.term)) ; assert (Option.is_some @@ termination_status process) ; - Deferred.unit ) + Deferred.unit) let%test_unit "if you spawn two processes it kills the earlier one" = async_with_temp_dir (fun conf_dir -> let open Deferred.Let_syntax in let mk_process () = start_custom ~logger ~name ~git_root_relative_path ~conf_dir - ~args:["loop"] + ~args:[ "loop" ] ~stdout:(`Don't_log, `No_pipe, `Keep_empty) ~stderr:(`Don't_log, `No_pipe, `Keep_empty) ~termination:`Ignore @@ -496,5 +495,5 @@ let%test_module _ = (termination_status process2) None ; let%bind _ = kill process2 in - Deferred.unit ) + Deferred.unit) end ) diff --git a/src/lib/child_processes/child_processes.mli b/src/lib/child_processes/child_processes.mli index 1b14e115811..8dc9d279404 100644 --- a/src/lib/child_processes/child_processes.mli +++ b/src/lib/child_processes/child_processes.mli @@ -31,9 +31,9 @@ val termination_status : t -> Unix.Exit_or_signal.t Or_error.t option exception. 
*) type output_handling = - [`Log of Logger.Level.t | `Don't_log] - * [`Pipe | `No_pipe] - * [`Keep_empty | `Filter_empty] + [ `Log of Logger.Level.t | `Don't_log ] + * [ `Pipe | `No_pipe ] + * [ `Keep_empty | `Filter_empty ] (** Start a process, handling a lock file, termination, optional logging, and the standard in, out and error fds. This is for "custom" processes, as @@ -41,23 +41,22 @@ type output_handling = val start_custom : logger:Logger.t -> name:string - (** The name of the executable file, without any coda- prefix *) + (** The name of the executable file, without any coda- prefix *) -> git_root_relative_path:string - (** Path to the built executable, relative to the root of a source checkout + (** Path to the built executable, relative to the root of a source checkout *) -> conf_dir:string - (** Absolute path to the configuration directory for Coda *) + (** Absolute path to the configuration directory for Coda *) -> args:string list (** Arguments to the process *) -> stdout:output_handling (** What to do with process standard out *) -> stderr:output_handling (** What to do with process standard error *) - -> termination:[ `Always_raise - | `Raise_on_failure - | `Handler of - killed:bool - -> Unix.Exit_or_signal.t Or_error.t - -> unit Deferred.t - | `Ignore ] - (** What to do when the process exits. Note that an exception will not be + -> termination: + [ `Always_raise + | `Raise_on_failure + | `Handler of + killed:bool -> Unix.Exit_or_signal.t Or_error.t -> unit Deferred.t + | `Ignore ] + (** What to do when the process exits. Note that an exception will not be raised after you run [kill] on it, regardless of this value. 
An [Error _] passed to a [`Handler _] indicates that there was an error monitoring the process, and that it is unknown whether the diff --git a/src/lib/child_processes/termination.ml b/src/lib/child_processes/termination.ml index 9dc0fb47cf0..83052db4605 100644 --- a/src/lib/child_processes/termination.ml +++ b/src/lib/child_processes/termination.ml @@ -7,9 +7,9 @@ open Core_kernel include Hashable.Make_binable (Pid) type process_kind = Prover | Verifier | Libp2p_helper -[@@deriving show {with_path= false}, yojson] +[@@deriving show { with_path = false }, yojson] -type data = {kind: process_kind; termination_expected: bool} +type data = { kind : process_kind; termination_expected : bool } [@@deriving yojson] type t = data Pid.Table.t @@ -17,12 +17,12 @@ type t = data Pid.Table.t let create_pid_table () : t = Pid.Table.create () let register_process ?(termination_expected = false) (t : t) process kind = - let data = {kind; termination_expected} in + let data = { kind; termination_expected } in Pid.Table.add_exn t ~key:(Process.pid process) ~data let mark_termination_as_expected t child_pid = Pid.Table.change t child_pid - ~f:(Option.map ~f:(fun r -> {r with termination_expected= true})) + ~f:(Option.map ~f:(fun r -> { r with termination_expected = true })) let remove : t -> Pid.t -> unit = Pid.Table.remove @@ -32,9 +32,9 @@ let get_signal_cause_opt = let signal_causes_tbl : string Table.t = Table.create () in List.iter [ (kill, "Process killed because out of memory") - ; (int, "Process interrupted by user or other program") ] - ~f:(fun (signal, msg) -> - Base.ignore (Table.add signal_causes_tbl ~key:signal ~data:msg) ) ; + ; (int, "Process interrupted by user or other program") + ] ~f:(fun (signal, msg) -> + Base.ignore (Table.add signal_causes_tbl ~key:signal ~data:msg)) ; fun signal -> Signal.Table.find signal_causes_tbl signal let get_child_data (t : t) child_pid = Pid.Table.find t child_pid @@ -52,9 +52,10 @@ let check_terminated_child (t : t) child_pid logger = 
unexpectedly terminated" ~metadata: [ ("child_pid", `Int (Pid.to_int child_pid)) - ; ("process_kind", `String kind) ] ; - failwithf "Child process of kind %s has unexpectedly terminated" kind - () ) + ; ("process_kind", `String kind) + ] ; + failwithf "Child process of kind %s has unexpectedly terminated" kind () + ) (** wait for a [process], which may resolve immediately or in a Deferred.t, log any errors, attributing the source to the provided [module] and [location] @@ -77,7 +78,7 @@ let wait_for_process_log_errors ~logger process ~module_ ~location ~here = let err = Error.of_exn exn in Logger.error logger ~module_ ~location "Saw a deferred exception $exn after waiting for process" - ~metadata:[("exn", Error_json.error_to_yojson err)] )) + ~metadata:[ ("exn", Error_json.error_to_yojson err) ])) (fun () -> Process.wait process) in don't_wait_for @@ -88,11 +89,11 @@ let wait_for_process_log_errors ~logger process ~module_ ~location ~here = let err = Error.of_exn exn in Logger.error logger ~module_ ~location "Saw a deferred exception $exn while waiting for process" - ~metadata:[("exn", Error_json.error_to_yojson err)] ) ) + ~metadata:[ ("exn", Error_json.error_to_yojson err) ] )) with | Ok _ -> () | Error err -> Logger.error logger ~module_ ~location "Saw an immediate exception $exn while waiting for process" - ~metadata:[("exn", Error_json.error_to_yojson err)] + ~metadata:[ ("exn", Error_json.error_to_yojson err) ] diff --git a/src/lib/cli_lib/arg_type.ml b/src/lib/cli_lib/arg_type.ml index 79ff571b221..b06bcd67480 100644 --- a/src/lib/cli_lib/arg_type.ml +++ b/src/lib/cli_lib/arg_type.ml @@ -24,21 +24,21 @@ let public_key_compressed = exit 1 in try Public_key.of_base58_check_decompress_exn s - with e -> error_string (Error.of_exn e) ) + with e -> error_string (Error.of_exn e)) (* Hack to allow us to deprecate a value without needing to add an mli * just for this. 
We only want to have one "kind" of public key in the * public-facing interface if possible *) include ( struct - let public_key = - Command.Arg_type.map public_key_compressed ~f:(fun pk -> - match Public_key.decompress pk with - | None -> - failwith "Invalid key" - | Some pk' -> - pk' ) - end : + let public_key = + Command.Arg_type.map public_key_compressed ~f:(fun pk -> + match Public_key.decompress pk with + | None -> + failwith "Invalid key" + | Some pk' -> + pk') + end : sig val public_key : Public_key.t Command.Arg_type.t [@@deprecated "Use public_key_compressed in commandline args"] @@ -74,8 +74,7 @@ let hd_index = let ip_address = Command.Arg_type.map Command.Param.string ~f:Unix.Inet_addr.of_string -let cidr_mask = - Command.Arg_type.map Command.Param.string ~f:Unix.Cidr.of_string +let cidr_mask = Command.Arg_type.map Command.Param.string ~f:Unix.Cidr.of_string let log_level = Command.Arg_type.map Command.Param.string ~f:(fun log_level_str_with_case -> @@ -90,14 +89,14 @@ let log_level = |> String.concat ~sep:", " ) ; exit 14 | Ok ll -> - ll ) + ll) let user_command = Command.Arg_type.create (fun s -> try Mina_base.Signed_command.of_base58_check_exn s with e -> Error.tag (Error.of_exn e) ~tag:"Couldn't decode transaction id" - |> Error.raise ) + |> Error.raise) module Work_selection_method = struct [%%versioned diff --git a/src/lib/cli_lib/background_daemon.ml b/src/lib/cli_lib/background_daemon.ml index 4d3fbbdd21e..3c05f405233 100644 --- a/src/lib/cli_lib/background_daemon.ml +++ b/src/lib/cli_lib/background_daemon.ml @@ -21,12 +21,11 @@ let run ~f (t : Host_and_port.t Flag.Types.with_name) arg = - The daemon might not be running. 
See logs (in \ `~/.mina-config/mina.log`) for details under the host:%s.\n\ \ Run `mina daemon -help` to see how to start daemon.\n\ - - If you just started the daemon, wait a minute for the RPC \ - server to start.\n\ + - If you just started the daemon, wait a minute for the RPC server \ + to start.\n\ - Alternatively, the daemon may not be running the RPC server on \ %{sexp:Host_and_port.t}.\n\ - \ If so, add flag `-%s` with correct port when running this \ - command.\n" + \ If so, add flag `-%s` with correct port when running this command.\n" (Host_and_port.host t.value) t.value t.name ; go Abort diff --git a/src/lib/cli_lib/commands.ml b/src/lib/cli_lib/commands.ml index 02e39f6b303..5cf3e3c9138 100644 --- a/src/lib/cli_lib/commands.ml +++ b/src/lib/cli_lib/commands.ml @@ -33,15 +33,15 @@ let validate_keypair = In_channel.with_file pubkey_path ~f:(fun in_channel -> match In_channel.input_line in_channel with | Some line -> ( - try Public_key.Compressed.of_base58_check_exn line - with _exn -> - eprintf - "Could not create public key in file %s from text: %s\n" - pubkey_path line ; - exit 1 ) + try Public_key.Compressed.of_base58_check_exn line + with _exn -> + eprintf + "Could not create public key in file %s from text: %s\n" + pubkey_path line ; + exit 1 ) | None -> eprintf "No public key found in file %s\n" pubkey_path ; - exit 1 ) + exit 1) with exn -> eprintf "Could not read public key file %s, error: %s\n" pubkey_path (Exn.to_string exn) ; @@ -70,8 +70,7 @@ let validate_keypair = (Snark_params.Tick.Inner_curve.of_affine keypair.public_key) message in - if verified then - printf "Verified a transaction using specified keypair\n" + if verified then printf "Verified a transaction using specified keypair\n" else ( eprintf "Failed to verify a transaction using the specific keypair\n" ; exit 1 ) @@ -79,8 +78,7 @@ let validate_keypair = let open Deferred.Let_syntax in let%bind () = let password = - lazy - (Secrets.Keypair.Terminal_stdin.prompt_password "Enter 
password: ") + lazy (Secrets.Keypair.Terminal_stdin.prompt_password "Enter password: ") in match%map Secrets.Keypair.read ~privkey_path ~password with | Ok keypair -> @@ -124,8 +122,8 @@ let validate_transaction = error:%s@." (Yojson.Safe.pretty_to_string transaction_json) (Yojson.Safe.pretty_to_string - (Error_json.error_to_yojson err)) ) - jsons ) + (Error_json.error_to_yojson err))) + jsons) with | Ok () -> () @@ -167,8 +165,8 @@ module Vrf = struct and generate_outputs = flag "--generate-outputs" ~doc: - "true|false Whether to generate the vrf in addition to the \ - witness (default: false)" + "true|false Whether to generate the vrf in addition to the witness \ + (default: false)" (optional_with_default false bool) and delegated_stake = flag "--delegated-stake" @@ -197,21 +195,24 @@ module Vrf = struct let open Consensus_vrf.Layout in let evaluation = Evaluation.of_message_and_sk ~constraint_constants - { global_slot= Mina_numbers.Global_slot.of_int global_slot - ; epoch_seed= + { global_slot = Mina_numbers.Global_slot.of_int global_slot + ; epoch_seed = Mina_base.Epoch_seed.of_base58_check_exn epoch_seed - ; delegator_index } + ; delegator_index + } keypair.private_key in let evaluation = match (delegated_stake, total_stake) with | Some delegated_stake, Some total_stake -> { evaluation with - vrf_threshold= + vrf_threshold = Some - { delegated_stake= + { delegated_stake = Currency.Balance.of_int delegated_stake - ; total_stake= Currency.Amount.of_int total_stake } } + ; total_stake = Currency.Amount.of_int total_stake + } + } | _ -> evaluation in @@ -234,9 +235,9 @@ module Vrf = struct let batch_generate_witness = Command.async ~summary: - "Generate a batch of vrf evaluation witnesses from {\"globalSlot\": \ - _, \"epochSeed\": _, \"delegatorIndex\": _} JSON message objects \ - read on stdin" + "Generate a batch of vrf evaluation witnesses from {\"globalSlot\": _, \ + \"epochSeed\": _, \"delegatorIndex\": _} JSON message objects read on \ + stdin" (let open 
Command.Let_syntax in let env = Secrets.Keypair.env in if Option.is_some (Sys.getenv env) then @@ -276,7 +277,7 @@ module Vrf = struct (Yojson.Safe.pretty_print ?std:None) (Evaluation.to_yojson evaluation) ; Deferred.return (`Repeat ()) - with Yojson.End_of_input -> return (`Finished ()) ) + with Yojson.End_of_input -> return (`Finished ())) >>| function | Ok x -> x @@ -284,7 +285,7 @@ module Vrf = struct Format.eprintf "Error:@.%s@.@." (Yojson.Safe.pretty_to_string (Error_json.error_to_yojson err)) ; - `Repeat () ) + `Repeat ()) | Error err -> eprintf "Could not read the specified keypair: %s\n" (Secrets.Privkey_error.to_string err) ; @@ -301,8 +302,8 @@ module Vrf = struct if given. The threshold should be included in the JSON for each vrf \ as the 'vrfThreshold' field, of format {delegatedStake: 1000, \ totalStake: 1000000000}. The threshold is not checked against a \ - ledger; this should be done manually to confirm whether \ - threshold_met in the output corresponds to an actual won block." + ledger; this should be done manually to confirm whether threshold_met \ + in the output corresponds to an actual won block." ( Command.Param.return @@ Exceptions.handle_nicely @@ fun () -> let open Deferred.Let_syntax in @@ -321,8 +322,7 @@ module Vrf = struct in let open Consensus_vrf.Layout in let evaluation = - Result.ok_or_failwith - (Evaluation.of_yojson evaluation_json) + Result.ok_or_failwith (Evaluation.of_yojson evaluation_json) in let evaluation = Evaluation.compute_vrf ~constraint_constants evaluation @@ -331,7 +331,7 @@ module Vrf = struct (Yojson.Safe.pretty_print ?std:None) (Evaluation.to_yojson evaluation) ; Deferred.return (`Repeat ()) - with Yojson.End_of_input -> return (`Finished ()) ) + with Yojson.End_of_input -> return (`Finished ())) >>| function | Ok x -> x @@ -339,7 +339,7 @@ module Vrf = struct Format.eprintf "Error:@.%s@.@." 
(Yojson.Safe.pretty_to_string (Error_json.error_to_yojson err)) ; - `Repeat () ) + `Repeat ()) in exit 0 ) @@ -347,5 +347,6 @@ module Vrf = struct Command.group ~summary:"Commands for vrf evaluations" [ ("generate-witness", generate_witness) ; ("batch-generate-witness", batch_generate_witness) - ; ("batch-check-witness", batch_check_witness) ] + ; ("batch-check-witness", batch_check_witness) + ] end diff --git a/src/lib/cli_lib/flag.ml b/src/lib/cli_lib/flag.ml index 9ede7cd3bf5..32e28af9e3d 100644 --- a/src/lib/cli_lib/flag.ml +++ b/src/lib/cli_lib/flag.ml @@ -2,48 +2,49 @@ open Core let json = Command.Param.( - flag "--json" ~aliases:["json"] no_arg + flag "--json" ~aliases:[ "json" ] no_arg ~doc:"Use JSON output (default: plaintext)") let plaintext = Command.Param.( - flag "--plaintext" ~aliases:["plaintext"] no_arg + flag "--plaintext" ~aliases:[ "plaintext" ] no_arg ~doc:"Use plaintext input or output (default: JSON)") let performance = Command.Param.( - flag "--performance" ~aliases:["performance"] no_arg + flag "--performance" ~aliases:[ "performance" ] no_arg ~doc: "Include performance histograms in status output (default: don't \ include)") let privkey_write_path = let open Command.Param in - flag "--privkey-path" ~aliases:["privkey-path"] + flag "--privkey-path" ~aliases:[ "privkey-path" ] ~doc:"FILE File to write private key into (public key will be FILE.pub)" (required string) let privkey_read_path = let open Command.Param in - flag "--privkey-path" ~aliases:["privkey-path"] + flag "--privkey-path" ~aliases:[ "privkey-path" ] ~doc:"FILE File to read private key from" (required string) let conf_dir = let open Command.Param in - flag "--config-directory" ~aliases:["config-directory"] + flag "--config-directory" ~aliases:[ "config-directory" ] ~doc:"DIR Configuration directory" (optional string) module Doc_builder = struct type 'value t = - { type_name: string - ; description: string - ; examples: 'value list - ; display: 'value -> string } + { type_name : 
string + ; description : string + ; examples : 'value list + ; display : 'value -> string + } let create ~display ?(examples = []) type_name description = - {type_name; description; examples; display} + { type_name; description; examples; display } - let display ~default {type_name; description; examples; display} = + let display ~default { type_name; description; examples; display } = let open Printf in let example_text = if List.is_empty examples then "" @@ -59,10 +60,10 @@ module Doc_builder = struct end module Types = struct - type 'a with_name = {name: string; value: 'a} + type 'a with_name = { name : string; value : 'a } type 'a with_name_and_displayed_default = - {name: string; value: 'a option; default: 'a} + { name : string; value : 'a option; default : 'a } (*Difference between Optional and Optional_value is that the name is still accessible if the value is None*) type ('value, 'output) t = @@ -77,7 +78,7 @@ end let setup_flag ~arg_type ~name ?aliases doc = let open Command.Let_syntax in Command.Param.flag name ?aliases ~doc (Command.Param.optional arg_type) - >>| Option.map ~f:(fun value -> {Types.name; value}) + >>| Option.map ~f:(fun value -> { Types.name; value }) let create (type value output) : name:string @@ -95,22 +96,22 @@ let create (type value output) : setup_flag ~arg_type ~name ?aliases (Doc_builder.display ~default:None doc_builder) >>| function - | Some {name; value} -> - {Types.name; value= Some value} + | Some { name; value } -> + { Types.name; value = Some value } | None -> - {name; value= None} ) + { name; value = None } ) | Optional_with_displayed_default default -> ( setup_flag ~arg_type ~name ?aliases (Doc_builder.display ~default:(Some default) doc_builder) >>| function - | Some {name; value} -> - {Types.name; value= Some value; default} + | Some { name; value } -> + { Types.name; value = Some value; default } | None -> - {name; value= None; default} ) + { name; value = None; default } ) | Resolve_with_default default -> setup_flag 
~arg_type ~name ?aliases (Doc_builder.display ~default:(Some default) doc_builder) - >>| Option.value ~default:{Types.name; value= default} + >>| Option.value ~default:{ Types.name; value = default } module Port = struct let to_string = Int.to_string @@ -150,28 +151,28 @@ module Port = struct module Daemon = struct let external_ = - create ~name:"--external-port" ~aliases:["external-port"] + create ~name:"--external-port" ~aliases:[ "external-port" ] ~default:default_libp2p "Port to use for all libp2p communications (gossip and RPC)" let client = - create ~name:"--client-port" ~aliases:["client-port"] + create ~name:"--client-port" ~aliases:[ "client-port" ] ~default:default_client "local RPC-server for clients to interact with the daemon" let rest_server = - create ~name:"--rest-port" ~aliases:["rest-port"] ~default:default_rest + create ~name:"--rest-port" ~aliases:[ "rest-port" ] ~default:default_rest "local REST-server for daemon interaction" let limited_graphql_server = create_optional ~name:"--limited-graphql-port" - ~aliases:["limited-graphql-port"] + ~aliases:[ "limited-graphql-port" ] "GraphQL-server for limited daemon interaction" end module Archive = struct let server = - create ~name:"--server-port" ~aliases:["server-port"] + create ~name:"--server-port" ~aliases:[ "server-port" ] ~default:default_archive "port to launch the archive server" end end @@ -181,7 +182,7 @@ module Host = struct let is_localhost host = Option.value_map ~default:false (Unix.Host.getbyname host) ~f:(fun host -> - Core.Unix.Host.have_address_in_common host localhost ) + Core.Unix.Host.have_address_in_common host localhost) end let example_host = "154.97.53.97" @@ -206,7 +207,9 @@ module Host_and_port = struct else Host_and_port.to_string host_and_port let create_examples port = - [Port.to_host_and_port port; Host_and_port.create ~host:example_host ~port] + [ Port.to_host_and_port port + ; Host_and_port.create ~host:example_host ~port + ] let make_doc_builder description 
example_port = Doc_builder.create ~display:to_string @@ -217,7 +220,7 @@ module Host_and_port = struct module Client = struct let daemon = - create ~name:"--daemon-port" ~aliases:["daemon-port"] ~arg_type + create ~name:"--daemon-port" ~aliases:[ "daemon-port" ] ~arg_type (make_doc_builder "Client to local daemon communication" Port.default_client) (Resolve_with_default (Port.to_host_and_port Port.default_client)) @@ -225,7 +228,7 @@ module Host_and_port = struct module Daemon = struct let archive = - create ~name:"--archive-address" ~aliases:["archive-address"] ~arg_type + create ~name:"--archive-address" ~aliases:[ "archive-address" ] ~arg_type (make_doc_builder "Daemon to archive process communication" Port.default_archive) Optional @@ -260,7 +263,8 @@ module Uri = struct ; Uri.of_string ( "/dns4/peer1-rising-phoenix.o1test.net" ^ ":" ^ Int.to_string Port.default_rest - ^/ "graphql" ) ] + ^/ "graphql" ) + ] "URI/LOCALHOST-PORT" "graphql rest server for daemon interaction" let name = "rest-server" @@ -268,12 +272,12 @@ module Uri = struct let default = Port.to_uri ~path:"graphql" Port.default_rest let rest_graphql = - create ~name:"--rest-server" ~aliases:["rest-server"] + create ~name:"--rest-server" ~aliases:[ "rest-server" ] ~arg_type:(arg_type ~path:"graphql") doc_builder (Resolve_with_default default) let rest_graphql_opt = - create ~name:"--rest-server" ~aliases:["rest-server"] + create ~name:"--rest-server" ~aliases:[ "rest-server" ] ~arg_type:(arg_type ~path:"graphql") doc_builder Optional end @@ -282,10 +286,11 @@ module Uri = struct let doc_builder = Doc_builder.create ~display:to_string ~examples: - [Uri.of_string "postgres://admin:codarules@postgres:5432/archiver"] + [ Uri.of_string "postgres://admin:codarules@postgres:5432/archiver" + ] "URI" "URI for postgresql database" in - create ~name:"--postgres-uri" ~aliases:["postgres-uri"] + create ~name:"--postgres-uri" ~aliases:[ "postgres-uri" ] ~arg_type:(Command.Arg_type.map Command.Param.string 
~f:Uri.of_string) doc_builder (Resolve_with_default @@ -296,7 +301,7 @@ end module Log = struct let json = let open Command.Param in - flag "--log-json" ~aliases:["log-json"] no_arg + flag "--log-json" ~aliases:[ "log-json" ] no_arg ~doc:"Print log output as JSON (default: plain text)" let all_levels = @@ -306,7 +311,7 @@ module Log = struct let log_level = Arg_type.log_level in let open Command.Param in let doc = sprintf "LEVEL Set log level (%s, default: Info)" all_levels in - flag "--log-level" ~aliases:["log-level"] ~doc + flag "--log-level" ~aliases:[ "log-level" ] ~doc (optional_with_default Logger.Level.Info log_level) let file_log_level = @@ -316,25 +321,26 @@ module Log = struct sprintf "LEVEL Set log level for the log file (%s, default: Trace)" all_levels in - flag "--file-log-level" ~aliases:["file-log-level"] ~doc + flag "--file-log-level" ~aliases:[ "file-log-level" ] ~doc (optional_with_default Logger.Level.Trace log_level) end type signed_command_common = - { sender: Signature_lib.Public_key.Compressed.t - ; fee: Currency.Fee.t - ; nonce: Mina_base.Account.Nonce.t option - ; memo: string option } + { sender : Signature_lib.Public_key.Compressed.t + ; fee : Currency.Fee.t + ; nonce : Mina_base.Account.Nonce.t option + ; memo : string option + } let signed_command_common : signed_command_common Command.Param.t = let open Command.Let_syntax in let open Arg_type in let%map_open sender = - flag "--sender" ~aliases:["sender"] + flag "--sender" ~aliases:[ "sender" ] (required public_key_compressed) ~doc:"PUBLICKEY Public key from which you want to send the transaction" and fee = - flag "--fee" ~aliases:["fee"] + flag "--fee" ~aliases:[ "fee" ] ~doc: (Printf.sprintf "FEE Amount you are willing to pay to process the transaction \ @@ -345,44 +351,44 @@ let signed_command_common : signed_command_common Command.Param.t = Mina_base.Signed_command.minimum_fee)) (optional txn_fee) and nonce = - flag "--nonce" ~aliases:["nonce"] + flag "--nonce" ~aliases:[ "nonce" ] 
~doc: - "NONCE Nonce that you would like to set for your transaction \ - (default: nonce of your account on the best ledger or the successor \ - of highest value nonce of your sent transactions from the \ - transaction pool )" + "NONCE Nonce that you would like to set for your transaction (default: \ + nonce of your account on the best ledger or the successor of highest \ + value nonce of your sent transactions from the transaction pool )" (optional txn_nonce) and memo = - flag "--memo" ~aliases:["memo"] + flag "--memo" ~aliases:[ "memo" ] ~doc:"STRING Memo accompanying the transaction" (optional string) in { sender - ; fee= Option.value fee ~default:Mina_compile_config.default_transaction_fee + ; fee = Option.value fee ~default:Mina_compile_config.default_transaction_fee ; nonce - ; memo } + ; memo + } module Signed_command = struct open Arg_type let hd_index = let open Command.Param in - flag "--hd-index" ~aliases:["HD-index"] + flag "--hd-index" ~aliases:[ "HD-index" ] ~doc:"HD-INDEX Index used by hardware wallet" (required hd_index) let receiver_pk = let open Command.Param in - flag "--receiver" ~aliases:["receiver"] + flag "--receiver" ~aliases:[ "receiver" ] ~doc:"PUBLICKEY Public key to which you want to send money" (required public_key_compressed) let amount = let open Command.Param in - flag "--amount" ~aliases:["amount"] + flag "--amount" ~aliases:[ "amount" ] ~doc:"VALUE Payment amount you want to send" (required txn_amount) let fee = let open Command.Param in - flag "--fee" ~aliases:["fee"] + flag "--fee" ~aliases:[ "fee" ] ~doc: (Printf.sprintf "FEE Amount you are willing to pay to process the transaction \ @@ -395,7 +401,7 @@ module Signed_command = struct let valid_until = let open Command.Param in - flag "--valid-until" ~aliases:["valid-until"] + flag "--valid-until" ~aliases:[ "valid-until" ] ~doc: "GLOBAL-SLOT The last global-slot at which this transaction will be \ considered valid. 
This makes it possible to have transactions which \ @@ -405,17 +411,16 @@ module Signed_command = struct let nonce = let open Command.Param in - flag "--nonce" ~aliases:["nonce"] + flag "--nonce" ~aliases:[ "nonce" ] ~doc: - "NONCE Nonce that you would like to set for your transaction \ - (default: nonce of your account on the best ledger or the successor \ - of highest value nonce of your sent transactions from the \ - transaction pool )" + "NONCE Nonce that you would like to set for your transaction (default: \ + nonce of your account on the best ledger or the successor of highest \ + value nonce of your sent transactions from the transaction pool )" (optional txn_nonce) let memo = let open Command.Param in - flag "--memo" ~aliases:["memo"] + flag "--memo" ~aliases:[ "memo" ] ~doc: (sprintf "STRING Memo accompanying the transaction (up to %d characters)" diff --git a/src/lib/cli_lib/flag.mli b/src/lib/cli_lib/flag.mli index b36ee89a920..10116b51e39 100644 --- a/src/lib/cli_lib/flag.mli +++ b/src/lib/cli_lib/flag.mli @@ -13,10 +13,10 @@ val privkey_read_path : string Command.Param.t val conf_dir : string option Command.Param.t module Types : sig - type 'a with_name = {name: string; value: 'a} + type 'a with_name = { name : string; value : 'a } type 'a with_name_and_displayed_default = - {name: string; value: 'a option; default: 'a} + { name : string; value : 'a option; default : 'a } end module Host_and_port : sig @@ -49,13 +49,11 @@ end module Port : sig module Daemon : sig - val external_ : - int Types.with_name_and_displayed_default Command.Param.t + val external_ : int Types.with_name_and_displayed_default Command.Param.t val client : int Types.with_name_and_displayed_default Command.Param.t - val rest_server : - int Types.with_name_and_displayed_default Command.Param.t + val rest_server : int Types.with_name_and_displayed_default Command.Param.t val limited_graphql_server : int option Types.with_name Command.Param.t end @@ -74,10 +72,11 @@ module Log : sig 
end type signed_command_common = - { sender: Signature_lib.Public_key.Compressed.t - ; fee: Currency.Fee.t - ; nonce: Mina_base.Account.Nonce.t option - ; memo: string option } + { sender : Signature_lib.Public_key.Compressed.t + ; fee : Currency.Fee.t + ; nonce : Mina_base.Account.Nonce.t option + ; memo : string option + } val signed_command_common : signed_command_common Command.Param.t diff --git a/src/lib/cli_lib/graphql_types.ml b/src/lib/cli_lib/graphql_types.ml index b9c87509fbf..861444d56ff 100644 --- a/src/lib/cli_lib/graphql_types.ml +++ b/src/lib/cli_lib/graphql_types.ml @@ -4,9 +4,10 @@ module Completed_works = struct module Work = struct type t = - { work_ids: int list - ; fee: Currency.Fee.t - ; prover: Signature_lib.Public_key.Compressed.t } + { work_ids : int list + ; fee : Currency.Fee.t + ; prover : Signature_lib.Public_key.Compressed.t + } [@@deriving yojson] end @@ -17,11 +18,12 @@ end module Pending_snark_work = struct module Work = struct type t = - { work_id: int - ; fee_excess: Currency.Fee.Signed.t - ; supply_increase: Currency.Amount.t - ; source_ledger_hash: Mina_base.Frozen_ledger_hash.t - ; target_ledger_hash: Mina_base.Frozen_ledger_hash.t } + { work_id : int + ; fee_excess : Currency.Fee.Signed.t + ; supply_increase : Currency.Amount.t + ; source_ledger_hash : Mina_base.Frozen_ledger_hash.t + ; target_ledger_hash : Mina_base.Frozen_ledger_hash.t + } [@@deriving yojson] end diff --git a/src/lib/cli_lib/render.ml b/src/lib/cli_lib/render.ml index 8313fdf934b..53ff8741ce2 100644 --- a/src/lib/cli_lib/render.ml +++ b/src/lib/cli_lib/render.ml @@ -26,7 +26,7 @@ module String_list_formatter = struct List.mapi pks ~f:(fun i pk -> let i = i + 1 in let padding = String.init (max_padding - log10 i) ~f:(fun _ -> ' ') in - sprintf "%s%i, %s" padding i pk ) + sprintf "%s%i, %s" padding i pk) |> String.concat ~sep:"\n" end @@ -46,17 +46,18 @@ module Public_key_with_details = struct let to_yojson (public_key, balance, nonce) = `Assoc [ ( public_key 
- , `Assoc [("balance", `Int balance); ("nonce", `Int nonce)] ) ] + , `Assoc [ ("balance", `Int balance); ("nonce", `Int nonce) ] ) + ] end type t = Pretty_account.t list [@@deriving to_yojson] - type format = {accounts: t} [@@deriving to_yojson, fields] + type format = { accounts : t } [@@deriving to_yojson, fields] - let to_yojson t = format_to_yojson {accounts= t} + let to_yojson t = format_to_yojson { accounts = t } let to_text account = List.map account ~f:(fun (public_key, balance, nonce) -> - sprintf !"%s, %d, %d" public_key balance nonce ) + sprintf !"%s, %d, %d" public_key balance nonce) |> String.concat ~sep:"\n" end diff --git a/src/lib/cli_lib/stdout_log.ml b/src/lib/cli_lib/stdout_log.ml index c0ca2fb1922..8894c60aec5 100644 --- a/src/lib/cli_lib/stdout_log.ml +++ b/src/lib/cli_lib/stdout_log.ml @@ -4,9 +4,10 @@ let setup log_json log_level = else Logger.Processor.pretty ~log_level ~config: - { Logproc_lib.Interpolator.mode= Inline - ; max_interpolation_length= 50 - ; pretty_print= true } + { Logproc_lib.Interpolator.mode = Inline + ; max_interpolation_length = 50 + ; pretty_print = true + } in Logger.Consumer_registry.register ~id:"default" ~processor:stdout_log_processor diff --git a/src/lib/coda_plugins/examples/toplevel/plugin_toplevel.ml b/src/lib/coda_plugins/examples/toplevel/plugin_toplevel.ml index 1ce89a379a8..4652af447d2 100644 --- a/src/lib/coda_plugins/examples/toplevel/plugin_toplevel.ml +++ b/src/lib/coda_plugins/examples/toplevel/plugin_toplevel.ml @@ -20,7 +20,7 @@ let read_input = Writer.write stdout prompt ; Thread_safe.block_on_async_exn (fun () -> let%bind () = Writer.flushed stdout in - go buffer len 0 ) + go buffer len 0) let () = let config = Coda_lib.config coda in diff --git a/src/lib/command_line_tests/command_line_tests.ml b/src/lib/command_line_tests/command_line_tests.ml index 59558f5cc56..0e50a8ef7f4 100644 --- a/src/lib/command_line_tests/command_line_tests.ml +++ b/src/lib/command_line_tests/command_line_tests.ml @@ 
-18,8 +18,7 @@ let%test_module "Command line tests" = the mina.exe executable must have been built before running the test here, else it will fail - - *) + *) let coda_exe = "../../app/cli/src/mina.exe" let start_daemon config_dir genesis_ledger_dir port = @@ -41,7 +40,8 @@ let%test_module "Command line tests" = ; "-genesis-ledger-dir" ; genesis_ledger_dir ; "-current-protocol-version" - ; "0.0.0" ] + ; "0.0.0" + ] () with | Ok s -> @@ -53,11 +53,11 @@ let%test_module "Command line tests" = let stop_daemon port = Process.run () ~prog:coda_exe - ~args:["client"; "stop-daemon"; "-daemon-port"; sprintf "%d" port] + ~args:[ "client"; "stop-daemon"; "-daemon-port"; sprintf "%d" port ] let start_client port = Process.run ~prog:coda_exe - ~args:["client"; "status"; "-daemon-port"; sprintf "%d" port] + ~args:[ "client"; "status"; "-daemon-port"; sprintf "%d" port ] () let create_config_directories () = @@ -67,8 +67,8 @@ let%test_module "Command line tests" = (conf, genesis) let remove_config_directory config_dir genesis_dir = - let%bind _ = Process.run_exn ~prog:"rm" ~args:["-rf"; config_dir] () in - Process.run_exn ~prog:"rm" ~args:["-rf"; genesis_dir] () + let%bind _ = Process.run_exn ~prog:"rm" ~args:[ "-rf"; config_dir ] () in + Process.run_exn ~prog:"rm" ~args:[ "-rf"; genesis_dir ] () |> Deferred.ignore_m let test_background_daemon () = @@ -88,7 +88,7 @@ let%test_module "Command line tests" = Core.Printf.printf !"**** DAEMON CRASHED (OUTPUT BELOW) ****\n%s\n************\n%!" 
contents ) ; - remove_config_directory config_dir genesis_ledger_dir ) + remove_config_directory config_dir genesis_ledger_dir) (fun () -> match%map let open Deferred.Or_error.Let_syntax in @@ -122,7 +122,7 @@ let%test_module "Command line tests" = true | Error err -> test_failed := true ; - Error.raise err ) + Error.raise err) let%test "The mina daemon works in background mode" = match Core.Sys.is_file coda_exe with diff --git a/src/lib/consensus/consensus.ml b/src/lib/consensus/consensus.ml index e925b4711c9..03affe8da2b 100644 --- a/src/lib/consensus/consensus.ml +++ b/src/lib/consensus/consensus.ml @@ -1,20 +1,16 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] module Intf = Intf -[%%if -consensus_mechanism = "proof_of_stake"] +[%%if consensus_mechanism = "proof_of_stake"] include Proof_of_stake [%%else] -[%%show -consesus_mechanism] +[%%show consesus_mechanism] -[%%optcomp.error -"invalid value for \"consensus_mechanism\""] +[%%optcomp.error "invalid value for \"consensus_mechanism\""] [%%endif] diff --git a/src/lib/consensus/consensus.mli b/src/lib/consensus/consensus.mli index eaeb4f63df5..31c7f2ce925 100644 --- a/src/lib/consensus/consensus.mli +++ b/src/lib/consensus/consensus.mli @@ -6,10 +6,10 @@ module Intf : module type of Intf include module type of Proof_of_stake - with module Exported := Proof_of_stake.Exported - and type Data.Block_data.t = Proof_of_stake.Data.Block_data.t - and type Data.Consensus_state.Value.Stable.V1.t = - Proof_of_stake.Data.Consensus_state.Value.Stable.V1.t + with module Exported := Proof_of_stake.Exported + and type Data.Block_data.t = Proof_of_stake.Data.Block_data.t + and type Data.Consensus_state.Value.Stable.V1.t = + Proof_of_stake.Data.Consensus_state.Value.Stable.V1.t [%%else] diff --git a/src/lib/consensus/constants.ml b/src/lib/consensus/constants.ml index b53a3ecd19a..ae881752a3e 100644 --- a/src/lib/consensus/constants.ml +++ b/src/lib/consensus/constants.ml @@ -9,21 +9,22 @@ module Poly = struct 
module Stable = struct module V1 = struct type ('length, 'time, 'timespan) t = - { k: 'length - ; delta: 'length - ; slots_per_sub_window: 'length - ; slots_per_window: 'length - ; sub_windows_per_window: 'length - ; slots_per_epoch: 'length (* The first slot after the grace period. *) - ; grace_period_end: 'length - ; epoch_size: 'length - ; checkpoint_window_slots_per_year: 'length - ; checkpoint_window_size_in_slots: 'length - ; block_window_duration_ms: 'timespan - ; slot_duration_ms: 'timespan - ; epoch_duration: 'timespan - ; delta_duration: 'timespan - ; genesis_state_timestamp: 'time } + { k : 'length + ; delta : 'length + ; slots_per_sub_window : 'length + ; slots_per_window : 'length + ; sub_windows_per_window : 'length + ; slots_per_epoch : 'length (* The first slot after the grace period. *) + ; grace_period_end : 'length + ; epoch_size : 'length + ; checkpoint_window_slots_per_year : 'length + ; checkpoint_window_size_in_slots : 'length + ; block_window_duration_ms : 'timespan + ; slot_duration_ms : 'timespan + ; epoch_duration : 'timespan + ; delta_duration : 'timespan + ; genesis_state_timestamp : 'time + } [@@deriving equal, compare, hash, sexp, to_yojson, hlist] end end] @@ -89,9 +90,9 @@ end module Constants_UInt32 : M_intf - with type length = Length.t - and type time = Block_time.t - and type timespan = Block_time.Span.t = struct + with type length = Length.t + and type time = Block_time.t + and type timespan = Block_time.Span.t = struct type t = UInt32.t type length = Length.t @@ -131,9 +132,9 @@ end module Constants_checked : M_intf - with type length = Length.Checked.t - and type time = Block_time.Unpacked.var - and type timespan = Block_time.Span.Unpacked.var = struct + with type length = Length.Checked.t + and type time = Block_time.Unpacked.var + and type timespan = Block_time.Span.Unpacked.var = struct open Snarky_integer type t = field Integer.t @@ -193,7 +194,7 @@ let create' (type a b c) let k = of_length protocol_constants.k in let 
delta = of_length protocol_constants.delta in (*TODO: sub_windows_per_window, slots_per_sub_window are currently dummy - values and need to be updated before mainnet*) + values and need to be updated before mainnet*) let slots_per_sub_window = of_length protocol_constants.slots_per_sub_window in @@ -243,21 +244,22 @@ let create' (type a b c) slots in let res : (a, b, c) Poly.t = - { Poly.k= to_length k - ; delta= to_length delta - ; block_window_duration_ms= to_timespan block_window_duration_ms - ; slots_per_sub_window= to_length slots_per_sub_window - ; slots_per_window= to_length slots_per_window - ; sub_windows_per_window= to_length sub_windows_per_window - ; slots_per_epoch= to_length slots_per_epoch - ; grace_period_end= to_length grace_period_end - ; slot_duration_ms= to_timespan Slot.duration_ms - ; epoch_size= to_length Epoch.size - ; epoch_duration= to_timespan Epoch.duration - ; checkpoint_window_slots_per_year= to_length zero - ; checkpoint_window_size_in_slots= to_length zero - ; delta_duration= to_timespan delta_duration - ; genesis_state_timestamp= protocol_constants.genesis_state_timestamp } + { Poly.k = to_length k + ; delta = to_length delta + ; block_window_duration_ms = to_timespan block_window_duration_ms + ; slots_per_sub_window = to_length slots_per_sub_window + ; slots_per_window = to_length slots_per_window + ; sub_windows_per_window = to_length sub_windows_per_window + ; slots_per_epoch = to_length slots_per_epoch + ; grace_period_end = to_length grace_period_end + ; slot_duration_ms = to_timespan Slot.duration_ms + ; epoch_size = to_length Epoch.size + ; epoch_duration = to_timespan Epoch.duration + ; checkpoint_window_slots_per_year = to_length zero + ; checkpoint_window_size_in_slots = to_length zero + ; delta_duration = to_timespan delta_duration + ; genesis_state_timestamp = protocol_constants.genesis_state_timestamp + } in res @@ -284,7 +286,8 @@ let create ~(constraint_constants : Genesis_constants.Constraint_constants.t) in { 
constants with checkpoint_window_size_in_slots - ; checkpoint_window_slots_per_year } + ; checkpoint_window_slots_per_year + } let for_unit_tests = lazy @@ -299,13 +302,15 @@ let to_protocol_constants ; genesis_state_timestamp ; slots_per_sub_window ; slots_per_epoch - ; _ } : + ; _ + } : _ Poly.t) = { Mina_base.Protocol_constants_checked.Poly.k ; delta ; genesis_state_timestamp ; slots_per_sub_window - ; slots_per_epoch } + ; slots_per_epoch + } let data_spec = Data_spec. @@ -323,7 +328,8 @@ let data_spec = ; Block_time.Span.Unpacked.typ ; Block_time.Span.Unpacked.typ ; Block_time.Span.Unpacked.typ - ; Block_time.Unpacked.typ ] + ; Block_time.Unpacked.typ + ] let typ = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -345,13 +351,16 @@ let to_input (t : t) = ; t.grace_period_end ; t.epoch_size ; t.checkpoint_window_slots_per_year - ; t.checkpoint_window_size_in_slots |] + ; t.checkpoint_window_size_in_slots + |] ; Array.map ~f:s [| t.block_window_duration_ms ; t.slot_duration_ms ; t.epoch_duration - ; t.delta_duration |] - ; [|Block_time.Bits.to_bits t.genesis_state_timestamp|] ]) + ; t.delta_duration + |] + ; [| Block_time.Bits.to_bits t.genesis_state_timestamp |] + ]) let gc_parameters (constants : t) = let open Unsigned.UInt32 in @@ -409,7 +418,8 @@ module Checked = struct ; slot_duration_ms ; epoch_duration ; delta_duration - ; genesis_state_timestamp |]) + ; genesis_state_timestamp + |]) let create ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~(protocol_constants : Mina_base.Protocol_constants_checked.var) : @@ -419,7 +429,7 @@ module Checked = struct make_checked (fun () -> create' (module Constants_checked) - ~constraint_constants ~protocol_constants ) + ~constraint_constants ~protocol_constants) in let%map checkpoint_window_slots_per_year, checkpoint_window_size_in_slots = let constant c = Integer.constant ~m (Bignum_bigint.of_int c) in @@ -446,7 +456,8 @@ module Checked = struct in { constants with 
checkpoint_window_slots_per_year - ; checkpoint_window_size_in_slots } + ; checkpoint_window_size_in_slots + } end let%test_unit "checked = unchecked" = @@ -458,12 +469,12 @@ let%test_unit "checked = unchecked" = let test = Test_util.test_equal Protocol_constants_checked.typ typ (fun protocol_constants -> - Checked.create ~constraint_constants ~protocol_constants ) + Checked.create ~constraint_constants ~protocol_constants) (fun protocol_constants -> create ~constraint_constants ~protocol_constants: - (Protocol_constants_checked.t_of_value protocol_constants) ) + (Protocol_constants_checked.t_of_value protocol_constants)) in Quickcheck.test ~trials:100 Protocol_constants_checked.Value.gen - ~examples:[Protocol_constants_checked.value_of_t for_unit_tests] + ~examples:[ Protocol_constants_checked.value_of_t for_unit_tests ] ~f:test diff --git a/src/lib/consensus/genesis_epoch_data.ml b/src/lib/consensus/genesis_epoch_data.ml index 5d492568936..8fe0cb6c743 100644 --- a/src/lib/consensus/genesis_epoch_data.ml +++ b/src/lib/consensus/genesis_epoch_data.ml @@ -1,8 +1,8 @@ module Data = struct - type t = {ledger: Mina_base.Ledger.t Lazy.t; seed: Mina_base.Epoch_seed.t} + type t = { ledger : Mina_base.Ledger.t Lazy.t; seed : Mina_base.Epoch_seed.t } end -type tt = {staking: Data.t; next: Data.t option} +type tt = { staking : Data.t; next : Data.t option } type t = tt option diff --git a/src/lib/consensus/global_slot.ml b/src/lib/consensus/global_slot.ml index b323a3da364..1d43324560f 100644 --- a/src/lib/consensus/global_slot.ml +++ b/src/lib/consensus/global_slot.ml @@ -9,7 +9,7 @@ module Poly = struct module Stable = struct module V1 = struct type ('slot_number, 'slots_per_epoch) t = - {slot_number: 'slot_number; slots_per_epoch: 'slots_per_epoch} + { slot_number : 'slot_number; slots_per_epoch : 'slots_per_epoch } [@@deriving sexp, equal, compare, hash, yojson, hlist] end end] @@ -29,7 +29,7 @@ type value = t [@@deriving sexp, compare, hash, yojson] type var = 
(T.Checked.t, Length.Checked.t) Poly.t -let data_spec = Data_spec.[T.Checked.typ; Length.Checked.typ] +let data_spec = Data_spec.[ T.Checked.typ; Length.Checked.typ ] let typ = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -38,30 +38,31 @@ let typ = let to_input (t : value) = Random_oracle.Input.bitstrings - [|T.to_bits t.slot_number; Length.to_bits t.slots_per_epoch|] + [| T.to_bits t.slot_number; Length.to_bits t.slots_per_epoch |] let gen ~(constants : Constants.t) = let open Quickcheck.Let_syntax in let slots_per_epoch = constants.slots_per_epoch in let%map slot_number = T.gen in - {Poly.slot_number; slots_per_epoch} + { Poly.slot_number; slots_per_epoch } let create ~(constants : Constants.t) ~(epoch : Epoch.t) ~(slot : Slot.t) : t = - { slot_number= UInt32.Infix.(slot + (constants.slots_per_epoch * epoch)) - ; slots_per_epoch= constants.slots_per_epoch } + { slot_number = UInt32.Infix.(slot + (constants.slots_per_epoch * epoch)) + ; slots_per_epoch = constants.slots_per_epoch + } let of_epoch_and_slot ~(constants : Constants.t) (epoch, slot) = create ~epoch ~slot ~constants let zero ~(constants : Constants.t) : t = - {slot_number= T.zero; slots_per_epoch= constants.slots_per_epoch} + { slot_number = T.zero; slots_per_epoch = constants.slots_per_epoch } -let slot_number {Poly.slot_number; _} = slot_number +let slot_number { Poly.slot_number; _ } = slot_number -let slots_per_epoch {Poly.slots_per_epoch; _} = slots_per_epoch +let slots_per_epoch { Poly.slots_per_epoch; _ } = slots_per_epoch let to_bits (t : t) = - List.concat_map ~f:T.to_bits [t.slot_number; t.slots_per_epoch] + List.concat_map ~f:T.to_bits [ t.slot_number; t.slots_per_epoch ] let epoch (t : t) = UInt32.Infix.(t.slot_number / t.slots_per_epoch) @@ -69,7 +70,8 @@ let slot (t : t) = UInt32.Infix.(t.slot_number mod t.slots_per_epoch) let to_epoch_and_slot t = (epoch t, slot t) -let ( + ) (x : t) n : t = {x with slot_number= T.add x.slot_number (T.of_int n)} +let ( + ) (x : t) n : t = + 
{ x with slot_number = T.add x.slot_number (T.of_int n) } let ( < ) (t : t) (t' : t) = UInt32.compare t.slot_number t'.slot_number < 0 @@ -77,10 +79,10 @@ let ( - ) (t : t) (t' : t) = T.sub t.slot_number t'.slot_number let max (t1 : t) (t2 : t) = if t1 < t2 then t2 else t1 -let succ (t : t) = {t with slot_number= T.succ t.slot_number} +let succ (t : t) = { t with slot_number = T.succ t.slot_number } let of_slot_number ~(constants : Constants.t) slot_number = - {Poly.slot_number; slots_per_epoch= constants.slots_per_epoch} + { Poly.slot_number; slots_per_epoch = constants.slots_per_epoch } let start_time ~(constants : Constants.t) t = let epoch, slot = to_epoch_and_slot t in @@ -117,20 +119,20 @@ module Checked = struct let ( < ) (t : t) (t' : t) = T.Checked.(t.slot_number < t'.slot_number) let of_slot_number ~(constants : Constants.var) slot_number : t = - {slot_number; slots_per_epoch= constants.slots_per_epoch} + { slot_number; slots_per_epoch = constants.slots_per_epoch } let to_bits (t : t) = let open Bitstring_lib.Bitstring.Lsb_first in let%map slot_number = T.Checked.to_bits t.slot_number and slots_per_epoch = Length.Checked.to_bits t.slots_per_epoch in - List.concat_map ~f:to_list [slot_number; slots_per_epoch] |> of_list + List.concat_map ~f:to_list [ slot_number; slots_per_epoch ] |> of_list let to_input (var : t) = let s = Bitstring_lib.Bitstring.Lsb_first.to_list in let%map slot_number = T.Checked.to_bits var.slot_number and slots_per_epoch = Length.Checked.to_bits var.slots_per_epoch in Random_oracle.Input.bitstrings - (Array.map ~f:s [|slot_number; slots_per_epoch|]) + (Array.map ~f:s [| slot_number; slots_per_epoch |]) let to_epoch_and_slot (t : t) : (Epoch.Checked.t * Slot.Checked.t, _) Checked.t = @@ -142,11 +144,11 @@ module Checked = struct (Length.Checked.to_integer t.slots_per_epoch) in ( Epoch.Checked.Unsafe.of_integer epoch - , Slot.Checked.Unsafe.of_integer slot ) ) + , Slot.Checked.Unsafe.of_integer slot )) let sub (t : t) (t' : t) = 
T.Checked.sub t.slot_number t'.slot_number end module For_tests = struct - let of_global_slot (t : t) slot_number : t = {t with slot_number} + let of_global_slot (t : t) slot_number : t = { t with slot_number } end diff --git a/src/lib/consensus/global_sub_window.ml b/src/lib/consensus/global_sub_window.ml index cf6d67d3640..596c7528bdd 100644 --- a/src/lib/consensus/global_sub_window.ml +++ b/src/lib/consensus/global_sub_window.ml @@ -36,7 +36,7 @@ module Checked = struct (Mina_numbers.Length.Checked.to_integer constants.slots_per_sub_window) in - q ) + q) let sub_window ~(constants : Constants.var) (t : t) : (Sub_window.Checked.t, _) Checked.t = @@ -46,7 +46,7 @@ module Checked = struct (Mina_numbers.Length.Checked.to_integer constants.sub_windows_per_window) in - Sub_window.Checked.Unsafe.of_integer shift ) + Sub_window.Checked.Unsafe.of_integer shift) let succ (t : t) : t = Integer.succ ~m t diff --git a/src/lib/consensus/intf.ml b/src/lib/consensus/intf.ml index 9fb8309159d..1823fbb182d 100644 --- a/src/lib/consensus/intf.ml +++ b/src/lib/consensus/intf.ml @@ -17,11 +17,11 @@ module type Constants = sig val gc_parameters : t - -> [`Acceptable_network_delay of Length.t] - * [`Gc_width of Length.t] - * [`Gc_width_epoch of Length.t] - * [`Gc_width_slot of Length.t] - * [`Gc_interval of Length.t] + -> [ `Acceptable_network_delay of Length.t ] + * [ `Gc_width of Length.t ] + * [ `Gc_width_epoch of Length.t ] + * [ `Gc_width_slot of Length.t ] + * [ `Gc_interval of Length.t ] end module type Blockchain_state = sig @@ -241,7 +241,7 @@ module type State_hooks = sig -> prev_state_hash:Mina_base.State_hash.var -> snark_transition_var -> Currency.Amount.var - -> ( [`Success of Snark_params.Tick.Boolean.var] * consensus_state_var + -> ( [ `Success of Snark_params.Tick.Boolean.var ] * consensus_state_var , _ ) Snark_params.Tick.Checked.t @@ -252,9 +252,8 @@ module type State_hooks = sig constraint_constants:Genesis_constants.Constraint_constants.t -> constants:Constants.t 
-> gen_slot_advancement:int Quickcheck.Generator.t - -> ( previous_protocol_state:( protocol_state - , Mina_base.State_hash.t ) - With_hash.t + -> ( previous_protocol_state: + (protocol_state, Mina_base.State_hash.t) With_hash.t -> snarked_ledger_hash:Mina_base.Frozen_ledger_hash.t -> coinbase_receiver:Public_key.Compressed.t -> supercharge_coinbase:bool @@ -280,13 +279,14 @@ module type S = sig module Stable : sig module V1 : sig type t = - { delta: int - ; k: int - ; slots_per_epoch: int - ; slot_duration: int - ; epoch_duration: int - ; genesis_state_timestamp: Block_time.Stable.V1.t - ; acceptable_network_delay: int } + { delta : int + ; k : int + ; slots_per_epoch : int + ; slot_duration : int + ; epoch_duration : int + ; genesis_state_timestamp : Block_time.Stable.V1.t + ; acceptable_network_delay : int + } [@@deriving yojson, fields] end end] @@ -299,10 +299,11 @@ module type S = sig module Genesis_epoch_data : sig module Data : sig - type t = {ledger: Mina_base.Ledger.t Lazy.t; seed: Mina_base.Epoch_seed.t} + type t = + { ledger : Mina_base.Ledger.t Lazy.t; seed : Mina_base.Epoch_seed.t } end - type tt = {staking: Data.t; next: Data.t option} + type tt = { staking : Data.t; next : Data.t option } type t = tt option @@ -528,8 +529,7 @@ module type S = sig val next_epoch_data : Value.t -> Mina_base.Epoch_data.Value.t - val graphql_type : - unit -> ('ctx, Value.t option) Graphql_async.Schema.typ + val graphql_type : unit -> ('ctx, Value.t option) Graphql_async.Schema.typ val curr_slot : Value.t -> Slot.t @@ -571,7 +571,7 @@ module type S = sig Other: specified account (with default token) receives coinbases *) - type t = [`Producer | `Other of Public_key.Compressed.t] + type t = [ `Producer | `Other of Public_key.Compressed.t ] [@@deriving yojson] end @@ -588,9 +588,10 @@ module type S = sig -> rpc_handler list type query = - { query: + { query : 'q 'r. 
Network_peer.Peer.t -> ('q, 'r) rpc -> 'q - -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t } + -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t + } end (* Check whether we are in the genesis epoch *) @@ -603,9 +604,9 @@ module type S = sig constants:Constants.t -> Consensus_state.Value.t -> time_received:Unix_timestamp.t - -> (unit, [`Too_early | `Too_late of int64]) result + -> (unit, [ `Too_early | `Too_late of int64 ]) result - type select_status = [`Keep | `Take] [@@deriving equal] + type select_status = [ `Keep | `Take ] [@@deriving equal] (** * Select between two ledger builder controller tips given the consensus @@ -698,25 +699,25 @@ module type S = sig module Make_state_hooks (Blockchain_state : Blockchain_state) (Protocol_state : Protocol_state - with type blockchain_state := - Blockchain_state.Value.t - and type blockchain_state_var := - Blockchain_state.var - and type consensus_state := Consensus_state.Value.t - and type consensus_state_var := Consensus_state.var) + with type blockchain_state := + Blockchain_state.Value.t + and type blockchain_state_var := + Blockchain_state.var + and type consensus_state := Consensus_state.Value.t + and type consensus_state_var := Consensus_state.var) (Snark_transition : Snark_transition - with type blockchain_state_var := - Blockchain_state.var - and type consensus_transition_var := - Consensus_transition.var) : + with type blockchain_state_var := + Blockchain_state.var + and type consensus_transition_var := + Consensus_transition.var) : State_hooks - with type blockchain_state := Blockchain_state.Value.t - and type protocol_state := Protocol_state.Value.t - and type protocol_state_var := Protocol_state.var - and type snark_transition_var := Snark_transition.var - and type consensus_state := Consensus_state.Value.t - and type consensus_state_var := Consensus_state.var - and type consensus_transition := Consensus_transition.Value.t - and type block_data := Block_data.t + with type blockchain_state := 
Blockchain_state.Value.t + and type protocol_state := Protocol_state.Value.t + and type protocol_state_var := Protocol_state.var + and type snark_transition_var := Snark_transition.var + and type consensus_state := Consensus_state.Value.t + and type consensus_state_var := Consensus_state.var + and type consensus_transition := Consensus_transition.Value.t + and type block_data := Block_data.t end end diff --git a/src/lib/consensus/proof_of_stake.ml b/src/lib/consensus/proof_of_stake.ml index 729cbbcd59e..16c93735571 100644 --- a/src/lib/consensus/proof_of_stake.ml +++ b/src/lib/consensus/proof_of_stake.ml @@ -28,7 +28,7 @@ let genesis_ledger_total_currency ~ledger = (Balance.to_amount @@ account.Mina_base.Account.Poly.balance) |> Option.value_exn ?here:None ?error:None ~message:"failed to calculate total currency in genesis ledger" - else sum ) + else sum) let genesis_ledger_hash ~ledger = Mina_base.Ledger.merkle_root (Lazy.force ledger) @@ -47,10 +47,10 @@ let compute_delegatee_table keys ~iter_accounts = Public_key.Compressed.Table.update outer_table (Option.value_exn acct.delegate) ~f:(function | None -> - Account.Index.Table.of_alist_exn [(i, acct)] + Account.Index.Table.of_alist_exn [ (i, acct) ] | Some table -> Account.Index.Table.add_exn table ~key:i ~data:acct ; - table ) ) ; + table)) ; (* TODO: this metric tracking currently assumes that the result of compute_delegatee_table is called with the full set of block production keypairs every time the set changes, which is true right now, but this @@ -67,15 +67,15 @@ let compute_delegatee_table keys ~iter_accounts = let compute_delegatee_table_sparse_ledger keys ledger = compute_delegatee_table keys ~iter_accounts:(fun f -> - Mina_base.Sparse_ledger.iteri ledger ~f:(fun i acct -> f i acct) ) + Mina_base.Sparse_ledger.iteri ledger ~f:(fun i acct -> f i acct)) let compute_delegatee_table_ledger_db keys ledger = compute_delegatee_table keys ~iter_accounts:(fun f -> - Mina_base.Ledger.Db.iteri ledger ~f:(fun i acct 
-> f i acct) ) + Mina_base.Ledger.Db.iteri ledger ~f:(fun i acct -> f i acct)) let compute_delegatee_table_genesis_ledger keys ledger = compute_delegatee_table keys ~iter_accounts:(fun f -> - Mina_base.Ledger.iteri ledger ~f:(fun i acct -> f i acct) ) + Mina_base.Ledger.iteri ledger ~f:(fun i acct -> f i acct)) module Segment_id = Mina_numbers.Nat.Make32 () @@ -86,13 +86,14 @@ module Configuration = struct module Stable = struct module V1 = struct type t = - { delta: int - ; k: int - ; slots_per_epoch: int - ; slot_duration: int - ; epoch_duration: int - ; genesis_state_timestamp: Block_time.Stable.V1.t - ; acceptable_network_delay: int } + { delta : int + ; k : int + ; slots_per_epoch : int + ; slot_duration : int + ; epoch_duration : int + ; genesis_state_timestamp : Block_time.Stable.V1.t + ; acceptable_network_delay : int + } [@@deriving yojson, fields] let to_latest = Fn.id @@ -105,13 +106,14 @@ module Configuration = struct in let of_int32 = UInt32.to_int in let of_span = Fn.compose Int64.to_int Block_time.Span.to_ms in - { delta= of_int32 constants.delta - ; k= of_int32 constants.k - ; slots_per_epoch= of_int32 constants.epoch_size - ; slot_duration= of_span constants.slot_duration_ms - ; epoch_duration= of_span constants.epoch_duration - ; genesis_state_timestamp= constants.genesis_state_timestamp - ; acceptable_network_delay= of_span constants.delta_duration } + { delta = of_int32 constants.delta + ; k = of_int32 constants.k + ; slots_per_epoch = of_int32 constants.epoch_size + ; slot_duration = of_span constants.slot_duration_ms + ; epoch_duration = of_span constants.epoch_duration + ; genesis_state_timestamp = constants.genesis_state_timestamp + ; acceptable_network_delay = of_span constants.delta_duration + } end module Constants = Constants @@ -128,15 +130,15 @@ module Data = struct let update (seed : t) vrf_result = let open Random_oracle in hash ~init:Hash_prefix_states.epoch_seed - [|(seed :> Tick.Field.t); vrf_result|] + [| (seed :> Tick.Field.t); 
vrf_result |] |> of_hash let update_var (seed : var) vrf_result = let open Random_oracle.Checked in make_checked (fun () -> hash ~init:Hash_prefix_states.epoch_seed - [|var_to_hash_packed seed; vrf_result|] - |> var_of_hash_packed ) + [| var_to_hash_packed seed; vrf_result |] + |> var_of_hash_packed) end module Epoch_and_slot = struct @@ -144,9 +146,7 @@ module Data = struct let of_time_exn ~(constants : Constants.t) tm : t = let epoch = Epoch.of_time_exn tm ~constants in - let time_since_epoch = - Time.diff tm (Epoch.start_time epoch ~constants) - in + let time_since_epoch = Time.diff tm (Epoch.start_time epoch ~constants) in let slot = uint32_of_int64 @@ Int64.Infix.( @@ -158,21 +158,22 @@ module Data = struct module Block_data = struct type t = - { stake_proof: Stake_proof.t - ; global_slot: Mina_numbers.Global_slot.t - ; global_slot_since_genesis: Mina_numbers.Global_slot.t - ; vrf_result: Random_oracle.Digest.t } + { stake_proof : Stake_proof.t + ; global_slot : Mina_numbers.Global_slot.t + ; global_slot_since_genesis : Mina_numbers.Global_slot.t + ; vrf_result : Random_oracle.Digest.t + } - let prover_state {stake_proof; _} = stake_proof + let prover_state { stake_proof; _ } = stake_proof - let global_slot {global_slot; _} = global_slot + let global_slot { global_slot; _ } = global_slot - let epoch_ledger {stake_proof; _} = stake_proof.ledger + let epoch_ledger { stake_proof; _ } = stake_proof.ledger - let global_slot_since_genesis {global_slot_since_genesis; _} = + let global_slot_since_genesis { global_slot_since_genesis; _ } = global_slot_since_genesis - let coinbase_receiver {stake_proof; _} = stake_proof.coinbase_receiver_pk + let coinbase_receiver { stake_proof; _ } = stake_proof.coinbase_receiver_pk end module Local_state = struct @@ -219,15 +220,16 @@ module Data = struct end type t = - { ledger: Ledger_snapshot.t - ; delegatee_table: + { ledger : Ledger_snapshot.t + ; delegatee_table : Mina_base.Account.t Mina_base.Account.Index.Table.t - 
Public_key.Compressed.Table.t } + Public_key.Compressed.Table.t + } let delegators t key = Public_key.Compressed.Table.find t.delegatee_table key - let to_yojson {ledger; delegatee_table} = + let to_yojson { ledger; delegatee_table } = `Assoc [ ( "ledger_hash" , Ledger_snapshot.merkle_root ledger @@ -241,30 +243,33 @@ module Data = struct ( Hashtbl.to_alist delegators |> List.map ~f:(fun (addr, account) -> ( Int.to_string addr - , Mina_base.Account.to_yojson account ) ) ) - ) ) ) ) ] + , Mina_base.Account.to_yojson account )) ) )) + ) ) + ] let ledger t = t.ledger end module Data = struct type epoch_ledger_uuids = - { staking: Uuid.t - ; next: Uuid.t - ; genesis_state_hash: Mina_base.State_hash.t } + { staking : Uuid.t + ; next : Uuid.t + ; genesis_state_hash : Mina_base.State_hash.t + } (* Invariant: Snapshot's delegators are taken from accounts in block_production_pubkeys *) type t = - { mutable staking_epoch_snapshot: Snapshot.t - ; mutable next_epoch_snapshot: Snapshot.t - ; last_checked_slot_and_epoch: + { mutable staking_epoch_snapshot : Snapshot.t + ; mutable next_epoch_snapshot : Snapshot.t + ; last_checked_slot_and_epoch : (Epoch.t * Slot.t) Public_key.Compressed.Table.t - ; mutable last_epoch_delegatee_table: + ; mutable last_epoch_delegatee_table : Mina_base.Account.t Mina_base.Account.Index.Table.t Public_key.Compressed.Table.t Option.t - ; mutable epoch_ledger_uuids: epoch_ledger_uuids - ; epoch_ledger_location: string } + ; mutable epoch_ledger_uuids : epoch_ledger_uuids + ; epoch_ledger_location : string + } let to_yojson t = `Assoc @@ -278,7 +283,7 @@ module Data = struct t.last_checked_slot_and_epoch |> List.map ~f:(fun (key, epoch_and_slot) -> ( Public_key.Compressed.to_string key - , [%to_yojson: Epoch.t * Slot.t] epoch_and_slot ) ) ) ) + , [%to_yojson: Epoch.t * Slot.t] epoch_and_slot )) ) ) ] end @@ -307,15 +312,17 @@ module Data = struct let last_checked_slot_and_epoch = Table.create () in Set.iter new_keys ~f:(fun pk -> let data = 
Option.value (Table.find old_table pk) ~default in - Table.add_exn last_checked_slot_and_epoch ~key:pk ~data ) ; + Table.add_exn last_checked_slot_and_epoch ~key:pk ~data) ; last_checked_slot_and_epoch - let epoch_ledger_uuids_to_yojson Data.{staking; next; genesis_state_hash} = + let epoch_ledger_uuids_to_yojson Data.{ staking; next; genesis_state_hash } + = `Assoc [ ("staking", `String (Uuid.to_string staking)) ; ("next", `String (Uuid.to_string next)) ; ( "genesis_state_hash" - , Mina_base.State_hash.to_yojson genesis_state_hash ) ] + , Mina_base.State_hash.to_yojson genesis_state_hash ) + ] let epoch_ledger_uuids_from_file location = let open Yojson.Safe.Util in @@ -332,14 +339,14 @@ module Data = struct let%map genesis_state_hash = json |> member "genesis_state_hash" |> Mina_base.State_hash.of_yojson in - Data.{staking; next; genesis_state_hash} + Data.{ staking; next; genesis_state_hash } let create_epoch_ledger ~location ~logger ~genesis_epoch_ledger ~ledger_depth = let open Mina_base in if Sys.file_exists location then ( [%log info] - ~metadata:[("location", `String location)] + ~metadata:[ ("location", `String location) ] "Loading epoch ledger from disk: $location" ; Snapshot.Ledger_snapshot.Ledger_db (Ledger.Db.create ~directory_name:location ~depth:ledger_depth ()) ) @@ -351,19 +358,20 @@ module Data = struct let genesis_epoch_ledger_staking, genesis_epoch_ledger_next = Option.value_map genesis_epoch_data ~default:(genesis_ledger, genesis_ledger) - ~f:(fun {Genesis_epoch_data.staking; next} -> + ~f:(fun { Genesis_epoch_data.staking; next } -> ( staking.ledger , Option.value_map next ~default:staking.ledger ~f:(fun next -> - next.ledger ) ) ) + next.ledger) )) in let epoch_ledger_uuids_location = epoch_ledger_location ^ ".json" in let logger = Logger.create () in let create_new_uuids () = let epoch_ledger_uuids = Data. 
- { staking= Uuid_unix.create () - ; next= Uuid_unix.create () - ; genesis_state_hash } + { staking = Uuid_unix.create () + ; next = Uuid_unix.create () + ; genesis_state_hash + } in Yojson.Safe.to_file epoch_ledger_uuids_location (epoch_ledger_uuids_to_yojson epoch_ledger_uuids) ; @@ -382,7 +390,8 @@ module Data = struct Creating new uuids.." ~metadata: [ ("path", `String epoch_ledger_uuids_location) - ; ("error", `String str) ] ; + ; ("error", `String str) + ] ; create_new_uuids () in (*If the genesis hash matches and both the files are present. If only one of them is present then it could be stale data and might cause the node to never be able to bootstrap*) @@ -399,14 +408,15 @@ module Data = struct ledger_location epoch_ledger_uuids.next in [%log info] - "Cleaning up old epoch ledgers with genesis state $state_hash \ - at locations $staking and $next" + "Cleaning up old epoch ledgers with genesis state $state_hash at \ + locations $staking and $next" ~metadata: [ ( "state_hash" , Mina_base.State_hash.to_yojson epoch_ledger_uuids.genesis_state_hash ) ; ("staking", `String staking_ledger_location) - ; ("next", `String next_ledger_location) ] ; + ; ("next", `String next_ledger_location) + ] ; File_system.rmrf staking_ledger_location ; File_system.rmrf next_ledger_location ; create_new_uuids () ) @@ -427,51 +437,56 @@ module Data = struct ~genesis_epoch_ledger:genesis_epoch_ledger_next ~ledger_depth in ref - { Data.staking_epoch_snapshot= - { Snapshot.ledger= staking_epoch_ledger - ; delegatee_table= + { Data.staking_epoch_snapshot = + { Snapshot.ledger = staking_epoch_ledger + ; delegatee_table = Snapshot.Ledger_snapshot.compute_delegatee_table - block_producer_pubkeys staking_epoch_ledger } - ; next_epoch_snapshot= - { Snapshot.ledger= next_epoch_ledger - ; delegatee_table= + block_producer_pubkeys staking_epoch_ledger + } + ; next_epoch_snapshot = + { Snapshot.ledger = next_epoch_ledger + ; delegatee_table = Snapshot.Ledger_snapshot.compute_delegatee_table - 
block_producer_pubkeys next_epoch_ledger } - ; last_checked_slot_and_epoch= + block_producer_pubkeys next_epoch_ledger + } + ; last_checked_slot_and_epoch = make_last_checked_slot_and_epoch_table (Public_key.Compressed.Table.create ()) block_producer_pubkeys ~default:(Epoch.zero, Slot.zero) - ; last_epoch_delegatee_table= None + ; last_epoch_delegatee_table = None ; epoch_ledger_uuids - ; epoch_ledger_location } + ; epoch_ledger_location + } let block_production_keys_swap ~(constants : Constants.t) t block_production_pubkeys now = let old : Data.t = !t in - let s {Snapshot.ledger; delegatee_table= _} = + let s { Snapshot.ledger; delegatee_table = _ } = { Snapshot.ledger - ; delegatee_table= + ; delegatee_table = Snapshot.Ledger_snapshot.compute_delegatee_table - block_production_pubkeys ledger } + block_production_pubkeys ledger + } in t := - { Data.staking_epoch_snapshot= s old.staking_epoch_snapshot - ; next_epoch_snapshot= + { Data.staking_epoch_snapshot = s old.staking_epoch_snapshot + ; next_epoch_snapshot = s old.next_epoch_snapshot (* assume these keys are different and therefore we haven't checked any - * slots or epochs *) - ; last_checked_slot_and_epoch= + * slots or epochs *) + ; last_checked_slot_and_epoch = make_last_checked_slot_and_epoch_table !t.Data.last_checked_slot_and_epoch block_production_pubkeys ~default: ((* TODO: Be smarter so that we don't have to look at the slot before again *) let epoch, slot = Epoch_and_slot.of_time_exn now ~constants in ( epoch - , UInt32.( - if compare slot zero > 0 then sub slot one else slot) )) - ; last_epoch_delegatee_table= None - ; epoch_ledger_uuids= old.epoch_ledger_uuids - ; epoch_ledger_location= old.epoch_ledger_location } + , UInt32.(if compare slot zero > 0 then sub slot one else slot) + )) + ; last_epoch_delegatee_table = None + ; epoch_ledger_uuids = old.epoch_ledger_uuids + ; epoch_ledger_location = old.epoch_ledger_location + } type snapshot_identifier = Staking_epoch_snapshot | Next_epoch_snapshot 
[@@deriving to_yojson, equal] @@ -511,9 +526,10 @@ module Data = struct let%map (_ : Ledger.Db.t) = Ledger_transfer.transfer_accounts ~src:sparse_ledger ~dest:ledger in - !t.staking_epoch_snapshot - <- { delegatee_table - ; ledger= Snapshot.Ledger_snapshot.Ledger_db ledger } + !t.staking_epoch_snapshot <- + { delegatee_table + ; ledger = Snapshot.Ledger_snapshot.Ledger_db ledger + } | Next_epoch_snapshot -> let location = next_epoch_ledger_location t in Snapshot.Ledger_snapshot.remove !t.next_epoch_snapshot.ledger @@ -524,9 +540,10 @@ module Data = struct let%map (_ : Ledger.Db.t) = Ledger_transfer.transfer_accounts ~src:sparse_ledger ~dest:ledger in - !t.next_epoch_snapshot - <- { delegatee_table - ; ledger= Snapshot.Ledger_snapshot.Ledger_db ledger } + !t.next_epoch_snapshot <- + { delegatee_table + ; ledger = Snapshot.Ledger_snapshot.Ledger_db ledger + } let next_epoch_ledger (t : t) = Snapshot.ledger @@ get_snapshot t Next_epoch_snapshot @@ -550,7 +567,7 @@ module Data = struct else ( Table.set !t.last_checked_slot_and_epoch ~key:pk ~data:(epoch, slot) ; - Some pk ) ) + Some pk )) in match unseens with | [] -> @@ -563,8 +580,9 @@ module Data = struct include Mina_base.Epoch_ledger let genesis ~ledger = - { Poly.hash= genesis_ledger_hash ~ledger - ; total_currency= genesis_ledger_total_currency ~ledger } + { Poly.hash = genesis_ledger_hash ~ledger + ; total_currency = genesis_ledger_total_currency ~ledger + } let graphql_type () : ('ctx, Value.t option) Graphql_async.Schema.typ = let open Graphql_async in @@ -572,13 +590,14 @@ module Data = struct obj "epochLedger" ~fields:(fun _ -> [ field "hash" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun _ {Poly.hash; _} -> - Mina_base.Frozen_ledger_hash.to_string hash ) + ~resolve:(fun _ { Poly.hash; _ } -> + Mina_base.Frozen_ledger_hash.to_string hash) ; field "totalCurrency" ~typ:(non_null @@ Graphql_base_types.uint64 ()) ~args:Arg.[] - ~resolve:(fun _ {Poly.total_currency; _} -> - Amount.to_uint64 total_currency ) 
] ) + ~resolve:(fun _ { Poly.total_currency; _ } -> + Amount.to_uint64 total_currency) + ]) end module Vrf = struct @@ -647,7 +666,7 @@ module Data = struct let%bind result, winner_account = get_vrf_evaluation ~constraint_constants shifted ~ledger:epoch_ledger.hash ~block_stake_winner ~block_creator - ~message:{Message.global_slot; seed; delegator= winner_addr} + ~message:{ Message.global_slot; seed; delegator = winner_addr } in let my_stake = winner_account.balance in let%bind truncated_result = Output.Checked.truncate result in @@ -672,14 +691,15 @@ module Data = struct let dummy_sparse_ledger = Mina_base.Sparse_ledger.of_ledger_subset_exn (Lazy.force genesis_epoch_ledger) - [Mina_base.(Account_id.create pk Token_id.default)] + [ Mina_base.(Account_id.create pk Token_id.default) ] in - { delegator= 0 - ; delegator_pk= pk - ; coinbase_receiver_pk= pk - ; ledger= dummy_sparse_ledger - ; producer_private_key= sk - ; producer_public_key= Public_key.decompress_exn pk } + { delegator = 0 + ; delegator_pk = pk + ; coinbase_receiver_pk = pk + ; ledger = dummy_sparse_ledger + ; producer_private_key = sk + ; producer_public_key = Public_key.decompress_exn pk + } let handler : constraint_constants:Genesis_constants.Constraint_constants.t @@ -690,7 +710,7 @@ module Data = struct let dummy_sparse_ledger = Mina_base.Sparse_ledger.of_ledger_subset_exn (Lazy.force genesis_epoch_ledger) - [Mina_base.(Account_id.create pk Token_id.default)] + [ Mina_base.(Account_id.create pk Token_id.default) ] in let empty_pending_coinbase = Mina_base.Pending_coinbase.create @@ -712,7 +732,7 @@ module Data = struct (push fail (create_single pending_coinbase_handler)) (create_single ledger_handler)) in - fun (With {request; respond}) -> + fun (With { request; respond }) -> match request with | Winner_address -> respond (Provide 0) @@ -728,24 +748,24 @@ module Data = struct respond (Provide (Snarky_backendless.Request.Handler.run handlers - ["Ledger Handler"; "Pending Coinbase Handler"] + [ 
"Ledger Handler"; "Pending Coinbase Handler" ] request)) end let check ~constraint_constants ~global_slot ~global_slot_since_genesis - ~seed ~private_key ~public_key ~public_key_compressed - ~coinbase_receiver ~total_stake ~logger ~epoch_snapshot = + ~seed ~private_key ~public_key ~public_key_compressed ~coinbase_receiver + ~total_stake ~logger ~epoch_snapshot = let open Message in let open Local_state in let open Snapshot in - with_return (fun {return} -> + with_return (fun { return } -> Hashtbl.iteri ( Snapshot.delegators epoch_snapshot public_key_compressed |> Option.value ~default:(Core_kernel.Int.Table.create ()) ) ~f:(fun ~key:delegator ~data:account -> let vrf_result = T.eval ~constraint_constants ~private_key - {global_slot; seed; delegator} + { global_slot; seed; delegator } in let truncated_vrf_result = Output.truncate vrf_result in [%log debug] @@ -763,7 +783,8 @@ module Data = struct (* use sexp representation; int might be too small *) ( Fold.string_bits truncated_vrf_result |> Bignum_bigint.of_bit_fold_lsb - |> Bignum_bigint.sexp_of_t |> Sexp.to_string ) ) ] ; + |> Bignum_bigint.sexp_of_t |> Sexp.to_string ) ) + ] ; Mina_metrics.Counter.inc_one Mina_metrics.Consensus.vrf_evaluations ; if @@ -772,13 +793,13 @@ module Data = struct then return (Some - ( { Block_data.stake_proof= - { producer_private_key= private_key - ; producer_public_key= public_key + ( { Block_data.stake_proof = + { producer_private_key = private_key + ; producer_public_key = public_key ; delegator - ; delegator_pk= account.public_key - ; coinbase_receiver_pk= coinbase_receiver - ; ledger= + ; delegator_pk = account.public_key + ; coinbase_receiver_pk = coinbase_receiver + ; ledger = Local_state.Snapshot.Ledger_snapshot .ledger_subset [ Mina_base.( @@ -787,13 +808,16 @@ module Data = struct Token_id.default) ; Mina_base.( Account_id.create account.public_key - Token_id.default) ] - epoch_snapshot.ledger } + Token_id.default) + ] + epoch_snapshot.ledger + } ; global_slot ; 
global_slot_since_genesis - ; vrf_result } - , account.public_key )) ) ; - None ) + ; vrf_result + } + , account.public_key ))) ; + None) end module Optional_state_hash = struct @@ -847,7 +871,8 @@ module Data = struct ; Epoch_seed.typ ; Mina_base.State_hash.typ ; Lock_checkpoint.typ - ; Length.typ ] + ; Length.typ + ] let typ : (var, Value.t) Typ.t = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -861,64 +886,71 @@ module Data = struct [ field "ledger" ~typ:(non_null @@ Epoch_ledger.graphql_type ()) ~args:Arg.[] - ~resolve:(fun _ {Poly.ledger; _} -> ledger) + ~resolve:(fun _ { Poly.ledger; _ } -> ledger) ; field "seed" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun _ {Poly.seed; _} -> - Epoch_seed.to_base58_check seed ) + ~resolve:(fun _ { Poly.seed; _ } -> + Epoch_seed.to_base58_check seed) ; field "startCheckpoint" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun _ {Poly.start_checkpoint; _} -> - Mina_base.State_hash.to_base58_check start_checkpoint ) + ~resolve:(fun _ { Poly.start_checkpoint; _ } -> + Mina_base.State_hash.to_base58_check start_checkpoint) ; field "lockCheckpoint" ~typ:(Lock_checkpoint.graphql_type ()) ~args:Arg.[] - ~resolve:(fun _ {Poly.lock_checkpoint; _} -> - Lock_checkpoint.resolve lock_checkpoint ) + ~resolve:(fun _ { Poly.lock_checkpoint; _ } -> + Lock_checkpoint.resolve lock_checkpoint) ; field "epochLength" ~typ:(non_null @@ Graphql_base_types.uint32 ()) ~args:Arg.[] - ~resolve:(fun _ {Poly.epoch_length; _} -> - Mina_numbers.Length.to_uint32 epoch_length ) ] ) + ~resolve:(fun _ { Poly.epoch_length; _ } -> + Mina_numbers.Length.to_uint32 epoch_length) + ]) let to_input - ({ledger; seed; start_checkpoint; lock_checkpoint; epoch_length} : + ({ ledger; seed; start_checkpoint; lock_checkpoint; epoch_length } : Value.t) = let input = - { Random_oracle.Input.field_elements= - [|(seed :> Tick.Field.t); (start_checkpoint :> Tick.Field.t)|] - ; bitstrings= [|Length.Bits.to_bits epoch_length|] } + { 
Random_oracle.Input.field_elements = + [| (seed :> Tick.Field.t); (start_checkpoint :> Tick.Field.t) |] + ; bitstrings = [| Length.Bits.to_bits epoch_length |] + } in List.reduce_exn ~f:Random_oracle.Input.append [ input ; Epoch_ledger.to_input ledger - ; Lock_checkpoint.to_input lock_checkpoint ] + ; Lock_checkpoint.to_input lock_checkpoint + ] let var_to_input - ({ledger; seed; start_checkpoint; lock_checkpoint; epoch_length} : + ({ ledger; seed; start_checkpoint; lock_checkpoint; epoch_length } : var) = let open Tick in let%map epoch_length = Length.Checked.to_bits epoch_length in let open Random_oracle.Input in let input = - { field_elements= + { field_elements = [| Epoch_seed.var_to_hash_packed seed - ; Mina_base.State_hash.var_to_hash_packed start_checkpoint |] - ; bitstrings= [|Bitstring.Lsb_first.to_list epoch_length|] } + ; Mina_base.State_hash.var_to_hash_packed start_checkpoint + |] + ; bitstrings = [| Bitstring.Lsb_first.to_list epoch_length |] + } in List.reduce_exn ~f:Random_oracle.Input.append [ input ; Epoch_ledger.var_to_input ledger - ; field (Mina_base.State_hash.var_to_hash_packed lock_checkpoint) ] + ; field (Mina_base.State_hash.var_to_hash_packed lock_checkpoint) + ] let genesis ~(genesis_epoch_data : Genesis_epoch_data.Data.t) = - { Poly.ledger= + { Poly.ledger = Epoch_ledger.genesis ~ledger:genesis_epoch_data.ledger (* TODO: epoch_seed needs to be non-determinable by o1-labs before mainnet launch *) - ; seed= genesis_epoch_data.seed - ; start_checkpoint= Mina_base.State_hash.(of_hash zero) - ; lock_checkpoint= Lock_checkpoint.null - ; epoch_length= Length.of_int 1 } + ; seed = genesis_epoch_data.seed + ; start_checkpoint = Mina_base.State_hash.(of_hash zero) + ; lock_checkpoint = Lock_checkpoint.null + ; epoch_length = Length.of_int 1 + } end module T = struct @@ -995,35 +1027,37 @@ module Data = struct let next_to_staking (next : Next.Value.t) : Staking.Value.t = next - let update_pair - ((staking_data, next_data) : Staking.Value.t * 
Next.Value.t) - epoch_count ~prev_epoch ~next_epoch ~next_slot - ~prev_protocol_state_hash ~producer_vrf_result ~snarked_ledger_hash - ~genesis_ledger_hash ~total_currency ~(constants : Constants.t) = + let update_pair ((staking_data, next_data) : Staking.Value.t * Next.Value.t) + epoch_count ~prev_epoch ~next_epoch ~next_slot ~prev_protocol_state_hash + ~producer_vrf_result ~snarked_ledger_hash ~genesis_ledger_hash + ~total_currency ~(constants : Constants.t) = let next_staking_ledger = (*If snarked ledger hash is still the genesis ledger hash then the epoch ledger should continue to be `next_data.ledger`. This is because the epoch ledgers at genesis can be different from the genesis ledger*) if Mina_base.Frozen_ledger_hash.equal snarked_ledger_hash genesis_ledger_hash then next_data.ledger - else {Epoch_ledger.Poly.hash= snarked_ledger_hash; total_currency} + else { Epoch_ledger.Poly.hash = snarked_ledger_hash; total_currency } in let staking_data', next_data', epoch_count' = if Epoch.(next_epoch > prev_epoch) then ( next_to_staking next_data - , { Poly.seed= next_data.seed - ; ledger= next_staking_ledger - ; start_checkpoint= + , { Poly.seed = next_data.seed + ; ledger = next_staking_ledger + ; start_checkpoint = prev_protocol_state_hash (* TODO: We need to make sure issue #2328 is properly addressed. *) - ; lock_checkpoint= Mina_base.State_hash.(of_hash zero) - ; epoch_length= Length.of_int 1 } + ; lock_checkpoint = Mina_base.State_hash.(of_hash zero) + ; epoch_length = Length.of_int 1 + } , Length.succ epoch_count ) else ( assert (Epoch.equal next_epoch prev_epoch) ; ( staking_data , Poly. - {next_data with epoch_length= Length.succ next_data.epoch_length} + { next_data with + epoch_length = Length.succ next_data.epoch_length + } , epoch_count ) ) in let curr_seed, curr_lock_checkpoint = @@ -1035,8 +1069,9 @@ module Data = struct let next_data'' = Poly. 
{ next_data' with - seed= curr_seed - ; lock_checkpoint= curr_lock_checkpoint } + seed = curr_seed + ; lock_checkpoint = curr_lock_checkpoint + } in (staking_data', next_data'', epoch_count') end @@ -1083,8 +1118,7 @@ module Data = struct of_slot_number ~constants slot end - [%%if - true] + [%%if true] module Min_window_density = struct (* Three cases for updating the lengths of sub_windows @@ -1113,8 +1147,7 @@ module Data = struct in let same_window = Global_sub_window.( - add prev_global_sub_window - (constant constants.sub_windows_per_window) + add prev_global_sub_window (constant constants.sub_windows_per_window) >= next_global_sub_window) in let new_sub_window_densities = @@ -1127,15 +1160,14 @@ module Data = struct in let within_range = if - UInt32.compare prev_relative_sub_window - next_relative_sub_window + UInt32.compare prev_relative_sub_window next_relative_sub_window < 0 then gt_prev_sub_window && lt_next_sub_window else gt_prev_sub_window || lt_next_sub_window in if same_sub_window then length else if same_window && not within_range then length - else Length.zero ) + else Length.zero) in let new_window_length = List.fold new_sub_window_densities ~init:Length.zero ~f:Length.add @@ -1158,7 +1190,7 @@ module Data = struct if is_next_sub_window then let f = if incr_window then Length.succ else Fn.id in if same_sub_window then f length else f Length.zero - else length ) + else length) in (min_window_density, sub_window_densities) @@ -1175,12 +1207,10 @@ module Data = struct Global_sub_window.Checked.of_global_slot ~constants next_global_slot in let%bind prev_relative_sub_window = - Global_sub_window.Checked.sub_window ~constants - prev_global_sub_window + Global_sub_window.Checked.sub_window ~constants prev_global_sub_window in let%bind next_relative_sub_window = - Global_sub_window.Checked.sub_window ~constants - next_global_sub_window + Global_sub_window.Checked.sub_window ~constants next_global_sub_window in let%bind same_sub_window = 
Global_sub_window.Checked.equal prev_global_sub_window @@ -1223,7 +1253,7 @@ module Data = struct (if_ Boolean.(same_window && not within_range) ~then_:(Checked.return length) - ~else_:(Checked.return Length.Checked.zero)) ) + ~else_:(Checked.return Length.Checked.zero))) in let%bind new_window_length = Checked.List.fold new_sub_window_densities ~init:Length.Checked.zero @@ -1255,7 +1285,7 @@ module Data = struct (Checked.return same_sub_window) ~then_:Length.Checked.(succ length) ~else_:Length.Checked.(succ zero)) - ~else_:(Checked.return length) ) + ~else_:(Checked.return length)) in return (min_window_density, sub_window_densities) end @@ -1289,7 +1319,7 @@ module Data = struct Array.init n ~f:(fun i -> if i + sub_window_diff < n then prev_sub_window_densities.(i + sub_window_diff) - else Length.zero ) + else Length.zero) in let new_window_length = Array.fold new_sub_window_densities ~init:Length.zero ~f:Length.add @@ -1304,8 +1334,8 @@ module Data = struct then prev_min_window_density else Length.min new_window_length prev_min_window_density in - new_sub_window_densities.(n - 1) - <- Length.succ new_sub_window_densities.(n - 1) ; + new_sub_window_densities.(n - 1) <- + Length.succ new_sub_window_densities.(n - 1) ; (min_window_density, new_sub_window_densities) let constants = Lazy.force Constants.for_unit_tests @@ -1323,7 +1353,7 @@ module Data = struct List.to_array @@ List.drop prev_sub_window_densities prev_relative_sub_window @ List.take prev_sub_window_densities prev_relative_sub_window - @ [List.nth_exn prev_sub_window_densities prev_relative_sub_window] + @ [ List.nth_exn prev_sub_window_densities prev_relative_sub_window ] (* slot_diff are generated in such a way so that we can test different cases in the update function, I use a weighted union to generate it. @@ -1343,7 +1373,7 @@ module Data = struct ( 1.0 /. (Float.of_int (i + 1) ** 2.) 
, Core.Int.gen_incl (i * to_int constants.slots_per_sub_window) - ((i + 1) * to_int constants.slots_per_sub_window) ) ) + ((i + 1) * to_int constants.slots_per_sub_window) )) let num_global_slots_to_test = 1 @@ -1364,12 +1394,12 @@ module Data = struct List.fold slot_diffs ~init:(prev_global_slot, []) ~f:(fun (prev_global_slot, acc) slot_diff -> let next_global_slot = prev_global_slot + slot_diff in - (next_global_slot, next_global_slot :: acc) ) + (next_global_slot, next_global_slot :: acc)) in return ( Global_slot.of_slot_number ~constants (GS.of_int prev_global_slot) , List.map global_slots ~f:(fun s -> - Global_slot.of_slot_number ~constants (GS.of_int s) ) + Global_slot.of_slot_number ~constants (GS.of_int s)) |> List.rev ) let gen_length = @@ -1403,20 +1433,20 @@ module Data = struct ( prev_global_slot , prev_sub_window_densities , prev_min_window_density ) - ~f:(fun ( prev_global_slot - , prev_sub_window_densities - , prev_min_window_density ) - next_global_slot + ~f:(fun + ( prev_global_slot + , prev_sub_window_densities + , prev_min_window_density ) + next_global_slot -> let min_window_density, sub_window_densities = f ~constants ~prev_global_slot ~next_global_slot ~prev_sub_window_densities ~prev_min_window_density in - (next_global_slot, sub_window_densities, min_window_density) ) + (next_global_slot, sub_window_densities, min_window_density)) - let update_several_times_checked ~f ~prev_global_slot - ~next_global_slots ~prev_sub_window_densities - ~prev_min_window_density ~constants = + let update_several_times_checked ~f ~prev_global_slot ~next_global_slots + ~prev_sub_window_densities ~prev_min_window_density ~constants = let open Tick.Checked in let open Tick.Checked.Let_syntax in List.fold next_global_slots @@ -1424,30 +1454,30 @@ module Data = struct ( prev_global_slot , prev_sub_window_densities , prev_min_window_density ) - ~f:(fun ( prev_global_slot - , prev_sub_window_densities - , prev_min_window_density ) - next_global_slot + ~f:(fun + ( 
prev_global_slot + , prev_sub_window_densities + , prev_min_window_density ) + next_global_slot -> let%bind min_window_density, sub_window_densities = f ~constants ~prev_global_slot ~next_global_slot ~prev_sub_window_densities ~prev_min_window_density in - return - (next_global_slot, sub_window_densities, min_window_density) ) + return (next_global_slot, sub_window_densities, min_window_density)) let%test_unit "the actual implementation is equivalent to the \ reference implementation" = Quickcheck.test ~trials:100 gen - ~f:(fun ( ((prev_global_slot : Global_slot.t), next_global_slots) - , (prev_min_window_density, prev_sub_window_densities) ) + ~f:(fun + ( ((prev_global_slot : Global_slot.t), next_global_slots) + , (prev_min_window_density, prev_sub_window_densities) ) -> let _, _, min_window_density1 = update_several_times ~f:(update_min_window_density ~incr_window:true) ~prev_global_slot ~next_global_slots - ~prev_sub_window_densities ~prev_min_window_density - ~constants + ~prev_sub_window_densities ~prev_min_window_density ~constants in let _, _, min_window_density2 = update_several_times @@ -1458,11 +1488,10 @@ module Data = struct ~prev_sub_window_densities) ~prev_min_window_density ~constants in - assert (Length.(equal min_window_density1 min_window_density2)) - ) + assert (Length.(equal min_window_density1 min_window_density2))) - let%test_unit "Inside snark computation is equivalent to outside \ - snark computation" = + let%test_unit "Inside snark computation is equivalent to outside snark \ + computation" = Quickcheck.test ~trials:100 gen ~f:(fun (slots, min_window_densities) -> Test_util.test_equal @@ -1487,7 +1516,7 @@ module Data = struct update_several_times_checked ~f:Checked.update_min_window_density ~prev_global_slot ~next_global_slots ~prev_sub_window_densities - ~prev_min_window_density ~constants ) + ~prev_min_window_density ~constants) (fun ( (prev_global_slot, next_global_slots) , (prev_min_window_density, prev_sub_window_densities) , 
constants ) -> @@ -1495,8 +1524,8 @@ module Data = struct ~f:(update_min_window_density ~incr_window:true) ~prev_global_slot ~next_global_slots ~prev_sub_window_densities ~prev_min_window_density - ~constants ) - (slots, min_window_densities, constants) ) + ~constants) + (slots, min_window_densities, constants)) end ) end @@ -1547,21 +1576,22 @@ module Data = struct , 'bool , 'pk ) t = - { blockchain_length: 'length - ; epoch_count: 'length - ; min_window_density: 'length - ; sub_window_densities: 'length list - ; last_vrf_output: 'vrf_output - ; total_currency: 'amount - ; curr_global_slot: 'global_slot - ; global_slot_since_genesis: 'global_slot_since_genesis - ; staking_epoch_data: 'staking_epoch_data - ; next_epoch_data: 'next_epoch_data - ; has_ancestor_in_same_checkpoint_window: 'bool - ; block_stake_winner: 'pk - ; block_creator: 'pk - ; coinbase_receiver: 'pk - ; supercharge_coinbase: 'bool } + { blockchain_length : 'length + ; epoch_count : 'length + ; min_window_density : 'length + ; sub_window_densities : 'length list + ; last_vrf_output : 'vrf_output + ; total_currency : 'amount + ; curr_global_slot : 'global_slot + ; global_slot_since_genesis : 'global_slot_since_genesis + ; staking_epoch_data : 'staking_epoch_data + ; next_epoch_data : 'next_epoch_data + ; has_ancestor_in_same_checkpoint_window : 'bool + ; block_stake_winner : 'pk + ; block_creator : 'pk + ; coinbase_receiver : 'pk + ; supercharge_coinbase : 'bool + } [@@deriving sexp, equal, compare, hash, yojson, fields, hlist] end end] @@ -1593,7 +1623,7 @@ module Data = struct let global_slot_since_genesis : Mina_numbers.Global_slot.t = slot_number in - {state with global_slot_since_genesis} + { state with global_slot_since_genesis } end end @@ -1631,7 +1661,8 @@ module Data = struct ; Public_key.Compressed.typ ; Public_key.Compressed.typ ; Public_key.Compressed.typ - ; Boolean.typ ] + ; Boolean.typ + ] let typ ~constraint_constants : (var, Value.t) Typ.t = Snark_params.Tick.Typ.of_hlistable @@ 
-1654,10 +1685,11 @@ module Data = struct ; block_stake_winner ; block_creator ; coinbase_receiver - ; supercharge_coinbase } : + ; supercharge_coinbase + } : Value.t) = let input = - { Random_oracle.Input.bitstrings= + { Random_oracle.Input.bitstrings = [| Length.Bits.to_bits blockchain_length ; Length.Bits.to_bits epoch_count ; Length.Bits.to_bits min_window_density @@ -1666,9 +1698,10 @@ module Data = struct ; Amount.to_bits total_currency ; Global_slot.to_bits curr_global_slot ; Mina_numbers.Global_slot.to_bits global_slot_since_genesis - ; [has_ancestor_in_same_checkpoint_window; supercharge_coinbase] + ; [ has_ancestor_in_same_checkpoint_window; supercharge_coinbase ] |] - ; field_elements= [||] } + ; field_elements = [||] + } in List.reduce_exn ~f:Random_oracle.Input.append [ input @@ -1676,7 +1709,8 @@ module Data = struct ; Epoch_data.Next.to_input next_epoch_data ; Public_key.Compressed.to_input block_stake_winner ; Public_key.Compressed.to_input block_creator - ; Public_key.Compressed.to_input coinbase_receiver ] + ; Public_key.Compressed.to_input coinbase_receiver + ] let var_to_input ({ Poly.blockchain_length @@ -1693,7 +1727,8 @@ module Data = struct ; block_stake_winner ; block_creator ; coinbase_receiver - ; supercharge_coinbase } : + ; supercharge_coinbase + } : var) = let open Tick.Checked.Let_syntax in let%map input = @@ -1709,9 +1744,9 @@ module Data = struct and sub_window_densities = Checked.List.fold sub_window_densities ~init:[] ~f:(fun acc l -> let%map res = length l in - List.append acc res ) + List.append acc res) in - { Random_oracle.Input.bitstrings= + { Random_oracle.Input.bitstrings = [| blockchain_length ; epoch_count ; min_window_density @@ -1720,9 +1755,10 @@ module Data = struct ; bs (Amount.var_to_bits total_currency) ; curr_global_slot ; global_slot_since_genesis - ; [has_ancestor_in_same_checkpoint_window; supercharge_coinbase] + ; [ has_ancestor_in_same_checkpoint_window; supercharge_coinbase ] |] - ; field_elements= [||] } + 
; field_elements = [||] + } and staking_epoch_data = Epoch_data.Staking.var_to_input staking_epoch_data and next_epoch_data = Epoch_data.Next.var_to_input next_epoch_data in @@ -1741,14 +1777,14 @@ module Data = struct ; next_epoch_data ; block_stake_winner ; block_creator - ; coinbase_receiver ] + ; coinbase_receiver + ] - let global_slot {Poly.curr_global_slot; _} = curr_global_slot + let global_slot { Poly.curr_global_slot; _ } = curr_global_slot let checkpoint_window ~(constants : Constants.t) (slot : Global_slot.t) = UInt32.Infix.( - Global_slot.slot_number slot - / constants.checkpoint_window_size_in_slots) + Global_slot.slot_number slot / constants.checkpoint_window_size_in_slots) let same_checkpoint_window_unchecked ~constants slot1 slot2 = UInt32.equal @@ -1823,27 +1859,28 @@ module Data = struct previous_consensus_state.sub_window_densities ~prev_min_window_density:previous_consensus_state.min_window_density in - { Poly.blockchain_length= + { Poly.blockchain_length = Length.succ previous_consensus_state.blockchain_length ; epoch_count ; min_window_density ; sub_window_densities - ; last_vrf_output= Vrf.Output.truncate producer_vrf_result + ; last_vrf_output = Vrf.Output.truncate producer_vrf_result ; total_currency - ; curr_global_slot= next_global_slot - ; global_slot_since_genesis= + ; curr_global_slot = next_global_slot + ; global_slot_since_genesis = Mina_numbers.Global_slot.add previous_consensus_state.global_slot_since_genesis slot_diff ; staking_epoch_data ; next_epoch_data - ; has_ancestor_in_same_checkpoint_window= + ; has_ancestor_in_same_checkpoint_window = same_checkpoint_window_unchecked ~constants (Global_slot.create ~constants ~epoch:prev_epoch ~slot:prev_slot) (Global_slot.create ~constants ~epoch:next_epoch ~slot:next_slot) ; block_stake_winner ; block_creator ; coinbase_receiver - ; supercharge_coinbase } + ; supercharge_coinbase + } let same_checkpoint_window ~(constants : Constants.var) ~prev:(slot1 : Global_slot.Checked.t) @@ -1879,43 
+1916,44 @@ module Data = struct match constraint_constants.fork with | None -> (Length.zero, Mina_numbers.Global_slot.zero) - | Some {previous_length; previous_global_slot; _} -> + | Some { previous_length; previous_global_slot; _ } -> (*Note: global_slot_since_genesis at fork point is the same as global_slot_since_genesis in the new genesis. This value is used to check transaction validity and existence of locked tokens. - For reviewers, should this be incremented by 1 because it's technically a new block? we don't really know how many slots passed since the fork point*) + For reviewers, should this be incremented by 1 because it's technically a new block? we don't really know how many slots passed since the fork point*) (previous_length, previous_global_slot) in let default_epoch_data = Genesis_epoch_data.Data. - {ledger= genesis_ledger; seed= Epoch_seed.initial} + { ledger = genesis_ledger; seed = Epoch_seed.initial } in let genesis_epoch_data_staking, genesis_epoch_data_next = Option.value_map genesis_epoch_data ~default:(default_epoch_data, default_epoch_data) ~f:(fun data -> - (data.staking, Option.value ~default:data.staking data.next) ) + (data.staking, Option.value ~default:data.staking data.next)) in let genesis_winner_pk = fst Vrf.Precomputed.genesis_winner in { Poly.blockchain_length - ; epoch_count= Length.zero - ; min_window_density= max_window_density - ; sub_window_densities= + ; epoch_count = Length.zero + ; min_window_density = max_window_density + ; sub_window_densities = Length.zero :: List.init (Length.to_int constants.sub_windows_per_window - 1) ~f:(Fn.const max_sub_window_density) - ; last_vrf_output= Vrf.Output.Truncated.dummy - ; total_currency= genesis_ledger_total_currency ~ledger:genesis_ledger - ; curr_global_slot= Global_slot.zero ~constants + ; last_vrf_output = Vrf.Output.Truncated.dummy + ; total_currency = genesis_ledger_total_currency ~ledger:genesis_ledger + ; curr_global_slot = Global_slot.zero ~constants ; 
global_slot_since_genesis - ; staking_epoch_data= + ; staking_epoch_data = Epoch_data.Staking.genesis ~genesis_epoch_data:genesis_epoch_data_staking - ; next_epoch_data= + ; next_epoch_data = Epoch_data.Next.genesis ~genesis_epoch_data:genesis_epoch_data_next - ; has_ancestor_in_same_checkpoint_window= false - ; block_stake_winner= genesis_winner_pk - ; block_creator= genesis_winner_pk - ; coinbase_receiver= genesis_winner_pk - ; supercharge_coinbase= true } + ; has_ancestor_in_same_checkpoint_window = false + ; block_stake_winner = genesis_winner_pk + ; block_creator = genesis_winner_pk + ; coinbase_receiver = genesis_winner_pk + ; supercharge_coinbase = true + } let create_genesis_from_transition ~negative_one_protocol_state_hash ~consensus_transition ~genesis_ledger @@ -1928,9 +1966,10 @@ module Data = struct let producer_vrf_result = let _, sk = Vrf.Precomputed.genesis_winner in Vrf.eval ~constraint_constants ~private_key:sk - { Vrf.Message.global_slot= consensus_transition - ; seed= staking_seed - ; delegator= 0 } + { Vrf.Message.global_slot = consensus_transition + ; seed = staking_seed + ; delegator = 0 + } in let snarked_ledger_hash = Lazy.force genesis_ledger |> Mina_base.Ledger.merkle_root @@ -1949,8 +1988,8 @@ module Data = struct ~consensus_transition ~supply_increase:Currency.Amount.zero ~snarked_ledger_hash ~genesis_ledger_hash:snarked_ledger_hash ~block_stake_winner:genesis_winner_pk - ~block_creator:genesis_winner_pk - ~coinbase_receiver:genesis_winner_pk ~supercharge_coinbase:true) + ~block_creator:genesis_winner_pk ~coinbase_receiver:genesis_winner_pk + ~supercharge_coinbase:true) let create_genesis ~negative_one_protocol_state_hash ~genesis_ledger ~genesis_epoch_data ~constraint_constants ~constants : Value.t = @@ -1997,7 +2036,7 @@ module Data = struct let%bind constants = Constants.Checked.create ~constraint_constants ~protocol_constants in - let {Poly.curr_global_slot= prev_global_slot; _} = previous_state in + let { Poly.curr_global_slot = 
prev_global_slot; _ } = previous_state in let next_global_slot = Global_slot.Checked.of_slot_number ~constants transition_data in @@ -2010,7 +2049,7 @@ module Data = struct Global_slot.Checked.(prev_global_slot < next_global_slot) in let%bind is_genesis = is_genesis next_global_slot in - Boolean.Assert.any [global_slot_increased; is_genesis] + Boolean.Assert.any [ global_slot_increased; is_genesis ] in let%bind next_epoch, next_slot = Global_slot.Checked.to_epoch_and_slot next_global_slot @@ -2090,8 +2129,9 @@ module Data = struct and ledger = Epoch_ledger.if_ update_next_epoch_ledger ~then_: - { total_currency= new_total_currency - ; hash= previous_blockchain_state_ledger_hash } + { total_currency = new_total_currency + ; hash = previous_blockchain_state_ledger_hash + } ~else_:previous_state.next_epoch_data.ledger and start_checkpoint = Mina_base.State_hash.if_ epoch_increased @@ -2113,7 +2153,8 @@ module Data = struct ; epoch_length ; ledger ; start_checkpoint - ; lock_checkpoint } + ; lock_checkpoint + } and blockchain_length = Length.Checked.succ previous_state.blockchain_length (* TODO: keep track of total_currency in transaction snark. 
The current_slot @@ -2135,36 +2176,39 @@ module Data = struct ; epoch_count ; min_window_density ; sub_window_densities - ; last_vrf_output= truncated_vrf_result - ; curr_global_slot= next_global_slot + ; last_vrf_output = truncated_vrf_result + ; curr_global_slot = next_global_slot ; global_slot_since_genesis - ; total_currency= new_total_currency + ; total_currency = new_total_currency ; staking_epoch_data ; next_epoch_data ; has_ancestor_in_same_checkpoint_window ; block_stake_winner ; block_creator ; coinbase_receiver - ; supercharge_coinbase } ) + ; supercharge_coinbase + } ) type display = - { blockchain_length: int - ; epoch_count: int - ; curr_epoch: int - ; curr_slot: int - ; global_slot_since_genesis: int - ; total_currency: int } + { blockchain_length : int + ; epoch_count : int + ; curr_epoch : int + ; curr_slot : int + ; global_slot_since_genesis : int + ; total_currency : int + } [@@deriving yojson] let display (t : Value.t) = let epoch, slot = Global_slot.to_epoch_and_slot t.curr_global_slot in - { blockchain_length= Length.to_int t.blockchain_length - ; epoch_count= Length.to_int t.epoch_count - ; curr_epoch= Segment_id.to_int epoch - ; curr_slot= Segment_id.to_int slot - ; global_slot_since_genesis= + { blockchain_length = Length.to_int t.blockchain_length + ; epoch_count = Length.to_int t.epoch_count + ; curr_epoch = Segment_id.to_int epoch + ; curr_slot = Segment_id.to_int slot + ; global_slot_since_genesis = Mina_numbers.Global_slot.to_int t.global_slot_since_genesis - ; total_currency= Amount.to_int t.total_currency } + ; total_currency = Amount.to_int t.total_currency + } let curr_global_slot (t : Value.t) = t.curr_global_slot @@ -2182,8 +2226,7 @@ module Data = struct let total_currency_var (t : var) = t.total_currency - let staking_epoch_data_var (t : var) : Epoch_data.var = - t.staking_epoch_data + let staking_epoch_data_var (t : var) : Epoch_data.var = t.staking_epoch_data let staking_epoch_data (t : Value.t) = t.staking_epoch_data @@ 
-2221,7 +2264,10 @@ module Data = struct if increase_epoch_count then Length.succ t.epoch_count else t.epoch_count in - {t with epoch_count= new_epoch_count; curr_global_slot= new_global_slot} + { t with + epoch_count = new_epoch_count + ; curr_global_slot = new_global_slot + } end let graphql_type () : ('ctx, Value.t option) Graphql_async.Schema.typ = @@ -2235,66 +2281,67 @@ module Data = struct ~doc:"Length of the blockchain at this block" ~deprecated:(Deprecated (Some "use blockHeight instead")) ~args:Arg.[] - ~resolve:(fun _ {Poly.blockchain_length; _} -> - Mina_numbers.Length.to_uint32 blockchain_length ) + ~resolve:(fun _ { Poly.blockchain_length; _ } -> + Mina_numbers.Length.to_uint32 blockchain_length) ; field "blockHeight" ~typ:(non_null uint32) ~doc:"Height of the blockchain at this block" ~args:Arg.[] - ~resolve:(fun _ {Poly.blockchain_length; _} -> - Mina_numbers.Length.to_uint32 blockchain_length ) + ~resolve:(fun _ { Poly.blockchain_length; _ } -> + Mina_numbers.Length.to_uint32 blockchain_length) ; field "epochCount" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {Poly.epoch_count; _} -> - Mina_numbers.Length.to_uint32 epoch_count ) + ~resolve:(fun _ { Poly.epoch_count; _ } -> + Mina_numbers.Length.to_uint32 epoch_count) ; field "minWindowDensity" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {Poly.min_window_density; _} -> - Mina_numbers.Length.to_uint32 min_window_density ) + ~resolve:(fun _ { Poly.min_window_density; _ } -> + Mina_numbers.Length.to_uint32 min_window_density) ; field "lastVrfOutput" ~typ:(non_null string) ~args:Arg.[] ~resolve: - (fun (_ : 'ctx resolve_info) {Poly.last_vrf_output; _} -> - Vrf.Output.Truncated.to_base58_check last_vrf_output ) + (fun (_ : 'ctx resolve_info) { Poly.last_vrf_output; _ } -> + Vrf.Output.Truncated.to_base58_check last_vrf_output) ; field "totalCurrency" ~doc:"Total currency in circulation at this block" ~typ:(non_null uint64) ~args:Arg.[] - ~resolve:(fun _ {Poly.total_currency; _} -> - 
Amount.to_uint64 total_currency ) + ~resolve:(fun _ { Poly.total_currency; _ } -> + Amount.to_uint64 total_currency) ; field "stakingEpochData" ~typ: (non_null @@ Epoch_data.Staking.graphql_type "StakingEpochData") ~args:Arg.[] ~resolve: - (fun (_ : 'ctx resolve_info) {Poly.staking_epoch_data; _} -> - staking_epoch_data ) + (fun (_ : 'ctx resolve_info) { Poly.staking_epoch_data; _ } -> + staking_epoch_data) ; field "nextEpochData" ~typ:(non_null @@ Epoch_data.Next.graphql_type "NextEpochData") ~args:Arg.[] ~resolve: - (fun (_ : 'ctx resolve_info) {Poly.next_epoch_data; _} -> - next_epoch_data ) + (fun (_ : 'ctx resolve_info) { Poly.next_epoch_data; _ } -> + next_epoch_data) ; field "hasAncestorInSameCheckpointWindow" ~typ:(non_null bool) ~args:Arg.[] ~resolve: - (fun _ {Poly.has_ancestor_in_same_checkpoint_window; _} -> - has_ancestor_in_same_checkpoint_window ) + (fun _ { Poly.has_ancestor_in_same_checkpoint_window; _ } -> + has_ancestor_in_same_checkpoint_window) ; field "slot" ~doc:"Slot in which this block was created" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {Poly.curr_global_slot; _} -> - Global_slot.slot curr_global_slot ) + ~resolve:(fun _ { Poly.curr_global_slot; _ } -> + Global_slot.slot curr_global_slot) ; field "slotSinceGenesis" ~doc:"Slot since genesis (across all hard-forks)" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {Poly.global_slot_since_genesis; _} -> - global_slot_since_genesis ) + ~resolve:(fun _ { Poly.global_slot_since_genesis; _ } -> + global_slot_since_genesis) ; field "epoch" ~doc:"Epoch in which this block was created" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {Poly.curr_global_slot; _} -> - Global_slot.epoch curr_global_slot ) ] ) + ~resolve:(fun _ { Poly.curr_global_slot; _ } -> + Global_slot.epoch curr_global_slot) + ]) end module Prover_state = struct @@ -2310,10 +2357,11 @@ module Data = struct ; coinbase_receiver_pk ; ledger ; producer_private_key - ; producer_public_key } - 
~(constraint_constants : Genesis_constants.Constraint_constants.t) - ~pending_coinbase:{ Mina_base.Pending_coinbase_witness.pending_coinbases - ; is_new_stack } : Snark_params.Tick.Handler.t = + ; producer_public_key + } ~(constraint_constants : Genesis_constants.Constraint_constants.t) + ~pending_coinbase: + { Mina_base.Pending_coinbase_witness.pending_coinbases; is_new_stack } + : Snark_params.Tick.Handler.t = let ledger_handler = unstage (Mina_base.Sparse_ledger.handler ledger) in let pending_coinbase_handler = unstage @@ -2327,7 +2375,7 @@ module Data = struct (push fail (create_single pending_coinbase_handler)) (create_single ledger_handler)) in - fun (With {request; respond}) -> + fun (With { request; respond }) -> match request with | Vrf.Winner_address -> respond (Provide delegator) @@ -2343,15 +2391,15 @@ module Data = struct respond (Provide (Snarky_backendless.Request.Handler.run handlers - ["Ledger Handler"; "Pending Coinbase Handler"] + [ "Ledger Handler"; "Pending Coinbase Handler" ] request)) - let ledger_depth {ledger; _} = ledger.depth + let ledger_depth { ledger; _ } = ledger.depth end end module Coinbase_receiver = struct - type t = [`Producer | `Other of Public_key.Compressed.t] [@@deriving yojson] + type t = [ `Producer | `Other of Public_key.Compressed.t ] [@@deriving yojson] let resolve ~self : t -> Public_key.Compressed.t = function | `Producer -> @@ -2392,13 +2440,13 @@ module Hooks = struct module V1 = struct module T = struct type query = Mina_base.Ledger_hash.Stable.V1.t - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] type response = ( Mina_base.Sparse_ledger.Stable.V1.t , string ) Core_kernel.Result.Stable.V1.t - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -2410,7 +2458,8 @@ module Hooks = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master 
end) @@ -2440,7 +2489,8 @@ module Hooks = struct else let candidate_snapshots = [ !local_state.Data.staking_epoch_snapshot - ; !local_state.Data.next_epoch_snapshot ] + ; !local_state.Data.next_epoch_snapshot + ] in let res = List.find_map candidate_snapshots ~f:(fun snapshot -> @@ -2466,7 +2516,7 @@ module Hooks = struct @@ Mina_base.Ledger.Any_ledger.cast (module Mina_base.Ledger.Db) ledger )) - else None ) + else None) in Option.value res ~default:(Error "epoch ledger not found") in @@ -2476,11 +2526,12 @@ module Hooks = struct [ ("peer", Network_peer.Peer.to_yojson conn) ; ("error", `String err) ; ( "ledger_hash" - , Mina_base.Ledger_hash.to_yojson ledger_hash ) ] + , Mina_base.Ledger_hash.to_yojson ledger_hash ) + ] "Failed to serve epoch ledger query with hash $ledger_hash \ - from $peer: $error" ) ; + from $peer: $error") ; if Ivar.is_full ivar then [%log error] "Ivar.fill bug is here!" ; - Ivar.fill ivar response ) + Ivar.fill ivar response) end open Mina_base.Rpc_intf @@ -2491,37 +2542,42 @@ module Hooks = struct type rpc_handler = | Rpc_handler : - { rpc: ('q, 'r) rpc - ; f: ('q, 'r) rpc_fn - ; cost: 'q -> int - ; budget: int * [`Per of Core.Time.Span.t] } + { rpc : ('q, 'r) rpc + ; f : ('q, 'r) rpc_fn + ; cost : 'q -> int + ; budget : int * [ `Per of Core.Time.Span.t ] + } -> rpc_handler type query = - { query: + { query : 'q 'r. Network_peer.Peer.t -> ('q, 'r) rpc -> 'q - -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t } + -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t + } - let implementation_of_rpc : type q r. - (q, r) rpc -> (q, r) rpc_implementation = function + let implementation_of_rpc : + type q r. (q, r) rpc -> (q, r) rpc_implementation = function | Get_epoch_ledger -> (module Get_epoch_ledger) - let match_handler : type q r. + let match_handler : + type q r. 
rpc_handler -> (q, r) rpc -> do_:((q, r) rpc_fn -> 'a) -> 'a option = fun handler rpc ~do_ -> match (rpc, handler) with - | Get_epoch_ledger, Rpc_handler {rpc= Get_epoch_ledger; f; _} -> + | Get_epoch_ledger, Rpc_handler { rpc = Get_epoch_ledger; f; _ } -> Some (do_ f) let rpc_handlers ~logger ~local_state ~genesis_ledger_hash = [ Rpc_handler - { rpc= Get_epoch_ledger - ; f= + { rpc = Get_epoch_ledger + ; f = Get_epoch_ledger.implementation ~logger ~local_state ~genesis_ledger_hash - ; cost= (fun _ -> 1) - ; budget= (2, `Per Core.Time.Span.minute) } ] + ; cost = (fun _ -> 1) + ; budget = (2, `Per Core.Time.Span.minute) + } + ] end let is_genesis_epoch ~(constants : Constants.t) time = @@ -2532,7 +2588,7 @@ module Hooks = struct * the consensus state we are selecting from is in the epoch we want to select. * There is also a special case for when the consensus state we are selecting * from is in the genesis epoch. - *) + *) let select_epoch_data ~(consensus_state : Consensus_state.Value.t) ~epoch = let curr_epoch = Consensus_state.curr_epoch consensus_state in (* are we in the same epoch as the consensus state? *) @@ -2569,7 +2625,7 @@ module Hooks = struct * This function does not guarantee that the selected epoch snapshot is valid * (i.e. it does not check that the epoch snapshot's ledger hash is the same * as the ledger hash specified by the epoch data). 
- *) + *) let select_epoch_snapshot ~(constants : Constants.t) ~(consensus_state : Consensus_state.Value.t) ~local_state ~epoch = let open Local_state in @@ -2601,15 +2657,17 @@ module Hooks = struct Data.Local_state.Snapshot.ledger snapshot type required_snapshot = - { snapshot_id: Local_state.snapshot_identifier - ; expected_root: Mina_base.Frozen_ledger_hash.t } + { snapshot_id : Local_state.snapshot_identifier + ; expected_root : Mina_base.Frozen_ledger_hash.t + } [@@deriving to_yojson] type local_state_sync = | One of required_snapshot | Both of - { next: Mina_base.Frozen_ledger_hash.t - ; staking: Mina_base.Frozen_ledger_hash.t } + { next : Mina_base.Frozen_ledger_hash.t + ; staking : Mina_base.Frozen_ledger_hash.t + } [@@deriving to_yojson] let local_state_sync_count (s : local_state_sync) = @@ -2629,7 +2687,7 @@ module Hooks = struct (Frozen_ledger_hash.to_ledger_hash expected_root) (Local_state.Snapshot.Ledger_snapshot.merkle_root (Local_state.get_snapshot local_state snapshot_id).ledger))) - {snapshot_id; expected_root} + { snapshot_id; expected_root } in match source with | `Curr -> @@ -2637,19 +2695,21 @@ module Hooks = struct (required_snapshot_sync Next_epoch_snapshot consensus_state.staking_epoch_data.ledger.hash) ~f:(fun s -> One s) | `Last -> ( - match - ( required_snapshot_sync Next_epoch_snapshot - consensus_state.next_epoch_data.ledger.hash - , required_snapshot_sync Staking_epoch_snapshot - consensus_state.staking_epoch_data.ledger.hash ) - with - | None, None -> - None - | Some x, None | None, Some x -> - Some (One x) - | Some next, Some staking -> - Some - (Both {next= next.expected_root; staking= staking.expected_root}) ) + match + ( required_snapshot_sync Next_epoch_snapshot + consensus_state.next_epoch_data.ledger.hash + , required_snapshot_sync Staking_epoch_snapshot + consensus_state.staking_epoch_data.ledger.hash ) + with + | None, None -> + None + | Some x, None | None, Some x -> + Some (One x) + | Some next, Some staking -> + Some + 
(Both + { next = next.expected_root; staking = staking.expected_root }) + ) let sync_local_state ~logger ~trust_system ~local_state ~random_peers ~(query_peer : Rpcs.query) ~ledger_depth requested_syncs = @@ -2661,8 +2721,9 @@ module Hooks = struct ~metadata: [ ("num_requested", `Int (local_state_sync_count requested_syncs)) ; ("requested_syncs", local_state_sync_to_yojson requested_syncs) - ; ("local_state", Local_state.to_yojson local_state) ] ; - let sync {snapshot_id; expected_root= target_ledger_hash} = + ; ("local_state", Local_state.to_yojson local_state) + ] ; + let sync { snapshot_id; expected_root = target_ledger_hash } = (* if requested last epoch ledger is equal to the current epoch ledger then we don't need make a rpc call to the peers. *) if @@ -2688,9 +2749,10 @@ module Hooks = struct () in set_snapshot local_state Staking_epoch_snapshot - { ledger= Ledger_snapshot.Ledger_db ledger - ; delegatee_table= - !local_state.next_epoch_snapshot.delegatee_table } ; + { ledger = Ledger_snapshot.Ledger_db ledger + ; delegatee_table = + !local_state.next_epoch_snapshot.delegatee_table + } ; Deferred.Or_error.ok_unit ) else let%bind peers = random_peers 5 in @@ -2706,40 +2768,43 @@ module Hooks = struct (Mina_base.Frozen_ledger_hash.to_ledger_hash target_ledger_hash) with - | Connected {data= Ok (Ok sparse_ledger); _} -> ( - match - reset_snapshot local_state snapshot_id ~sparse_ledger - ~ledger_depth - with - | Ok () -> - (*Don't fail if recording fails*) - don't_wait_for - Trust_system.( - record trust_system logger peer - Actions.(Epoch_ledger_provided, None)) ; - Deferred.Or_error.ok_unit - | Error e -> - [%log faulty_peer_without_punishment] - ~metadata: - [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", Error_json.error_to_yojson e) ] - "Peer $peer failed to serve requested epoch ledger: \ - $error" ; - return (Error e) ) - | Connected {data= Ok (Error err); _} -> + | Connected { data = Ok (Ok sparse_ledger); _ } -> ( + match + reset_snapshot 
local_state snapshot_id ~sparse_ledger + ~ledger_depth + with + | Ok () -> + (*Don't fail if recording fails*) + don't_wait_for + Trust_system.( + record trust_system logger peer + Actions.(Epoch_ledger_provided, None)) ; + Deferred.Or_error.ok_unit + | Error e -> + [%log faulty_peer_without_punishment] + ~metadata: + [ ("peer", Network_peer.Peer.to_yojson peer) + ; ("error", Error_json.error_to_yojson e) + ] + "Peer $peer failed to serve requested epoch ledger: \ + $error" ; + return (Error e) ) + | Connected { data = Ok (Error err); _ } -> (* TODO figure out punishments here. *) [%log faulty_peer_without_punishment] ~metadata: [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", `String err) ] + ; ("error", `String err) + ] "Peer $peer failed to serve requested epoch ledger: \ $error" ; return (Or_error.error_string err) - | Connected {data= Error err; _} -> + | Connected { data = Error err; _ } -> [%log faulty_peer_without_punishment] ~metadata: [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", `String (Error.to_string_mach err)) ] + ; ("error", `String (Error.to_string_mach err)) + ] "Peer $peer failed to serve requested epoch ledger: \ $error" ; return (Error err) @@ -2747,21 +2812,22 @@ module Hooks = struct [%log faulty_peer_without_punishment] ~metadata: [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] "Failed to connect to $peer to retrieve epoch ledger: \ $error" ; - return (Error err) ) ) + return (Error err) )) in match requested_syncs with | One required_sync -> sync required_sync - | Both {staking; next} -> + | Both { staking; next } -> (*Sync staking ledger before syncing the next ledger*) let open Deferred.Or_error.Let_syntax in let%bind () = - sync {snapshot_id= Staking_epoch_snapshot; expected_root= staking} + sync { snapshot_id = Staking_epoch_snapshot; expected_root = staking } in - sync {snapshot_id= Next_epoch_snapshot; expected_root= 
next} + sync { snapshot_id = Next_epoch_snapshot; expected_root = next } let received_within_window ~constants (epoch, slot) ~time_received = let open Int64 in @@ -2769,8 +2835,7 @@ module Hooks = struct let ( >= ) x y = compare x y >= 0 in let time_received = Time.( - of_span_since_epoch - (Span.of_ms (Unix_timestamp.to_int64 time_received))) + of_span_since_epoch (Span.of_ms (Unix_timestamp.to_int64 time_received))) in let slot_diff = Epoch.diff_in_slots ~constants @@ -2778,8 +2843,8 @@ module Hooks = struct (epoch, slot) in if slot_diff < 0L then Error `Too_early - else if slot_diff >= UInt32.(to_int64 (add constants.delta (of_int 1))) - then Error (`Too_late (sub slot_diff UInt32.(to_int64 constants.delta))) + else if slot_diff >= UInt32.(to_int64 (add constants.delta (of_int 1))) then + Error (`Too_late (sub slot_diff UInt32.(to_int64 constants.delta))) else Ok () let received_at_valid_time ~(constants : Constants.t) @@ -2806,12 +2871,14 @@ module Hooks = struct c2.staking_epoch_data.lock_checkpoint else pred_case c1 c2 || pred_case c2 c1 - type select_status = [`Keep | `Take] [@@deriving equal] + type select_status = [ `Keep | `Take ] [@@deriving equal] let select ~constants ~existing:existing_with_hash ~candidate:candidate_with_hash ~logger = - let {With_hash.hash= existing_hash; data= existing} = existing_with_hash in - let {With_hash.hash= candidate_hash; data= candidate} = + let { With_hash.hash = existing_hash; data = existing } = + existing_with_hash + in + let { With_hash.hash = candidate_hash; data = candidate } = candidate_with_hash in let string_of_choice = function `Take -> "Take" | `Keep -> "Keep" in @@ -2819,7 +2886,8 @@ module Hooks = struct [%log debug] "Select result: $choice -- $message" ~metadata: [ ("choice", `String (string_of_choice choice)) - ; ("message", `String msg) ] + ; ("message", `String msg) + ] in let log_choice ~precondition_msg ~choice_msg choice = let choice_msg = @@ -2835,7 +2903,8 @@ module Hooks = struct [%log debug] 
"Selecting best consensus state" ~metadata: [ ("existing", Consensus_state.Value.to_yojson existing) - ; ("candidate", Consensus_state.Value.to_yojson candidate) ] ; + ; ("candidate", Consensus_state.Value.to_yojson candidate) + ] ; (* TODO: add fork_before_checkpoint check *) (* Each branch contains a precondition predicate and a choice predicate, * which takes the new state when true. Each predicate is also decorated @@ -2852,9 +2921,8 @@ module Hooks = struct let compare_blake2 a b = String.compare (string_of_blake2 a) (string_of_blake2 b) in - less_than_or_equal_when existing.last_vrf_output - candidate.last_vrf_output ~compare:compare_blake2 - ~condition:candidate_hash_is_bigger + less_than_or_equal_when existing.last_vrf_output candidate.last_vrf_output + ~compare:compare_blake2 ~condition:candidate_hash_is_bigger in let blockchain_length_is_longer = less_than_or_equal_when existing.blockchain_length @@ -2956,8 +3024,8 @@ module Hooks = struct let total_stake = epoch_data.ledger.total_currency in let epoch_snapshot = let source, snapshot = - select_epoch_snapshot ~constants ~consensus_state:state - ~local_state ~epoch + select_epoch_snapshot ~constants ~consensus_state:state ~local_state + ~epoch in let snapshot_ledger_hash = Local_state.Snapshot.Ledger_snapshot.merkle_root snapshot.ledger @@ -2965,8 +3033,8 @@ module Hooks = struct [%log debug] ~metadata: [ ( "ledger_hash" - , Mina_base.Frozen_ledger_hash.to_yojson snapshot_ledger_hash - ) ] + , Mina_base.Frozen_ledger_hash.to_yojson snapshot_ledger_hash ) + ] !"Using %s_epoch_snapshot root hash $ledger_hash" (epoch_snapshot_name source) ; (*TODO: uncomment after #6956 is resolved*) @@ -2978,7 +3046,7 @@ module Hooks = struct let block_data unseen_pks slot = (* Try vrfs for all keypairs that are unseen within this slot until one wins or all lose *) (* TODO: Don't do this, and instead pick the one that has the highest - * chance of winning. See #2573 *) + * chance of winning. 
See #2573 *) Keypair.And_compressed_pk.Set.fold_until keypairs ~init:() ~f:(fun () (keypair, public_key_compressed) -> let coinbase_receiver = @@ -3010,9 +3078,9 @@ module Hooks = struct ; ("state", Consensus_state.Value.to_yojson state) ] ; failwith - "Checking slot-winner for a slot which is older \ - than the slot in the latest consensus state. \ - System time might be out-of-sync" + "Checking slot-winner for a slot which is older than \ + the slot in the latest consensus state. System time \ + might be out-of-sync" | Some diff -> diff in @@ -3024,7 +3092,8 @@ module Hooks = struct "Checking VRF evaluations at epoch: $epoch, slot: $slot" ~metadata: [ ("epoch", `Int (Epoch.to_int epoch)) - ; ("slot", `Int (Slot.to_int slot)) ] ; + ; ("slot", `Int (Slot.to_int slot)) + ] ; match Vrf.check ~constraint_constants ~global_slot:(Global_slot.slot_number global_slot) @@ -3036,7 +3105,7 @@ module Hooks = struct | None -> Continue_or_stop.Continue () | Some (data, delegator_pk) -> - Continue_or_stop.Stop (Some (data, delegator_pk)) ) + Continue_or_stop.Stop (Some (data, delegator_pk))) ~finish:(fun () -> None) in let rec find_winning_slot (slot : Slot.t) = @@ -3089,8 +3158,8 @@ module Hooks = struct (Consensus_state.curr_epoch prev) (Consensus_state.curr_epoch next)) then ( - !local_state.last_epoch_delegatee_table - <- Some !local_state.staking_epoch_snapshot.delegatee_table ; + !local_state.last_epoch_delegatee_table <- + Some !local_state.staking_epoch_snapshot.delegatee_table ; Local_state.Snapshot.Ledger_snapshot.remove !local_state.staking_epoch_snapshot.ledger ~location:(Local_state.staking_epoch_ledger_location local_state) ; @@ -3103,27 +3172,29 @@ module Hooks = struct then ( let epoch_ledger_uuids = Local_state.Data. 
- { staking= !local_state.epoch_ledger_uuids.next - ; next= Uuid_unix.create () - ; genesis_state_hash= - !local_state.epoch_ledger_uuids.genesis_state_hash } + { staking = !local_state.epoch_ledger_uuids.next + ; next = Uuid_unix.create () + ; genesis_state_hash = + !local_state.epoch_ledger_uuids.genesis_state_hash + } in !local_state.epoch_ledger_uuids <- epoch_ledger_uuids ; Yojson.Safe.to_file (!local_state.epoch_ledger_location ^ ".json") (Local_state.epoch_ledger_uuids_to_yojson epoch_ledger_uuids) ; - !local_state.next_epoch_snapshot - <- { ledger= - Local_state.Snapshot.Ledger_snapshot.Ledger_db - (Mina_base.Ledger.Db.create_checkpoint snarked_ledger - ~directory_name: - ( !local_state.epoch_ledger_location - ^ Uuid.to_string epoch_ledger_uuids.next ) - ()) - ; delegatee_table= - compute_delegatee_table_ledger_db - (Local_state.current_block_production_keys local_state) - snarked_ledger } ) ) + !local_state.next_epoch_snapshot <- + { ledger = + Local_state.Snapshot.Ledger_snapshot.Ledger_db + (Mina_base.Ledger.Db.create_checkpoint snarked_ledger + ~directory_name: + ( !local_state.epoch_ledger_location + ^ Uuid.to_string epoch_ledger_uuids.next ) + ()) + ; delegatee_table = + compute_delegatee_table_ledger_db + (Local_state.current_block_production_keys local_state) + snarked_ledger + } ) ) let should_bootstrap_len ~(constants : Constants.t) ~existing ~candidate = let open UInt32.Infix in @@ -3131,8 +3202,7 @@ module Hooks = struct ((UInt32.of_int 2 * constants.k) + (constants.delta + UInt32.of_int 1)) > 0 - let should_bootstrap ~(constants : Constants.t) ~existing ~candidate ~logger - = + let should_bootstrap ~(constants : Constants.t) ~existing ~candidate ~logger = match select ~constants ~existing ~candidate ~logger with | `Keep -> false @@ -3193,7 +3263,8 @@ module Hooks = struct let consensus_state = { negative_one with curr_global_slot - ; global_slot_since_genesis= Global_slot.slot_number curr_global_slot } + ; global_slot_since_genesis = 
Global_slot.slot_number curr_global_slot + } in let too_early = (* TODO: Does this make sense? *) @@ -3204,37 +3275,37 @@ module Hooks = struct let delayed_slot = UInt32.Infix.(curr_slot + delay) in Epoch.slot_start_time ~constants curr_epoch delayed_slot in - let times = [too_late; too_early] in + let times = [ too_late; too_early ] in List.for_all times ~f:(fun time -> not ( received_at_valid_time ~constants consensus_state ~time_received:(to_unix_timestamp time) - |> Result.is_ok ) ) + |> Result.is_ok )) module type State_hooks_intf = Intf.State_hooks - with type consensus_state := Consensus_state.Value.t - and type consensus_state_var := Consensus_state.var - and type consensus_transition := Consensus_transition.t - and type block_data := Block_data.t + with type consensus_state := Consensus_state.Value.t + and type consensus_state_var := Consensus_state.var + and type consensus_transition := Consensus_transition.t + and type block_data := Block_data.t module Make_state_hooks (Blockchain_state : Intf.Blockchain_state) (Protocol_state : Intf.Protocol_state - with type blockchain_state := Blockchain_state.Value.t - and type blockchain_state_var := Blockchain_state.var - and type consensus_state := Consensus_state.Value.t - and type consensus_state_var := Consensus_state.var) + with type blockchain_state := Blockchain_state.Value.t + and type blockchain_state_var := Blockchain_state.var + and type consensus_state := Consensus_state.Value.t + and type consensus_state_var := Consensus_state.var) (Snark_transition : Intf.Snark_transition - with type blockchain_state_var := - Blockchain_state.var - and type consensus_transition_var := - Consensus_transition.var) : + with type blockchain_state_var := + Blockchain_state.var + and type consensus_transition_var := + Consensus_transition.var) : State_hooks_intf - with type blockchain_state := Blockchain_state.Value.t - and type protocol_state := Protocol_state.Value.t - and type protocol_state_var := Protocol_state.var - 
and type snark_transition_var := Snark_transition.var = struct + with type blockchain_state := Blockchain_state.Value.t + and type protocol_state := Protocol_state.Value.t + and type protocol_state_var := Protocol_state.var + and type snark_transition_var := Snark_transition.var = struct (* TODO: only track total currency from accounts > 1% of the currency using transactions *) let genesis_winner = Vrf.Precomputed.genesis_winner @@ -3287,8 +3358,8 @@ module Hooks = struct (Consensus_state.update ~constants ~previous_consensus_state ~consensus_transition ~producer_vrf_result:block_data.Block_data.vrf_result - ~previous_protocol_state_hash ~supply_increase - ~snarked_ledger_hash ~genesis_ledger_hash + ~previous_protocol_state_hash ~supply_increase ~snarked_ledger_hash + ~genesis_ledger_hash ~block_stake_winner:block_data.stake_proof.delegator_pk ~block_creator ~coinbase_receiver:block_data.stake_proof.coinbase_receiver_pk @@ -3329,9 +3400,8 @@ module Hooks = struct let gen_consensus_state ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~constants ~(gen_slot_advancement : int Quickcheck.Generator.t) : - ( previous_protocol_state:( Protocol_state.Value.t - , Mina_base.State_hash.t ) - With_hash.t + ( previous_protocol_state: + (Protocol_state.Value.t, Mina_base.State_hash.t) With_hash.t -> snarked_ledger_hash:Mina_base.Frozen_ledger_hash.t -> coinbase_receiver:Public_key.Compressed.t -> supercharge_coinbase:bool @@ -3378,8 +3448,7 @@ module Hooks = struct (prev.staking_epoch_data, prev.next_epoch_data) prev.epoch_count ~prev_epoch ~next_epoch:curr_epoch ~next_slot:curr_slot - ~prev_protocol_state_hash: - (With_hash.hash previous_protocol_state) + ~prev_protocol_state_hash:(With_hash.hash previous_protocol_state) ~producer_vrf_result ~snarked_ledger_hash ~genesis_ledger_hash ~total_currency in @@ -3395,22 +3464,23 @@ module Hooks = struct ; epoch_count ; min_window_density ; sub_window_densities - ; last_vrf_output= Vrf.Output.truncate 
producer_vrf_result + ; last_vrf_output = Vrf.Output.truncate producer_vrf_result ; total_currency ; curr_global_slot ; global_slot_since_genesis ; staking_epoch_data ; next_epoch_data - ; has_ancestor_in_same_checkpoint_window= + ; has_ancestor_in_same_checkpoint_window = same_checkpoint_window_unchecked ~constants (Global_slot.create ~constants ~epoch:prev_epoch ~slot:prev_slot) (Global_slot.create ~constants ~epoch:curr_epoch ~slot:curr_slot) - ; block_stake_winner= genesis_winner_pk - ; block_creator= genesis_winner_pk + ; block_stake_winner = genesis_winner_pk + ; block_creator = genesis_winner_pk ; coinbase_receiver - ; supercharge_coinbase } + ; supercharge_coinbase + } end end end @@ -3505,7 +3575,7 @@ let%test_module "Proof of stake tests" = else previous_consensus_state.staking_epoch_data.seed in Vrf.eval ~constraint_constants ~private_key:producer_private_key - {global_slot= Global_slot.slot_number global_slot; seed; delegator} + { global_slot = Global_slot.slot_number global_slot; seed; delegator } in let next_consensus_state = update ~constants ~previous_consensus_state ~consensus_transition @@ -3570,15 +3640,15 @@ let%test_module "Proof of stake tests" = Genesis_constants.for_unit_tests.protocol)) in let result = - update_var previous_state transition_data - previous_protocol_state_hash ~supply_increase - ~previous_blockchain_state_ledger_hash ~genesis_ledger_hash - ~constraint_constants ~protocol_constants:constants_checked + update_var previous_state transition_data previous_protocol_state_hash + ~supply_increase ~previous_blockchain_state_ledger_hash + ~genesis_ledger_hash ~constraint_constants + ~protocol_constants:constants_checked in (* setup handler *) let indices = Ledger.Any_ledger.M.foldi ~init:[] ledger ~f:(fun i accum _acct -> - Ledger.Any_ledger.M.Addr.to_int i :: accum ) + Ledger.Any_ledger.M.Addr.to_int i :: accum) in let sparse_ledger = Sparse_ledger.of_ledger_index_subset_exn ledger indices @@ -3589,13 +3659,16 @@ let%test_module 
"Proof of stake tests" = let handler = Prover_state.handler ~constraint_constants { delegator - ; delegator_pk= producer_public_key_compressed - ; coinbase_receiver_pk= producer_public_key_compressed - ; ledger= sparse_ledger + ; delegator_pk = producer_public_key_compressed + ; coinbase_receiver_pk = producer_public_key_compressed + ; ledger = sparse_ledger ; producer_private_key - ; producer_public_key } + ; producer_public_key + } ~pending_coinbase: - {Pending_coinbase_witness.pending_coinbases; is_new_stack= true} + { Pending_coinbase_witness.pending_coinbases + ; is_new_stack = true + } in let%map `Success _, var = Snark_params.Tick.handle result handler in As_prover.read (typ ~constraint_constants) var @@ -3627,15 +3700,16 @@ let%test_module "Proof of stake tests" = let constraint_constants_with_fork = let fork_constants = Some - { Genesis_constants.Fork_constants.previous_state_hash= + { Genesis_constants.Fork_constants.previous_state_hash = Result.ok_or_failwith (State_hash.of_yojson (`String "3NL3bc213VQEFx6XTLbc3HxHqHH9ANbhHxRxSnBcRzXcKgeFA6TY")) - ; previous_length= Mina_numbers.Length.of_int 100 - ; previous_global_slot= Mina_numbers.Global_slot.of_int 200 } + ; previous_length = Mina_numbers.Length.of_int 100 + ; previous_global_slot = Mina_numbers.Global_slot.of_int 200 + } in - {constraint_constants with fork= fork_constants} + { constraint_constants with fork = fork_constants } in test_update constraint_constants_with_fork @@ -3661,7 +3735,7 @@ let%test_module "Proof of stake tests" = genesis_ledger_total_currency ~ledger:Genesis_ledger.t in let block_producer_pubkeys = - Public_key.Compressed.Set.of_list [public_key_compressed] + Public_key.Compressed.Set.of_list [ public_key_compressed ] in let ledger = Lazy.force Genesis_ledger.t in let delegatee_table = @@ -3669,7 +3743,8 @@ let%test_module "Proof of stake tests" = in let epoch_snapshot = { Local_state.Snapshot.delegatee_table - ; ledger= Genesis_epoch_ledger ledger } + ; ledger = 
Genesis_epoch_ledger ledger + } in let balance = Balance.to_int account.balance in let total_stake_int = Currency.Amount.to_int total_stake in @@ -3728,7 +3803,7 @@ let%test_module "Proof of stake tests" = let open Quickcheck.Generator.Let_syntax in let%bind data = gen in let%map hash = State_hash.gen in - {With_hash.data; hash} + { With_hash.data; hash } let gen_num_blocks_in_slots ~slot_fill_rate ~slot_fill_rate_delta n = let open Quickcheck.Generator.Let_syntax in @@ -3765,7 +3840,8 @@ let%test_module "Proof of stake tests" = in let min_window_density = List.fold ~init:Length.zero ~f:Length.add - (List.take sub_window_densities (List.length sub_window_densities - 1)) + (List.take sub_window_densities + (List.length sub_window_densities - 1)) in (min_window_density, sub_window_densities) @@ -3797,16 +3873,18 @@ let%test_module "Proof of stake tests" = in let%map lock_checkpoint = opt_gen lock_checkpoint ~gen:State_hash.gen in let ledger : Epoch_ledger.Value.t = - { hash= ledger_hash - ; total_currency= + { hash = ledger_hash + ; total_currency = currency_at_height ~genesis_currency - (Length.to_int height_at_end_of_epoch) } + (Length.to_int height_at_end_of_epoch) + } in { Epoch_data.Poly.ledger ; seed ; start_checkpoint ; lock_checkpoint - ; epoch_length } + ; epoch_length + } let default_slot_fill_rate = 0.65 @@ -3858,8 +3936,7 @@ let%test_module "Proof of stake tests" = * - vrf outputs and ledger hashes are entirely fake * - density windows are computed distinctly from block heights and epoch lengths, so some non-obvious invariants may be broken there *) - let gen_spot ?root_epoch_position - ?(slot_fill_rate = default_slot_fill_rate) + let gen_spot ?root_epoch_position ?(slot_fill_rate = default_slot_fill_rate) ?(slot_fill_rate_delta = default_slot_fill_rate_delta) ?(genesis_currency = Currency.Amount.of_int 200000) ?gen_staking_epoch_length ?gen_next_epoch_length @@ -3881,8 +3958,7 @@ let%test_module "Proof of stake tests" = ~default:(gen_num_blocks_in_epochs 
1) in let gen_next_epoch_length = - Option.value gen_next_epoch_length - ~default:(gen_num_blocks_in_epochs 1) + Option.value gen_next_epoch_length ~default:(gen_num_blocks_in_epochs 1) in let gen_curr_epoch_position = let default = @@ -3919,7 +3995,8 @@ let%test_module "Proof of stake tests" = [ root_block_height ; staking_epoch_length ; next_staking_epoch_length - ; curr_epoch_length ] + ; curr_epoch_length + ] in (* Compute total currency for state. *) let total_currency = @@ -3948,22 +4025,23 @@ let%test_module "Proof of stake tests" = (* Generate block reward information (unused in chain selection). *) let%map staker_pk = Public_key.Compressed.gen in { Consensus_state.Poly.blockchain_length - ; epoch_count= curr_epoch + ; epoch_count = curr_epoch ; min_window_density ; sub_window_densities - ; last_vrf_output= vrf_output + ; last_vrf_output = vrf_output ; total_currency ; curr_global_slot ; staking_epoch_data - ; next_epoch_data= next_staking_epoch_data - ; global_slot_since_genesis= + ; next_epoch_data = next_staking_epoch_data + ; global_slot_since_genesis = Global_slot.slot_number curr_global_slot (* These values are not used in selection, so we just set them to something. *) - ; has_ancestor_in_same_checkpoint_window= true - ; block_stake_winner= staker_pk - ; block_creator= staker_pk - ; coinbase_receiver= staker_pk - ; supercharge_coinbase= false } + ; has_ancestor_in_same_checkpoint_window = true + ; block_stake_winner = staker_pk + ; block_creator = staker_pk + ; coinbase_receiver = staker_pk + ; supercharge_coinbase = false + } (** This generator generates pairs of spot blocks that share common checkpoints. 
* The overlap of the checkpoints and the root epoch positions of the blocks @@ -3988,12 +4066,12 @@ let%test_module "Proof of stake tests" = let%bind hashes = gen_unique_list 2 ~gen:State_hash.gen ~equal:State_hash.equal in - let[@warning "-8"] [hash_a; hash_b] = hashes in + let[@warning "-8"] [ hash_a; hash_b ] = hashes in (* Generate common checkpoints. *) let%bind checkpoints = gen_unique_list 2 ~gen:State_hash.gen ~equal:State_hash.equal in - let[@warning "-8"] [start_checkpoint; lock_checkpoint] = checkpoints in + let[@warning "-8"] [ start_checkpoint; lock_checkpoint ] = checkpoints in let%bind a, a_curr_epoch_length = (* If we are constraining the second state to have a greater blockchain length than the * first, we need to constrain the first blockchain length such that there is some room @@ -4037,7 +4115,8 @@ let%test_module "Proof of stake tests" = sum_lengths [ root_epoch_length ; a.staking_epoch_data.epoch_length - ; a.next_epoch_data.epoch_length ] + ; a.next_epoch_data.epoch_length + ] in Option.value_exn (Length.sub a.blockchain_length length_till_curr_epoch) @@ -4111,11 +4190,11 @@ let%test_module "Proof of stake tests" = in gen_spot ~slot_fill_rate ~slot_fill_rate_delta ~root_epoch_position ?staking_start_checkpoint ?staking_lock_checkpoint - ?next_start_checkpoint ?next_lock_checkpoint - ?gen_staking_epoch_length ?gen_next_epoch_length - ?gen_curr_epoch_position ?gen_vrf_output () + ?next_start_checkpoint ?next_lock_checkpoint ?gen_staking_epoch_length + ?gen_next_epoch_length ?gen_curr_epoch_position ?gen_vrf_output () in - (With_hash.{data= a; hash= hash_a}, With_hash.{data= b; hash= hash_b}) + ( With_hash.{ data = a; hash = hash_a } + , With_hash.{ data = b; hash = hash_b } ) let gen_spot_pair_short_aligned ?blockchain_length_relativity ?vrf_output_relativity () = @@ -4155,7 +4234,7 @@ let%test_module "Proof of stake tests" = let%bind hashes = gen_unique_list 2 ~gen:State_hash.gen ~equal:State_hash.equal in - let[@warning "-8"] [hash_a; hash_b] 
= hashes in + let[@warning "-8"] [ hash_a; hash_b ] = hashes in let%bind checkpoints = gen_unique_list 8 ~gen:State_hash.gen ~equal:State_hash.equal in @@ -4166,7 +4245,8 @@ let%test_module "Proof of stake tests" = ; b_staking_start_checkpoint ; b_staking_lock_checkpoint ; b_next_start_checkpoint - ; b_next_lock_checkpoint ] = + ; b_next_lock_checkpoint + ] = checkpoints in let%bind a = @@ -4181,14 +4261,15 @@ let%test_module "Proof of stake tests" = ~next_start_checkpoint:b_next_start_checkpoint ~next_lock_checkpoint:b_next_lock_checkpoint () in - (With_hash.{data= a; hash= hash_a}, With_hash.{data= b; hash= hash_b}) + ( With_hash.{ data = a; hash = hash_a } + , With_hash.{ data = b; hash = hash_b } ) let gen_spot_pair = let open Quickcheck.Generator.Let_syntax in let%bind a, b = match%bind Quickcheck.Generator.of_list - [`Short_aligned; `Short_misaligned; `Long] + [ `Short_aligned; `Short_misaligned; `Long ] with | `Short_aligned -> gen_spot_pair_short_aligned () @@ -4221,15 +4302,15 @@ let%test_module "Proof of stake tests" = raise (Failure message) let assert_consensus_state_pair = - assert_consensus_state_set ~project:(fun (a, b) -> [a; b]) + assert_consensus_state_set ~project:(fun (a, b) -> [ a; b ]) let assert_hashed_consensus_state_pair = assert_consensus_state_set ~project:(fun (a, b) -> - [With_hash.data a; With_hash.data b] ) + [ With_hash.data a; With_hash.data b ]) let assert_hashed_consensus_state_triple = assert_consensus_state_set ~project:(fun (a, b, c) -> - [With_hash.data a; With_hash.data b; With_hash.data c] ) + [ With_hash.data a; With_hash.data b; With_hash.data c ]) let is_selected ?(log = false) (a, b) = let logger = if log then Logger.create () else Logger.null () in @@ -4257,7 +4338,7 @@ let%test_module "Proof of stake tests" = Quickcheck.test (gen_spot ()) ~f:(fun state -> assert_consensus_state_pair (state, state) ~assertion:"within long range" ~f:(fun (a, b) -> - Hooks.is_short_range a b ~constants ) ) + Hooks.is_short_range a b 
~constants)) let%test_unit "generator sanity check: gen_spot_pair_short_aligned always \ generates pairs of states in short fork range" = @@ -4266,7 +4347,7 @@ let%test_module "Proof of stake tests" = assert_consensus_state_pair (With_hash.data a, With_hash.data b) ~assertion:"within short range" - ~f:(fun (a, b) -> Hooks.is_short_range a b ~constants) ) + ~f:(fun (a, b) -> Hooks.is_short_range a b ~constants)) let%test_unit "generator sanity check: gen_spot_pair_short_misaligned \ always generates pairs of states in short fork range" = @@ -4275,23 +4356,23 @@ let%test_module "Proof of stake tests" = assert_consensus_state_pair (With_hash.data a, With_hash.data b) ~assertion:"within short range" - ~f:(fun (a, b) -> Hooks.is_short_range a b ~constants) ) + ~f:(fun (a, b) -> Hooks.is_short_range a b ~constants)) - let%test_unit "generator sanity check: gen_spot_pair_long always \ - generates pairs of states in long fork range" = + let%test_unit "generator sanity check: gen_spot_pair_long always generates \ + pairs of states in long fork range" = let constants = Lazy.force Constants.for_unit_tests in Quickcheck.test gen_spot_pair_long ~f:(fun (a, b) -> assert_consensus_state_pair (With_hash.data a, With_hash.data b) ~assertion:"within long range" - ~f:(fun (a, b) -> not (Hooks.is_short_range ~constants a b)) ) + ~f:(fun (a, b) -> not (Hooks.is_short_range ~constants a b))) let%test_unit "selection case: equal states" = Quickcheck.test (Quickcheck.Generator.tuple2 State_hash.gen (gen_spot ())) ~f:(fun (hash, state) -> - let hashed_state = {With_hash.data= state; hash} in - assert_not_selected (hashed_state, hashed_state) ) + let hashed_state = { With_hash.data = state; hash } in + assert_not_selected (hashed_state, hashed_state)) let%test_unit "selection case: aligned checkpoints & different lengths" = Quickcheck.test @@ -4306,21 +4387,20 @@ let%test_module "Proof of stake tests" = ~vrf_output_relativity:`Ascending ()) ~f:assert_selected - let%test_unit "selection 
case: aligned checkpoints & equal lengths & \ - equal vrfs & different hashes" = + let%test_unit "selection case: aligned checkpoints & equal lengths & equal \ + vrfs & different hashes" = Quickcheck.test (gen_spot_pair_short_aligned ~blockchain_length_relativity:`Equal ~vrf_output_relativity:`Equal ()) ~f:(fun (a, b) -> if State_hash.(With_hash.hash b > With_hash.hash a) then assert_selected (a, b) - else assert_selected (b, a) ) + else assert_selected (b, a)) - let%test_unit "selection case: misaligned checkpoints & different lengths" - = + let%test_unit "selection case: misaligned checkpoints & different lengths" = Quickcheck.test - (gen_spot_pair_short_misaligned - ~blockchain_length_relativity:`Ascending ()) + (gen_spot_pair_short_misaligned ~blockchain_length_relativity:`Ascending + ()) ~f:assert_selected (* TODO: This test always succeeds, but this could be a false positive as the blockchain length equality constraint @@ -4334,28 +4414,28 @@ let%test_module "Proof of stake tests" = ~f:assert_selected (* TODO: This test fails because the blockchain length equality constraint is broken for misaligned short forks. 
- let%test_unit "selection case: misaligned checkpoints & equal lengths & equal vrfs & different hashes" = - Quickcheck.test - (gen_spot_pair_short_misaligned ~blockchain_length_relativity:`Equal ~vrf_output_relativity:`Equal ()) - ~f:(fun (a, b) -> - if State_hash.compare (With_hash.hash a) (With_hash.hash b) > 0 then - assert_selected (a, b) - else - assert_selected (b, a)) + let%test_unit "selection case: misaligned checkpoints & equal lengths & equal vrfs & different hashes" = + Quickcheck.test + (gen_spot_pair_short_misaligned ~blockchain_length_relativity:`Equal ~vrf_output_relativity:`Equal ()) + ~f:(fun (a, b) -> + if State_hash.compare (With_hash.hash a) (With_hash.hash b) > 0 then + assert_selected (a, b) + else + assert_selected (b, a)) *) (* TODO: expand long fork generation to support relativity constraints - let%test_unit "selection case: distinct checkpoints & different min window densities" = - failwith "TODO" + let%test_unit "selection case: distinct checkpoints & different min window densities" = + failwith "TODO" - let%test_unit "selection case: distinct checkpoints & equal min window densities & different lengths" = - failwith "TODO" + let%test_unit "selection case: distinct checkpoints & equal min window densities & different lengths" = + failwith "TODO" - let%test_unit "selection case: distinct checkpoints & equal min window densities & equal lengths & different vrfs" = - failwith "TODO" + let%test_unit "selection case: distinct checkpoints & equal min window densities & equal lengths & different vrfs" = + failwith "TODO" - let%test_unit "selection case: distinct checkpoints & equal min window densities & equal lengths & qequals vrfs & different hashes" = - failwith "TODO" + let%test_unit "selection case: distinct checkpoints & equal min window densities & equal lengths & qequals vrfs & different hashes" = + failwith "TODO" *) let%test_unit "selection invariant: candidate selections are not \ @@ -4373,7 +4453,7 @@ let%test_module "Proof of 
stake tests" = not ([%equal: Hooks.select_status * Hooks.select_status] (select a b, select b a) - (`Take, `Take)) )) + (`Take, `Take)))) (* We define a homogeneous binary relation for consensus states by adapting the binary chain * selection rule and extending it to consider equality of chains. From this, we can test diff --git a/src/lib/consensus/proof_of_stake.mli b/src/lib/consensus/proof_of_stake.mli index c8f4cb40cdc..dc9658016e1 100644 --- a/src/lib/consensus/proof_of_stake.mli +++ b/src/lib/consensus/proof_of_stake.mli @@ -15,8 +15,8 @@ module Exported : sig module Consensus_state : sig include module type of Data.Consensus_state - with type Value.Stable.V1.t = Data.Consensus_state.Value.Stable.V1.t - and type var = Data.Consensus_state.var + with type Value.Stable.V1.t = Data.Consensus_state.Value.Stable.V1.t + and type var = Data.Consensus_state.var val global_slot : Value.t -> Global_slot.t diff --git a/src/lib/consensus/proof_of_stake_fuzzer.ml b/src/lib/consensus/proof_of_stake_fuzzer.ml index 01d78fc010a..3b9b0167ad2 100644 --- a/src/lib/consensus/proof_of_stake_fuzzer.ml +++ b/src/lib/consensus/proof_of_stake_fuzzer.ml @@ -1,5 +1,4 @@ -[%%import -"../../config.mlh"] +[%%import "../../config.mlh"] open Core_kernel open Async @@ -23,16 +22,18 @@ let rec fold_until_none ~init ~f = (* TODO: optimize epoch ledgers? (many duplcate copies right now *) module Staker = struct - type t = {keypair: Keypair.And_compressed_pk.t; local_state: Local_state.t} + type t = + { keypair : Keypair.And_compressed_pk.t; local_state : Local_state.t } end module Vrf_distribution = struct open Staker type t = - { start_slot: Global_slot.t - ; term_slot: Global_slot.t - ; proposal_table: Block_data.t Public_key.Compressed.Map.t Int.Table.t } + { start_slot : Global_slot.t + ; term_slot : Global_slot.t + ; proposal_table : Block_data.t Public_key.Compressed.Map.t Int.Table.t + } (** Creates an empty vrf distribution for [~for_epoch]. 
Note that here, * the ~for_epoch refers to the epoch after the epoch where the vrf @@ -75,9 +76,9 @@ module Vrf_distribution = struct let slot = UInt32.to_int @@ Block_data.global_slot proposal_data in Hashtbl.update proposal_table slot ~f:(function | None -> - Public_key.Compressed.Map.of_alist_exn [(pk, proposal_data)] + Public_key.Compressed.Map.of_alist_exn [ (pk, proposal_data) ] | Some map -> - Map.add_exn map ~key:pk ~data:proposal_data ) + Map.add_exn map ~key:pk ~data:proposal_data) in List.iter stakers ~f:(fun staker -> ignore @@ -93,7 +94,8 @@ module Vrf_distribution = struct |> Block_time.Span.to_ms ) dummy_consensus_state ~local_state:staker.local_state ~keypairs: - (Keypair.And_compressed_pk.Set.of_list [staker.keypair]) + (Keypair.And_compressed_pk.Set.of_list + [ staker.keypair ]) ~logger:(Logger.null ()) with | `Check_again _ -> @@ -125,8 +127,8 @@ module Vrf_distribution = struct Consensus_state.Unsafe.dummy_advance dummy_consensus_state ~increase_epoch_count ~new_global_slot in - (next_dummy_consensus_state, proposal_time) ) ) ; - {start_slot; term_slot; proposal_table} + (next_dummy_consensus_state, proposal_time))) ; + { start_slot; term_slot; proposal_table } (** Picks a single chain of proposals from a distribution. Does not attempt * to simulate any regular properties of how a real chain would be built. *) @@ -222,7 +224,7 @@ let fuzz_vrf_round ~stakers ~base_chains = let run () = (* ... *) fuzz_vrf_round ~stakers ~base_chains:[genesis_chain] -*) + *) (* TODO: Should these be runtime configurable? 
*) @@ -245,13 +247,15 @@ let create_genesis_data () = Account.public_key (snd (List.hd_exn (Lazy.force Genesis_ledger.accounts))) in let empty_diff = - { Staged_ledger_diff.diff= - ( { completed_works= [] - ; user_commands= [] - ; coinbase= Staged_ledger_diff.At_most_two.Zero } + { Staged_ledger_diff.diff = + ( { completed_works = [] + ; user_commands = [] + ; coinbase = Staged_ledger_diff.At_most_two.Zero + } , None ) - ; creator= genesis_dummy_pk - ; coinbase_receiver= genesis_dummy_pk } + ; creator = genesis_dummy_pk + ; coinbase_receiver = genesis_dummy_pk + } in let genesis_transition = External_transition.create @@ -274,8 +278,7 @@ let create_genesis_data () = in (genesis_transition, Or_error.ok_exn genesis_staged_ledger_res) -[%%if -proof_level = "full"] +[%%if proof_level = "full"] let prove_blockchain ~logger (module Keys : Keys_lib.Keys.S) (chain : Blockchain.t) (next_state : Protocol_state.Value.t) @@ -284,16 +287,19 @@ let prove_blockchain ~logger (module Keys : Keys_lib.Keys.S) let module Wrap = Keys.Wrap in Tock.prove (Tock.Keypair.pk Wrap.keys) - Wrap.input {Wrap.Prover_state.proof} Wrap.main + Wrap.input + { Wrap.Prover_state.proof } + Wrap.main (Wrap_input.of_tick_field hash) in let next_state_top_hash = Keys.Step.instance_hash next_state in let prover_state = - { Keys.Step.Prover_state.prev_proof= chain.proof - ; wrap_vk= Tock.Keypair.vk Keys.Wrap.keys - ; prev_state= chain.state - ; expected_next_state= Some next_state - ; update= block } + { Keys.Step.Prover_state.prev_proof = chain.proof + ; wrap_vk = Tock.Keypair.vk Keys.Wrap.keys + ; prev_state = chain.state + ; expected_next_state = Some next_state + ; update = block + } in let main x = Tick.handle (Keys.Step.main ~logger x) @@ -306,29 +312,30 @@ let prove_blockchain ~logger (module Keys : Keys_lib.Keys.S) (Tick.Keypair.pk Keys.Step.keys) (Keys.Step.input ()) prover_state main next_state_top_hash in - { Blockchain.state= next_state - ; proof= wrap next_state_top_hash prev_proof } ) + { 
Blockchain.state = next_state + ; proof = wrap next_state_top_hash prev_proof + }) in Or_error.iter_error res ~f:(fun e -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] - "Prover threw an error while extending block: $error" ) ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + "Prover threw an error while extending block: $error") ; res -[%%elif -proof_level = "check"] +[%%elif proof_level = "check"] let prove_blockchain ~logger (module Keys : Keys_lib.Keys.S) (chain : Blockchain.t) (next_state : Protocol_state.Value.t) (block : Snark_transition.value) state_for_handler pending_coinbase = let next_state_top_hash = Keys.Step.instance_hash next_state in let prover_state = - { Keys.Step.Prover_state.prev_proof= chain.proof - ; wrap_vk= Tock.Keypair.vk Keys.Wrap.keys - ; prev_state= chain.state - ; expected_next_state= Some next_state - ; update= block - ; genesis_state_hash= With_hash.hash genesis_protocol_state } + { Keys.Step.Prover_state.prev_proof = chain.proof + ; wrap_vk = Tock.Keypair.vk Keys.Wrap.keys + ; prev_state = chain.state + ; expected_next_state = Some next_state + ; update = block + ; genesis_state_hash = With_hash.hash genesis_protocol_state + } in let main x = Tick.handle (Keys.Step.main ~logger x) @@ -340,28 +347,26 @@ let prove_blockchain ~logger (module Keys : Keys_lib.Keys.S) (main @@ Tick.Field.Var.constant next_state_top_hash) prover_state) ~f:(fun () -> - {Blockchain.state= next_state; proof= precomputed_values.genesis_proof} - ) + { Blockchain.state = next_state + ; proof = precomputed_values.genesis_proof + }) in Or_error.iter_error res ~f:(fun e -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] - "Prover threw an error while extending block: $error" ) ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + "Prover threw an error while extending block: $error") ; res -[%%elif -proof_level = "none"] +[%%elif proof_level = "none"] let prove_blockchain ~logger:_ _ _ _ _ _ _ = failwith 
"cannot run fuzzer with proof_level = \"none\"" [%%else] -[%%show -proof_level] +[%%show proof_level] -[%%error -"invalid proof_level"] +[%%error "invalid proof_level"] [%%endif] @@ -401,8 +406,7 @@ let propose_block_onto_chain ~logger ~keys let%map ( `Hash_after_applying next_staged_ledger_hash , `Ledger_proof ledger_proof_opt , `Staged_ledger staged_ledger - , `Pending_coinbase_update (is_new_stack, pending_coinbase_update) ) - = + , `Pending_coinbase_update (is_new_stack, pending_coinbase_update) ) = let%map res = Staged_ledger.apply_diff_unchecked previous_staged_ledger ~logger staged_ledger_diff ~state_body_hash:previous_protocol_state_body_hash @@ -414,7 +418,7 @@ let propose_block_onto_chain ~logger ~keys let next_ledger_hash = Option.value_map ledger_proof_opt ~f:(fun (proof, _) -> - Ledger_proof.statement proof |> Ledger_proof.statement_target ) + Ledger_proof.statement proof |> Ledger_proof.statement_target) ~default:previous_ledger_hash in let blockchain_state = @@ -443,10 +447,10 @@ let propose_block_onto_chain ~logger ~keys Snark_transition.create_value ?sok_digest: (Option.map ledger_proof_opt ~f:(fun (proof, _) -> - Ledger_proof.sok_digest proof )) + Ledger_proof.sok_digest proof)) ?ledger_proof: (Option.map ledger_proof_opt ~f:(fun (proof, _) -> - Ledger_proof.underlying_proof proof )) + Ledger_proof.underlying_proof proof)) ~supply_increase: (Option.value_map ~default:Currency.Amount.zero ~f:(fun (proof, _) -> (Ledger_proof.statement proof).supply_increase) @@ -460,11 +464,12 @@ let propose_block_onto_chain ~logger ~keys ~staged_ledger_diff:(Staged_ledger_diff.forget staged_ledger_diff) in let pending_coinbase_witness = - { Pending_coinbase_witness.pending_coinbases= + { Pending_coinbase_witness.pending_coinbases = Staged_ledger.pending_coinbase_collection previous_staged_ledger - ; is_new_stack } + ; is_new_stack + } in - let {Blockchain.proof= protocol_state_proof; _} = + let { Blockchain.proof = protocol_state_proof; _ } = prove_blockchain 
~logger keys (Blockchain.create ~proof:previous_protocol_state_proof ~state:previous_protocol_state) @@ -507,10 +512,10 @@ let main () = let keypair = (raw_keypair, compressed_pk) in let local_state = Local_state.create - (Public_key.Compressed.Set.of_list [compressed_pk]) + (Public_key.Compressed.Set.of_list [ compressed_pk ]) ~genesis_ledger:Genesis_ledger.t in - Staker.{keypair; local_state} ) + Staker.{ keypair; local_state }) in let rec loop epoch (base_transition, base_staged_ledger) = let dist = @@ -541,7 +546,7 @@ let main () = slot (of_slot_number ~constants:consensus_constants (Block_data.global_slot block_data))) ) ; - propose_block_onto_chain ~logger ~keys previous_chain proposal ) + propose_block_onto_chain ~logger ~keys previous_chain proposal) in loop (UInt32.succ epoch) final_chain in diff --git a/src/lib/consensus/slot.ml b/src/lib/consensus/slot.ml index 2d51e8ff6c6..f82b55aa70d 100644 --- a/src/lib/consensus/slot.ml +++ b/src/lib/consensus/slot.ml @@ -30,12 +30,12 @@ module Checked = struct in Tick.Run.Boolean.Assert.is_true (Integer.equal ~m r (Integer.constant ~m Bignum_bigint.zero)) ; - q ) + q) in let two = Integer.constant ~m (Bignum_bigint.of_int 2) in let%bind ck_times_2 = integer_mul third_epoch two in make_checked (fun () -> - Integer.lt ~m (T.Checked.to_integer slot) ck_times_2 ) + Integer.lt ~m (T.Checked.to_integer slot) ck_times_2) end let gen (constants : Constants.t) = @@ -58,6 +58,7 @@ let%test_unit "in_seed_update_range unchecked vs. 
checked equality" = UInt32.div constants.slots_per_epoch (UInt32.of_int 3) |> UInt32.to_int in let examples = - List.map ~f:UInt32.of_int [x; x - 1; x + 1; x * 2; (x * 2) - 1; (x * 2) + 1] + List.map ~f:UInt32.of_int + [ x; x - 1; x + 1; x * 2; (x * 2) - 1; (x * 2) + 1 ] in Quickcheck.test ~trials:100 ~examples (gen constants) ~f:test diff --git a/src/lib/consensus/stake_proof.ml b/src/lib/consensus/stake_proof.ml index 1023cd3fc5e..e7830d43cd9 100644 --- a/src/lib/consensus/stake_proof.ml +++ b/src/lib/consensus/stake_proof.ml @@ -8,12 +8,13 @@ module Stable = struct module V1 = struct type t = - { delegator: Account.Index.Stable.V1.t - ; delegator_pk: Public_key.Compressed.Stable.V1.t - ; coinbase_receiver_pk: Public_key.Compressed.Stable.V1.t - ; ledger: Sparse_ledger.Stable.V1.t - ; producer_private_key: Private_key.Stable.V1.t - ; producer_public_key: Public_key.Stable.V1.t } + { delegator : Account.Index.Stable.V1.t + ; delegator_pk : Public_key.Compressed.Stable.V1.t + ; coinbase_receiver_pk : Public_key.Compressed.Stable.V1.t + ; ledger : Sparse_ledger.Stable.V1.t + ; producer_private_key : Private_key.Stable.V1.t + ; producer_public_key : Public_key.Stable.V1.t + } let to_latest = Fn.id end @@ -25,10 +26,11 @@ end] is for. 
*) type t = Stable.Latest.t = - { delegator: Account.Index.t - ; delegator_pk: Public_key.Compressed.t - ; coinbase_receiver_pk: Public_key.Compressed.t - ; ledger: Sparse_ledger.t - ; producer_private_key: Private_key.t - ; producer_public_key: Public_key.t } + { delegator : Account.Index.t + ; delegator_pk : Public_key.Compressed.t + ; coinbase_receiver_pk : Public_key.Compressed.t + ; ledger : Sparse_ledger.t + ; producer_private_key : Private_key.t + ; producer_public_key : Public_key.t + } [@@deriving to_yojson, sexp] diff --git a/src/lib/consensus/vrf/consensus_vrf.ml b/src/lib/consensus/vrf/consensus_vrf.ml index c4fb5812a90..462f82adaf0 100644 --- a/src/lib/consensus/vrf/consensus_vrf.ml +++ b/src/lib/consensus/vrf/consensus_vrf.ml @@ -38,10 +38,10 @@ module Group = struct let to_string_list_exn (t : t) = let x, y = Inner_curve.to_affine_exn t in - [Field.to_string x; Field.to_string y] + [ Field.to_string x; Field.to_string y ] let of_string_list_exn = function - | [x; y] -> + | [ x; y ] -> Inner_curve.of_affine (Field.of_string x, Field.of_string y) | _ -> invalid_arg @@ -78,7 +78,7 @@ module Message = struct module Global_slot = Mina_numbers.Global_slot type ('global_slot, 'epoch_seed, 'delegator) t = - {global_slot: 'global_slot; seed: 'epoch_seed; delegator: 'delegator} + { global_slot : 'global_slot; seed : 'epoch_seed; delegator : 'delegator } [@@deriving sexp, hlist] type value = @@ -93,12 +93,14 @@ module Message = struct let to_input ~(constraint_constants : Genesis_constants.Constraint_constants.t) - ({global_slot; seed; delegator} : value) = - { Random_oracle.Input.field_elements= [|(seed :> Tick.field)|] - ; bitstrings= + ({ global_slot; seed; delegator } : value) = + { Random_oracle.Input.field_elements = [| (seed :> Tick.field) |] + ; bitstrings = [| Global_slot.Bits.to_bits global_slot ; Mina_base.Account.Index.to_bits - ~ledger_depth:constraint_constants.ledger_depth delegator |] } + ~ledger_depth:constraint_constants.ledger_depth delegator 
+ |] + } let data_spec ~(constraint_constants : Genesis_constants.Constraint_constants.t) = @@ -106,7 +108,8 @@ module Message = struct [ Global_slot.typ ; Mina_base.Epoch_seed.typ ; Mina_base.Account.Index.Unpacked.typ - ~ledger_depth:constraint_constants.ledger_depth ] + ~ledger_depth:constraint_constants.ledger_depth + ] let typ ~constraint_constants : (var, value) Tick.Typ.t = Tick.Typ.of_hlistable @@ -122,20 +125,21 @@ module Message = struct module Checked = struct open Tick - let to_input ({global_slot; seed; delegator} : var) = + let to_input ({ global_slot; seed; delegator } : var) = let open Tick.Checked.Let_syntax in let%map global_slot = Global_slot.Checked.to_bits global_slot in let s = Bitstring_lib.Bitstring.Lsb_first.to_list in - { Random_oracle.Input.field_elements= - [|Mina_base.Epoch_seed.var_to_hash_packed seed|] - ; bitstrings= [|s global_slot; delegator|] } + { Random_oracle.Input.field_elements = + [| Mina_base.Epoch_seed.var_to_hash_packed seed |] + ; bitstrings = [| s global_slot; delegator |] + } let hash_to_group msg = let%bind input = to_input msg in Tick.make_checked (fun () -> Random_oracle.Checked.hash ~init:Mina_base.Hash_prefix.vrf_message (Random_oracle.Checked.pack_input input) - |> Group_map.Checked.to_group ) + |> Group_map.Checked.to_group) end let gen ~(constraint_constants : Genesis_constants.Constraint_constants.t) = @@ -146,7 +150,7 @@ module Message = struct Mina_base.Account.Index.gen ~ledger_depth:constraint_constants.ledger_depth in - {global_slot; seed; delegator} + { global_slot; seed; delegator } end (* c is a constant factor on vrf-win likelihood *) @@ -176,7 +180,7 @@ module Output = struct sprintf "Error decoding vrf output in \ Vrf.Output.Truncated.Stable.V1.of_yojson: %s" - err ) + err) | _ -> Error "Vrf.Output.Truncated.Stable.V1.of_yojson: Expected a string" @@ -203,7 +207,7 @@ module Output = struct Typ.array ~length:length_in_bits Boolean.typ |> Typ.transport ~there:(fun s -> - Array.sub 
(Blake2.string_to_bits s) ~pos:0 ~len:length_in_bits ) + Array.sub (Blake2.string_to_bits s) ~pos:0 ~len:length_in_bits) ~back:Blake2.bits_to_string let dummy = @@ -240,7 +244,7 @@ module Output = struct Random_oracle.Input.( append (Message.to_input ~constraint_constants msg) - (field_elements [|x; y|])) + (field_elements [| x; y |])) in let open Random_oracle in hash ~init:Hash_prefix_states.vrf_output (pack_input input) @@ -250,14 +254,16 @@ module Output = struct Tick.make_checked (fun () -> Random_oracle.Checked.Digest.to_bits ~length:Truncated.length_in_bits x - |> Array.of_list ) + |> Array.of_list) let hash msg (x, y) = let%bind msg = Message.Checked.to_input msg in - let input = Random_oracle.Input.(append msg (field_elements [|x; y|])) in + let input = + Random_oracle.Input.(append msg (field_elements [| x; y |])) + in make_checked (fun () -> let open Random_oracle.Checked in - hash ~init:Hash_prefix_states.vrf_output (pack_input input) ) + hash ~init:Hash_prefix_states.vrf_output (pack_input input)) end let%test_unit "hash unchecked vs. 
checked equality" = @@ -300,8 +306,8 @@ module Threshold = struct let bigint_of_uint64 = Fn.compose Bigint.of_string UInt64.to_string - (* Check if - vrf_output / 2^256 <= c * (1 - (1 - f)^(amount / total_stake)) + (* Check if + vrf_output / 2^256 <= c * (1 - (1 - f)^(amount / total_stake)) *) let is_satisfied ~my_stake ~total_stake vrf_output = let open Currency in @@ -311,7 +317,7 @@ module Threshold = struct This is equal to floor(2^params.per_term_precision * top / bottom) / 2^params.per_term_precision - *) + *) let k = params.per_term_precision in let top = bigint_of_uint64 (Balance.to_uint64 my_stake) in let bottom = bigint_of_uint64 (Amount.to_uint64 total_stake) in @@ -345,7 +351,7 @@ module Threshold = struct Floating_point.( le ~m (of_bits ~m lhs ~precision:Output.Truncated.length_in_bits) - rhs) ) + rhs)) end end @@ -354,16 +360,18 @@ module Evaluation_hash = struct let input = let open Random_oracle_input in let g_to_input g = - { field_elements= + { field_elements = (let f1, f2 = Group.to_affine_exn g in - [|f1; f2|]) - ; bitstrings= [||] } + [| f1; f2 |]) + ; bitstrings = [||] + } in Array.reduce_exn ~f:Random_oracle_input.append [| Message.to_input ~constraint_constants message ; g_to_input public_key ; g_to_input g1 - ; g_to_input g2 |] + ; g_to_input g2 + |] in let tick_output = Random_oracle.hash ~init:Mina_base.Hash_prefix.vrf_evaluation @@ -378,17 +386,21 @@ module Evaluation_hash = struct let%bind input = let open Random_oracle_input in let g_to_input (f1, f2) = - {field_elements= [|f1; f2|]; bitstrings= [||]} + { field_elements = [| f1; f2 |]; bitstrings = [||] } in let%map message_input = Message.Checked.to_input message in Array.reduce_exn ~f:Random_oracle_input.append - [|message_input; g_to_input public_key; g_to_input g1; g_to_input g2|] + [| message_input + ; g_to_input public_key + ; g_to_input g1 + ; g_to_input g2 + |] in let%bind tick_output = Tick.make_checked (fun () -> Random_oracle.Checked.hash 
~init:Mina_base.Hash_prefix.vrf_evaluation - (Random_oracle.Checked.pack_input input) ) + (Random_oracle.Checked.pack_input input)) in (* This isn't great cryptographic practice.. *) Tick.Field.Checked.unpack_full tick_output @@ -457,26 +469,30 @@ module Layout = struct *) module Message = struct type t = - { global_slot: Mina_numbers.Global_slot.t [@key "globalSlot"] - ; epoch_seed: Mina_base.Epoch_seed.t [@key "epochSeed"] - ; delegator_index: int [@key "delegatorIndex"] } + { global_slot : Mina_numbers.Global_slot.t [@key "globalSlot"] + ; epoch_seed : Mina_base.Epoch_seed.t [@key "epochSeed"] + ; delegator_index : int [@key "delegatorIndex"] + } [@@deriving yojson] let to_message (t : t) : Message.value = - { global_slot= t.global_slot - ; seed= t.epoch_seed - ; delegator= t.delegator_index } + { global_slot = t.global_slot + ; seed = t.epoch_seed + ; delegator = t.delegator_index + } let of_message (t : Message.value) : t = - { global_slot= t.global_slot - ; epoch_seed= t.seed - ; delegator_index= t.delegator } + { global_slot = t.global_slot + ; epoch_seed = t.seed + ; delegator_index = t.delegator + } end module Threshold = struct type t = - { delegated_stake: Currency.Balance.t [@key "delegatedStake"] - ; total_stake: Currency.Amount.t [@key "totalStake"] } + { delegated_stake : Currency.Balance.t [@key "delegatedStake"] + ; total_stake : Currency.Amount.t [@key "totalStake"] + } [@@deriving yojson] let is_satisfied vrf_output t = @@ -486,36 +502,40 @@ module Layout = struct module Evaluation = struct type t = - { message: Message.t - ; public_key: Signature_lib.Public_key.t [@key "publicKey"] - ; c: Scalar.t - ; s: Scalar.t - ; scaled_message_hash: Group.t [@key "ScaledMessageHash"] - ; vrf_threshold: Threshold.t option [@default None] [@key "vrfThreshold"] - ; vrf_output: Output.Truncated.t option + { message : Message.t + ; public_key : Signature_lib.Public_key.t [@key "publicKey"] + ; c : Scalar.t + ; s : Scalar.t + ; scaled_message_hash : Group.t [@key 
"ScaledMessageHash"] + ; vrf_threshold : Threshold.t option [@default None] [@key "vrfThreshold"] + ; vrf_output : Output.Truncated.t option [@default None] [@key "vrfOutput"] - ; vrf_output_fractional: float option + ; vrf_output_fractional : float option [@default None] [@key "vrfOutputFractional"] - ; threshold_met: bool option [@default None] [@key "thresholdMet"] } + ; threshold_met : bool option [@default None] [@key "thresholdMet"] + } [@@deriving yojson] let to_evaluation_and_context (t : t) : evaluation * context = - ( { discrete_log_equality= {c= t.c; s= t.s} - ; scaled_message_hash= t.scaled_message_hash } - , { message= Message.to_message t.message - ; public_key= Group.of_affine t.public_key } ) - - let of_evaluation_and_context - ((evaluation, context) : evaluation * context) : t = - { message= Message.of_message context.message - ; public_key= Group.to_affine_exn context.public_key - ; c= evaluation.discrete_log_equality.c - ; s= evaluation.discrete_log_equality.s - ; scaled_message_hash= evaluation.scaled_message_hash - ; vrf_threshold= None - ; vrf_output= None - ; vrf_output_fractional= None - ; threshold_met= None } + ( { discrete_log_equality = { c = t.c; s = t.s } + ; scaled_message_hash = t.scaled_message_hash + } + , { message = Message.to_message t.message + ; public_key = Group.of_affine t.public_key + } ) + + let of_evaluation_and_context ((evaluation, context) : evaluation * context) + : t = + { message = Message.of_message context.message + ; public_key = Group.to_affine_exn context.public_key + ; c = evaluation.discrete_log_equality.c + ; s = evaluation.discrete_log_equality.s + ; scaled_message_hash = evaluation.scaled_message_hash + ; vrf_threshold = None + ; vrf_output = None + ; vrf_output_fractional = None + ; threshold_met = None + } let of_message_and_sk ~constraint_constants (message : Message.t) (private_key : Signature_lib.Private_key.t) = @@ -526,9 +546,10 @@ module Layout = struct let standalone_eval = 
Standalone.Evaluation.create private_key message in let context : Standalone.Context.t = { message - ; public_key= + ; public_key = Signature_lib.Public_key.of_private_key_exn private_key - |> Group.of_affine } + |> Group.of_affine + } in of_evaluation_and_context (standalone_eval, context) @@ -544,9 +565,10 @@ module Layout = struct match to_vrf ~constraint_constants t with | None -> { t with - vrf_output= None - ; vrf_output_fractional= None - ; threshold_met= None } + vrf_output = None + ; vrf_output_fractional = None + ; threshold_met = None + } | Some vrf -> let vrf_output = Output.truncate vrf in let vrf_output_fractional = @@ -555,7 +577,7 @@ module Layout = struct let vrf_threshold = match (delegated_stake, total_stake) with | Some delegated_stake, Some total_stake -> - Some {Threshold.delegated_stake; total_stake} + Some { Threshold.delegated_stake; total_stake } | _ -> t.vrf_threshold in @@ -564,9 +586,10 @@ module Layout = struct in { t with vrf_threshold - ; vrf_output= Some vrf_output - ; vrf_output_fractional= Some vrf_output_fractional - ; threshold_met } + ; vrf_output = Some vrf_output + ; vrf_output_fractional = Some vrf_output_fractional + ; threshold_met + } end end @@ -589,12 +612,12 @@ let%test_unit "Standalone and integrates vrfs are consistent" = let standalone_eval = Standalone.Evaluation.create private_key message in let context : Standalone.Context.t = { message - ; public_key= + ; public_key = Signature_lib.Public_key.of_private_key_exn private_key - |> Group.of_affine } + |> Group.of_affine + } in let standalone_vrf = Standalone.Evaluation.verified_output standalone_eval context in - [%test_eq: Output_hash.value option] (Some integrated_vrf) standalone_vrf - ) + [%test_eq: Output_hash.value option] (Some integrated_vrf) standalone_vrf) diff --git a/src/lib/crs/crs.ml b/src/lib/crs/crs.ml index 22e8c38ce9a..bda54f613fd 100644 --- a/src/lib/crs/crs.ml +++ b/src/lib/crs/crs.ml @@ -5,22 +5,25 @@ let ith_bit s i = (Char.code s.[i / 8] lsr (i 
mod 8)) land 1 = 1 let digest_length_in_bits = 256 module State = struct - type t = {digest: string; i: int; j: int} + type t = { digest : string; i : int; j : int } - let update ~seed ({digest; i; j} as state) = + let update ~seed ({ digest; i; j } as state) = if j = digest_length_in_bits then let digest = Digestif.SHA256.( digest_string (seed ^ string_of_int i) |> to_raw_string) in let b = ith_bit digest 0 in - (b, {digest; i= i + 1; j= 1}) + (b, { digest; i = i + 1; j = 1 }) else let b = ith_bit digest j in - (b, {state with j= j + 1}) + (b, { state with j = j + 1 }) let init ~seed = - {digest= Digestif.SHA256.(digest_string seed |> to_raw_string); i= 0; j= 0} + { digest = Digestif.SHA256.(digest_string seed |> to_raw_string) + ; i = 0 + ; j = 0 + } end let create ~seed : t = diff --git a/src/lib/crypto_params/crypto_params.ml b/src/lib/crypto_params/crypto_params.ml index 5ec423d6815..da4c5c91977 100644 --- a/src/lib/crypto_params/crypto_params.ml +++ b/src/lib/crypto_params/crypto_params.ml @@ -21,7 +21,7 @@ module Tick = struct let group_map_params = Group_map.Params.create (module Pickles.Backend.Tock.Field) - Pickles.Backend.Tock.Inner_curve.Params.{a; b} + Pickles.Backend.Tock.Inner_curve.Params.{ a; b } include Full.Internal_Basic module Number = Snarky_backendless.Number.Make (Full.Internal_Basic) diff --git a/src/lib/crypto_params/gen/gen.ml b/src/lib/crypto_params/gen/gen.ml index 5c5bdedac10..5456284620d 100644 --- a/src/lib/crypto_params/gen/gen.ml +++ b/src/lib/crypto_params/gen/gen.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Ppxlib open Asttypes @@ -12,7 +11,7 @@ module Group = Pickles.Backend.Tick.Inner_curve let group_map_params = Group_map.Params.create (module Pickles.Backend.Tick.Field) - Group.Params.{a; b} + Group.Params.{ a; b } let group_map_params_structure ~loc = let module T = struct diff --git a/src/lib/currency/currency.ml b/src/lib/currency/currency.ml index e047a0e6adc..d7fdccb52cc 100644 --- 
a/src/lib/currency/currency.ml +++ b/src/lib/currency/currency.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_bits open Bitstring_lib @@ -85,9 +83,9 @@ end = struct let of_formatted_string input = let parts = String.split ~on:'.' input in match parts with - | [whole] -> + | [ whole ] -> of_string (whole ^ String.make precision '0') - | [whole; decimal] -> + | [ whole; decimal ] -> let decimal_length = String.length decimal in if Int.(decimal_length > precision) then of_string (whole ^ String.sub decimal ~pos:0 ~len:precision) @@ -133,16 +131,14 @@ end = struct let get t i = Infix.((t lsr i) land one = one) let set v i b = - if b then Infix.(v lor (one lsl i)) - else Infix.(v land lognot (one lsl i)) + if b then Infix.(v lor (one lsl i)) else Infix.(v land lognot (one lsl i)) end module B = Bits.Vector.Make (Vector) include (B : Bits_intf.Convertible_bits with type t := t) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] include Bits.Snarkable.Small_bit_vector (Tick) (Vector) include Unpacked @@ -193,7 +189,7 @@ end = struct module Signed = struct type ('magnitude, 'sgn) typ = ('magnitude, 'sgn) Signed_poly.t = - {magnitude: 'magnitude; sgn: 'sgn} + { magnitude : 'magnitude; sgn : 'sgn } [@@deriving sexp, hash, compare, yojson, hlist] type t = (Unsigned.t, Sgn.t) Signed_poly.t @@ -201,22 +197,22 @@ end = struct type magnitude = Unsigned.t [@@deriving sexp, compare] - let create ~magnitude ~sgn = {magnitude; sgn} + let create ~magnitude ~sgn = { magnitude; sgn } - let sgn {sgn; _} = sgn + let sgn { sgn; _ } = sgn - let magnitude {magnitude; _} = magnitude + let magnitude { magnitude; _ } = magnitude let zero = create ~magnitude:zero ~sgn:Sgn.Pos let gen = Quickcheck.Generator.map2 gen Sgn.gen ~f:(fun magnitude sgn -> if Unsigned.(equal zero magnitude) then zero - else create ~magnitude ~sgn ) + else create ~magnitude ~sgn) let 
sgn_to_bool = function Sgn.Pos -> true | Neg -> false - let to_bits ({sgn; magnitude} : t) = sgn_to_bool sgn :: to_bits magnitude + let to_bits ({ sgn; magnitude } : t) = sgn_to_bool sgn :: to_bits magnitude let to_input t = Random_oracle.Input.bitstring (to_bits t) @@ -239,45 +235,45 @@ end = struct let negate t = if Unsigned.(equal zero t.magnitude) then zero - else {t with sgn= Sgn.negate t.sgn} + else { t with sgn = Sgn.negate t.sgn } let of_unsigned magnitude = create ~magnitude ~sgn:Sgn.Pos let ( + ) = add - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type nonrec var = (var, Sgn.var) Signed_poly.t let typ = - Typ.of_hlistable [typ; Sgn.typ] ~var_to_hlist:typ_to_hlist + Typ.of_hlistable [ typ; Sgn.typ ] ~var_to_hlist:typ_to_hlist ~var_of_hlist:typ_of_hlist ~value_to_hlist:typ_to_hlist ~value_of_hlist:typ_of_hlist module Checked = struct type t = var - let to_bits {magnitude; sgn} = + let to_bits { magnitude; sgn } = Sgn.Checked.is_pos sgn :: (var_to_bits magnitude :> Boolean.var list) let to_input t = Random_oracle.Input.bitstring (to_bits t) - let constant {magnitude; sgn} = - {magnitude= var_of_t magnitude; sgn= Sgn.Checked.constant sgn} + let constant { magnitude; sgn } = + { magnitude = var_of_t magnitude; sgn = Sgn.Checked.constant sgn } - let of_unsigned magnitude = {magnitude; sgn= Sgn.Checked.pos} + let of_unsigned magnitude = { magnitude; sgn = Sgn.Checked.pos } - let negate {magnitude; sgn} = {magnitude; sgn= Sgn.Checked.negate sgn} + let negate { magnitude; sgn } = + { magnitude; sgn = Sgn.Checked.negate sgn } let if_ cond ~then_ ~else_ = let%map sgn = Sgn.Checked.if_ cond ~then_:then_.sgn ~else_:else_.sgn and magnitude = if_ cond ~then_:then_.magnitude ~else_:else_.magnitude in - {sgn; magnitude} + { sgn; magnitude } - let to_field_var ({magnitude; sgn} : var) = + let to_field_var ({ magnitude; sgn } : var) = Field.Checked.mul (pack_var magnitude) (sgn :> Field.Var.t) let add (x : var) (y : var) = @@ -293,7 +289,7 @@ end = 
struct Tick.Field.Checked.mul (sgn :> Field.Var.t) (Field.Var.add xv yv) in let%map magnitude = unpack_var res in - {magnitude; sgn} + { magnitude; sgn } let ( + ) = add @@ -332,19 +328,18 @@ end = struct let%map l = unpack_var l and r = unpack_var r in (l, r) in - ({sgn= l_sgn; magnitude= l_mag}, {sgn= r_sgn; magnitude= r_mag}) + ({ sgn = l_sgn; magnitude = l_mag }, { sgn = r_sgn; magnitude = r_mag }) let scale (f : Field.Var.t) (t : var) = let%bind x = Field.Checked.mul (pack_var t.magnitude) f in let%map x = unpack_var x in - {sgn= t.sgn; magnitude= x} + { sgn = t.sgn; magnitude = x } end [%%endif] end - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked = struct module N = Mina_numbers.Nat.Make_checked (Unsigned) (B) @@ -363,7 +358,7 @@ end = struct | true, false -> cond | false, true -> - Boolean.not cond ) + Boolean.not cond) (* Unpacking protects against underflow *) let sub (x : Unpacked.var) (y : Unpacked.var) = @@ -451,7 +446,7 @@ end = struct if Unsigned.equal i Unsigned.zero then None else let n = Unsigned.div i (Unsigned.of_int 10) in - Some (n, n) ) ) + Some (n, n))) (* TODO: When we do something to make snarks run fast for tests, increase the trials *) let qc_test_fast = Quickcheck.test ~trials:100 @@ -466,7 +461,7 @@ end = struct qc_test_fast generator ~f:(fun (lo, hi) -> expect_success (sprintf !"subtraction: lo=%{Unsigned} hi=%{Unsigned}" lo hi) - (var_of_t lo - var_of_t hi) ) + (var_of_t lo - var_of_t hi)) let%test_unit "subtraction_soundness" = let generator = @@ -478,7 +473,7 @@ end = struct qc_test_fast generator ~f:(fun (lo, hi) -> expect_failure (sprintf !"underflow: lo=%{Unsigned} hi=%{Unsigned}" lo hi) - (var_of_t lo - var_of_t hi) ) + (var_of_t lo - var_of_t hi)) let%test_unit "addition_completeness" = let generator = @@ -490,7 +485,7 @@ end = struct qc_test_fast generator ~f:(fun (x, y) -> expect_success (sprintf !"overflow: x=%{Unsigned} y=%{Unsigned}" x y) - (var_of_t x + var_of_t y) ) + (var_of_t x + 
var_of_t y)) let%test_unit "addition_soundness" = let generator = @@ -504,7 +499,7 @@ end = struct qc_test_fast generator ~f:(fun (x, y) -> expect_failure (sprintf !"overflow: x=%{Unsigned} y=%{Unsigned}" x y) - (var_of_t x + var_of_t y) ) + (var_of_t x + var_of_t y)) let%test_unit "formatting_roundtrip" = let generator = gen_incl Unsigned.zero Unsigned.max_int in @@ -526,7 +521,7 @@ end = struct raise (tag ~tag:(sprintf !"formatting: num=%{Unsigned}" num) - err)) ) + err))) let%test_unit "formatting_trailing_zeros" = let generator = gen_incl Unsigned.zero Unsigned.max_int in @@ -540,7 +535,7 @@ end = struct (of_string (sprintf !"formatting: num=%{Unsigned} formatted=%{String}" - num (to_formatted_string num)))) ) + num (to_formatted_string num))))) end ) end @@ -567,8 +562,7 @@ module Fee = struct type t = Unsigned_extended.UInt64.Stable.V1.t [@@deriving sexp, compare, hash, equal] - [%%define_from_scope - to_yojson, of_yojson, dhall_type] + [%%define_from_scope to_yojson, of_yojson, dhall_type] let to_latest = Fn.id end @@ -585,15 +579,14 @@ module Amount = struct let length = currency_length end) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] include ( T : module type of T - with type var = T.var - and module Signed = T.Signed - and module Checked := T.Checked ) + with type var = T.var + and module Signed = T.Signed + and module Checked := T.Checked ) [%%else] @@ -609,8 +602,7 @@ module Amount = struct type t = Unsigned_extended.UInt64.Stable.V1.t [@@deriving sexp, compare, hash, equal, yojson] - [%%define_from_scope - to_yojson, of_yojson, dhall_type] + [%%define_from_scope to_yojson, of_yojson, dhall_type] let to_latest = Fn.id end @@ -622,8 +614,7 @@ module Amount = struct let add_fee (t : t) (fee : Fee.t) = add t (of_fee fee) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked = struct include T.Checked @@ -653,8 +644,7 @@ module Balance = struct end end] - [%%ifdef - consensus_mechanism] + [%%ifdef 
consensus_mechanism] include (Amount : Basic with type t := t with type var = Amount.var) @@ -674,8 +664,7 @@ module Balance = struct let ( - ) = sub_amount - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked = struct include Amount.Checked @@ -702,8 +691,7 @@ end let%test_module "sub_flagged module" = ( module struct - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] open Tick @@ -725,7 +713,7 @@ let%test_module "sub_flagged module" = module Checked : sig val sub_flagged : - var -> var -> (var * [`Underflow of Boolean.var], 'a) Tick.Checked.t + var -> var -> (var * [ `Underflow of Boolean.var ], 'a) Tick.Checked.t end end @@ -749,7 +737,7 @@ let%test_module "sub_flagged module" = let m, u = sub_flagged_unchecked p in let m_checked, u_checked = sub_flagged_checked p in assert (Bool.equal u u_checked) ; - if not u then [%test_eq: M.magnitude] m m_checked ) + if not u then [%test_eq: M.magnitude] m m_checked) let%test_unit "fee sub_flagged" = run_test (module Fee) diff --git a/src/lib/currency/currency.mli b/src/lib/currency/currency.mli index c807ca24500..0b342540498 100644 --- a/src/lib/currency/currency.mli +++ b/src/lib/currency/currency.mli @@ -47,15 +47,16 @@ module Fee : sig module Checked : sig include Checked_arithmetic_intf - with type var := var - and type signed_var := Signed.var - and type value := t + with type var := var + and type signed_var := Signed.var + and type value := t val add_signed : var -> Signed.var -> (var, _) Checked.t end [%%endif] -end [@@warning "-32"] +end +[@@warning "-32"] module Amount : sig [%%versioned: @@ -98,9 +99,9 @@ module Amount : sig module Checked : sig include Checked_arithmetic_intf - with type var := var - and type signed_var := Signed.var - and type value := t + with type var := var + and type signed_var := Signed.var + and type value := t val add_signed : var -> Signed.var -> (var, _) Checked.t @@ -112,7 +113,8 @@ module Amount : sig end [%%endif] -end [@@warning "-32"] 
+end +[@@warning "-32"] module Balance : sig [%%versioned: @@ -149,15 +151,15 @@ module Balance : sig val sub_amount : var -> Amount.var -> (var, _) Checked.t val sub_amount_flagged : - var -> Amount.var -> (var * [`Underflow of Boolean.var], _) Checked.t + var -> Amount.var -> (var * [ `Underflow of Boolean.var ], _) Checked.t val add_amount_flagged : - var -> Amount.var -> (var * [`Overflow of Boolean.var], _) Checked.t + var -> Amount.var -> (var * [ `Overflow of Boolean.var ], _) Checked.t val add_signed_amount_flagged : var -> Amount.Signed.var - -> (var * [`Overflow of Boolean.var], _) Checked.t + -> (var * [ `Overflow of Boolean.var ], _) Checked.t val ( + ) : var -> Amount.var -> (var, _) Checked.t @@ -179,4 +181,5 @@ module Balance : sig end [%%endif] -end [@@warning "-32"] +end +[@@warning "-32"] diff --git a/src/lib/currency/intf.ml b/src/lib/currency/intf.ml index 22cfb6b70cf..36cd5f6f375 100644 --- a/src/lib/currency/intf.ml +++ b/src/lib/currency/intf.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_bits open Snark_params.Tick @@ -168,8 +166,7 @@ module type Signed_intf = sig [%%endif] end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module type Checked_arithmetic_intf = sig type value @@ -189,10 +186,10 @@ module type Checked_arithmetic_intf = sig val sub : var -> var -> (var, _) Checked.t val sub_flagged : - var -> var -> (var * [`Underflow of Boolean.var], _) Checked.t + var -> var -> (var * [ `Underflow of Boolean.var ], _) Checked.t val add_flagged : - var -> var -> (var * [`Overflow of Boolean.var], _) Checked.t + var -> var -> (var * [ `Overflow of Boolean.var ], _) Checked.t val ( + ) : var -> var -> (var, _) Checked.t @@ -201,7 +198,7 @@ module type Checked_arithmetic_intf = sig val add_signed : var -> signed_var -> (var, _) Checked.t val add_signed_flagged : - var -> signed_var -> (var * [`Overflow of 
Boolean.var], _) Checked.t + var -> signed_var -> (var * [ `Overflow of Boolean.var ], _) Checked.t val assert_equal : var -> var -> (unit, _) Checked.t @@ -234,9 +231,9 @@ module type S = sig module Checked : Checked_arithmetic_intf - with type var := var - and type signed_var := Signed.var - and type value := t + with type var := var + and type signed_var := Signed.var + and type value := t [%%else] diff --git a/src/lib/currency/signed_poly.ml b/src/lib/currency/signed_poly.ml index a59ddb34ce2..05d5d27e112 100644 --- a/src/lib/currency/signed_poly.ml +++ b/src/lib/currency/signed_poly.ml @@ -3,9 +3,9 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type ('magnitude, 'sgn) t = {magnitude: 'magnitude; sgn: 'sgn} + type ('magnitude, 'sgn) t = { magnitude : 'magnitude; sgn : 'sgn } [@@deriving sexp, hash, compare, equal, yojson] end end] -let map ~f {magnitude; sgn} = {magnitude= f magnitude; sgn} +let map ~f { magnitude; sgn } = { magnitude = f magnitude; sgn } diff --git a/src/lib/daemon_rpcs/client.ml b/src/lib/daemon_rpcs/client.ml index 7a37f33b3e8..c445932fbd6 100644 --- a/src/lib/daemon_rpcs/client.ml +++ b/src/lib/daemon_rpcs/client.ml @@ -30,11 +30,11 @@ let dispatch rpc query (host_and_port : Host_and_port.t) = | Error exn -> return (Or_error.errorf - !"Error connecting to the daemon on \ - %{sexp:Host_and_port.t} using the RPC call, %s,: %s" + !"Error connecting to the daemon on %{sexp:Host_and_port.t} \ + using the RPC call, %s,: %s" host_and_port (Rpc.Rpc.name rpc) (Exn.to_string exn)) | Ok conn -> - Rpc.Rpc.dispatch rpc conn query ) ) + Rpc.Rpc.dispatch rpc conn query)) let dispatch_join_errors rpc query port = let open Deferred.Let_syntax in diff --git a/src/lib/daemon_rpcs/daemon_rpcs.ml b/src/lib/daemon_rpcs/daemon_rpcs.ml index 72ef281c403..be9bf2d4b9c 100644 --- a/src/lib/daemon_rpcs/daemon_rpcs.ml +++ b/src/lib/daemon_rpcs/daemon_rpcs.ml @@ -32,8 +32,7 @@ module Send_user_commands = struct end module Get_ledger = 
struct - type query = State_hash.Stable.Latest.t option - [@@deriving bin_io_unversioned] + type query = State_hash.Stable.Latest.t option [@@deriving bin_io_unversioned] type response = Account.Stable.Latest.t list Or_error.t [@@deriving bin_io_unversioned] @@ -43,8 +42,7 @@ module Get_ledger = struct end module Get_snarked_ledger = struct - type query = State_hash.Stable.Latest.t option - [@@deriving bin_io_unversioned] + type query = State_hash.Stable.Latest.t option [@@deriving bin_io_unversioned] type response = Account.Stable.Latest.t list Or_error.t [@@deriving bin_io_unversioned] @@ -119,8 +117,7 @@ end module Chain_id_inputs = struct type query = unit [@@deriving bin_io_unversioned] - type response = - State_hash.Stable.Latest.t * Genesis_constants.t * string list + type response = State_hash.Stable.Latest.t * Genesis_constants.t * string list [@@deriving bin_io_unversioned] let rpc : (query, response) Rpc.Rpc.t = @@ -162,7 +159,7 @@ module Get_nonce = struct end module Get_status = struct - type query = [`Performance | `None] [@@deriving bin_io_unversioned] + type query = [ `Performance | `None ] [@@deriving bin_io_unversioned] type response = Types.Status.t [@@deriving bin_io_unversioned] @@ -171,13 +168,12 @@ module Get_status = struct end module Clear_hist_status = struct - type query = [`Performance | `None] [@@deriving bin_io_unversioned] + type query = [ `Performance | `None ] [@@deriving bin_io_unversioned] type response = Types.Status.t [@@deriving bin_io_unversioned] let rpc : (query, response) Rpc.Rpc.t = - Rpc.Rpc.create ~name:"Clear_hist_status" ~version:0 ~bin_query - ~bin_response + Rpc.Rpc.create ~name:"Clear_hist_status" ~version:0 ~bin_query ~bin_response end module Get_public_keys_with_details = struct @@ -258,7 +254,7 @@ module Visualization = struct module Frontier = struct type query = string [@@deriving bin_io_unversioned] - type response = [`Active of unit | `Bootstrapping] + type response = [ `Active of unit | `Bootstrapping ] 
[@@deriving bin_io_unversioned] let rpc : (query, response) Rpc.Rpc.t = @@ -305,8 +301,7 @@ module Get_trustlist = struct end module Get_node_status = struct - type query = Mina_net2.Multiaddr.t list option - [@@deriving bin_io_unversioned] + type query = Mina_net2.Multiaddr.t list option [@@deriving bin_io_unversioned] type response = Mina_networking.Rpcs.Get_node_status.Node_status.Stable.Latest.t Or_error.t diff --git a/src/lib/daemon_rpcs/types.ml b/src/lib/daemon_rpcs/types.ml index 525f59ab535..9621d69a835 100644 --- a/src/lib/daemon_rpcs/types.ml +++ b/src/lib/daemon_rpcs/types.ml @@ -30,13 +30,13 @@ module Status = struct let padding = String.init (max_key_length - String.length s) ~f:(fun _ -> ' ') in - sprintf "%s: %s %s" s padding x ) + sprintf "%s: %s %s" s padding x) |> String.concat ~sep:"\n" in title ^ "\n" ^ output ^ "\n" let summarize_report - {Perf_histograms.Report.values; intervals; overflow; underflow} = + { Perf_histograms.Report.values; intervals; overflow; underflow } = (* Show the largest 3 buckets *) let zipped = List.zip_exn values intervals in let best = @@ -47,7 +47,7 @@ module Status = struct List.map best ~f:(fun (v, (lo, hi)) -> Printf.sprintf !"(%{sexp: Time.Span.t}, %{sexp: Time.Span.t}): %d" - lo hi v ) + lo hi v) in let total = List.sum (module Int) values ~f:Fn.id in List.fold msgs @@ -58,21 +58,22 @@ module Status = struct module Rpc_timings = struct module Rpc_pair = struct - type 'a t = {dispatch: 'a; impl: 'a} + type 'a t = { dispatch : 'a; impl : 'a } [@@deriving to_yojson, bin_io_unversioned, fields] end type t = - { get_staged_ledger_aux: Perf_histograms.Report.t option Rpc_pair.t - ; answer_sync_ledger_query: Perf_histograms.Report.t option Rpc_pair.t - ; get_ancestry: Perf_histograms.Report.t option Rpc_pair.t - ; get_transition_chain_proof: Perf_histograms.Report.t option Rpc_pair.t - ; get_transition_chain: Perf_histograms.Report.t option Rpc_pair.t } + { get_staged_ledger_aux : Perf_histograms.Report.t option 
Rpc_pair.t + ; answer_sync_ledger_query : Perf_histograms.Report.t option Rpc_pair.t + ; get_ancestry : Perf_histograms.Report.t option Rpc_pair.t + ; get_transition_chain_proof : Perf_histograms.Report.t option Rpc_pair.t + ; get_transition_chain : Perf_histograms.Report.t option Rpc_pair.t + } [@@deriving to_yojson, bin_io_unversioned, fields] let to_text s = let entries = - let add_rpcs ~name {Rpc_pair.dispatch; impl} acc = + let add_rpcs ~name { Rpc_pair.dispatch; impl } acc = let name k = let go s = sprintf "%s (%s)" name s in match k with `Dispatch -> go "dispatch" | `Impl -> go "impl" @@ -90,14 +91,14 @@ module Status = struct let f x = Field.get x s in Fields.fold ~init:[] ~get_staged_ledger_aux:(fun acc x -> - add_rpcs ~name:"Get Staged Ledger Aux" (f x) acc ) + add_rpcs ~name:"Get Staged Ledger Aux" (f x) acc) ~answer_sync_ledger_query:(fun acc x -> - add_rpcs ~name:"Answer Sync Ledger Query" (f x) acc ) + add_rpcs ~name:"Answer Sync Ledger Query" (f x) acc) ~get_ancestry:(fun acc x -> add_rpcs ~name:"Get Ancestry" (f x) acc) ~get_transition_chain_proof:(fun acc x -> - add_rpcs ~name:"Get transition chain proof" (f x) acc ) + add_rpcs ~name:"Get transition chain proof" (f x) acc) ~get_transition_chain:(fun acc x -> - add_rpcs ~name:"Get transition chain" (f x) acc ) + add_rpcs ~name:"Get transition chain" (f x) acc) |> List.rev in digest_entries ~title:"RPCs" entries @@ -105,12 +106,13 @@ module Status = struct module Histograms = struct type t = - { rpc_timings: Rpc_timings.t - ; external_transition_latency: Perf_histograms.Report.t option - ; accepted_transition_local_latency: Perf_histograms.Report.t option - ; accepted_transition_remote_latency: Perf_histograms.Report.t option - ; snark_worker_transition_time: Perf_histograms.Report.t option - ; snark_worker_merge_time: Perf_histograms.Report.t option } + { rpc_timings : Rpc_timings.t + ; external_transition_latency : Perf_histograms.Report.t option + ; accepted_transition_local_latency : 
Perf_histograms.Report.t option + ; accepted_transition_remote_latency : Perf_histograms.Report.t option + ; snark_worker_transition_time : Perf_histograms.Report.t option + ; snark_worker_merge_time : Perf_histograms.Report.t option + } [@@deriving to_yojson, bin_io_unversioned, fields] let to_text s = @@ -118,13 +120,13 @@ module Status = struct let f x = Field.get x s in Fields.fold ~init:[] ~rpc_timings:(fun acc x -> - ("RPC Timings", Rpc_timings.to_text (f x)) :: acc ) + ("RPC Timings", Rpc_timings.to_text (f x)) :: acc) ~external_transition_latency:(fun acc x -> match f x with | None -> acc | Some report -> - ("Block Latencies (hist.)", summarize_report report) :: acc ) + ("Block Latencies (hist.)", summarize_report report) :: acc) ~accepted_transition_local_latency:(fun acc x -> match f x with | None -> @@ -132,7 +134,7 @@ module Status = struct | Some report -> ( "Accepted local block Latencies (hist.)" , summarize_report report ) - :: acc ) + :: acc) ~accepted_transition_remote_latency:(fun acc x -> match f x with | None -> @@ -140,33 +142,32 @@ module Status = struct | Some report -> ( "Accepted remote block Latencies (hist.)" , summarize_report report ) - :: acc ) + :: acc) ~snark_worker_transition_time:(fun acc x -> match f x with | None -> acc | Some report -> - ("Snark Worker a->b (hist.)", summarize_report report) :: acc - ) + ("Snark Worker a->b (hist.)", summarize_report report) :: acc) ~snark_worker_merge_time:(fun acc x -> match f x with | None -> acc | Some report -> - ("Snark Worker Merge (hist.)", summarize_report report) :: acc - ) + ("Snark Worker Merge (hist.)", summarize_report report) :: acc) in digest_entries ~title:"Performance Histograms" entries end module Next_producer_timing = struct type slot = - { slot: Mina_numbers.Global_slot.Stable.Latest.t - ; global_slot_since_genesis: Mina_numbers.Global_slot.Stable.Latest.t } + { slot : Mina_numbers.Global_slot.Stable.Latest.t + ; global_slot_since_genesis : 
Mina_numbers.Global_slot.Stable.Latest.t + } [@@deriving to_yojson, fields, bin_io_unversioned] (* time is the start-time of for_slot.slot*) - type producing_time = {time: Block_time.Stable.Latest.t; for_slot: slot} + type producing_time = { time : Block_time.Stable.Latest.t; for_slot : slot } [@@deriving to_yojson, bin_io_unversioned, fields] type timing = @@ -175,7 +176,7 @@ module Status = struct | Produce_now of producing_time [@@deriving to_yojson, bin_io_unversioned] - type t = {generated_from_consensus_at: slot; timing: timing} + type t = { generated_from_consensus_at : slot; timing : timing } [@@deriving to_yojson, bin_io_unversioned] end @@ -208,7 +209,7 @@ module Status = struct let list_str = if len > 0 then " " ^ List.to_string ~f:to_string list else "" in - Printf.sprintf "%d%s" len list_str ) + Printf.sprintf "%d%s" len list_str) let num_accounts = int_option_entry "Global number of accounts" @@ -221,7 +222,7 @@ module Status = struct let uptime_secs = map_entry "Local uptime" ~f:(fun secs -> - Time.Span.to_string (Time.Span.of_int_sec secs) ) + Time.Span.to_string (Time.Span.of_int_sec secs)) let ledger_merkle_root = string_option_entry "Ledger Merkle root" @@ -255,7 +256,7 @@ module Status = struct | None -> "Block producer" | Some pk -> - pk ) + pk) let histograms = option_entry "Histograms" ~f:Histograms.to_text @@ -289,11 +290,11 @@ module Status = struct | Check_again time -> sprintf "None this epoch… checking at %s (%s)" (str time) generated_from - | Produce {time; for_slot} -> + | Produce { time; for_slot } -> sprintf "%s for %s (%s)" (str time) (slot_str for_slot) generated_from - | Produce_now {for_slot; _} -> - sprintf "Now (for %s %s)" (slot_str for_slot) generated_from ) + | Produce_now { for_slot; _ } -> + sprintf "Now (for %s %s)" (slot_str for_slot) generated_from) let consensus_time_best_tip = option_entry "Best tip consensus time" @@ -333,7 +334,7 @@ module Status = struct let addrs_and_ports = let render conf = - let fmt_field name 
op field = [(name, op (Field.get field conf))] in + let fmt_field name op field = [ (name, op (Field.get field conf)) ] in Node_addrs_and_ports.Display.Stable.V1.Fields.to_list ~external_ip:(fmt_field "External IP" Fn.id) ~bind_ip:(fmt_field "Bind IP" Fn.id) @@ -343,9 +344,9 @@ module Status = struct let peer = Field.get field conf in match peer with | Some peer -> - [("Libp2p PeerID", peer.peer_id)] + [ ("Libp2p PeerID", peer.peer_id) ] | None -> - [] ) + []) |> List.concat |> List.map ~f:(fun (s, v) -> ("\t" ^ s, v)) |> digest_entries ~title:"" @@ -376,47 +377,48 @@ module Status = struct | Wait_for_parent -> "Waiting for parent to finish" in - ("\t" ^ s, Int.to_string n) ) + ("\t" ^ s, Int.to_string n)) |> digest_entries ~title:"" in option_entry "Catchup status" ~f:render end type t = - { num_accounts: int option - ; blockchain_length: int option - ; highest_block_length_received: int - ; highest_unvalidated_block_length_received: int - ; uptime_secs: int - ; ledger_merkle_root: string option - ; state_hash: string option - ; chain_id: string - ; commit_id: Git_sha.Stable.Latest.t - ; conf_dir: string - ; peers: Network_peer.Peer.Display.Stable.Latest.t list - ; user_commands_sent: int - ; snark_worker: string option - ; snark_work_fee: int - ; sync_status: Sync_status.Stable.Latest.t - ; catchup_status: + { num_accounts : int option + ; blockchain_length : int option + ; highest_block_length_received : int + ; highest_unvalidated_block_length_received : int + ; uptime_secs : int + ; ledger_merkle_root : string option + ; state_hash : string option + ; chain_id : string + ; commit_id : Git_sha.Stable.Latest.t + ; conf_dir : string + ; peers : Network_peer.Peer.Display.Stable.Latest.t list + ; user_commands_sent : int + ; snark_worker : string option + ; snark_work_fee : int + ; sync_status : Sync_status.Stable.Latest.t + ; catchup_status : (Transition_frontier.Full_catchup_tree.Node.State.Enum.t * int) list option - ; block_production_keys: string list - ; 
coinbase_receiver: string option - ; histograms: Histograms.t option - ; consensus_time_best_tip: + ; block_production_keys : string list + ; coinbase_receiver : string option + ; histograms : Histograms.t option + ; consensus_time_best_tip : Consensus.Data.Consensus_time.Stable.Latest.t option - ; global_slot_since_genesis_best_tip: int option - ; next_block_production: Next_producer_timing.t option - ; consensus_time_now: Consensus.Data.Consensus_time.Stable.Latest.t - ; consensus_mechanism: string - ; consensus_configuration: Consensus.Configuration.Stable.Latest.t - ; addrs_and_ports: Node_addrs_and_ports.Display.Stable.Latest.t } + ; global_slot_since_genesis_best_tip : int option + ; next_block_production : Next_producer_timing.t option + ; consensus_time_now : Consensus.Data.Consensus_time.Stable.Latest.t + ; consensus_mechanism : string + ; consensus_configuration : Consensus.Configuration.Stable.Latest.t + ; addrs_and_ports : Node_addrs_and_ports.Display.Stable.Latest.t + } [@@deriving to_yojson, bin_io_unversioned, fields] let entries (s : t) = let module M = Make_entries (struct - type nonrec 'a t = ([`Read | `Set_and_create], t, 'a) Field.t_with_perm + type nonrec 'a t = ([ `Read | `Set_and_create ], t, 'a) Field.t_with_perm let get field = Field.get field s end) in diff --git a/src/lib/data_hash_lib/data_hash.ml b/src/lib/data_hash_lib/data_hash.ml index a7067bc4d10..872b0da0efd 100644 --- a/src/lib/data_hash_lib/data_hash.ml +++ b/src/lib/data_hash_lib/data_hash.ml @@ -1,12 +1,10 @@ (* data_hash.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Bitstring_lib @@ -37,8 +35,7 @@ struct let to_input t = Random_oracle.Input.field t - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] (* this is in consensus code, because Bigint comes from snarky functors @@ -54,21 +51,23 @@ struct ~f:(fun x -> Bigint.(to_field 
(of_bignum_bigint x))) type var = - { digest: Random_oracle.Checked.Digest.t - ; mutable bits: Boolean.var Bitstring.Lsb_first.t option } + { digest : Random_oracle.Checked.Digest.t + ; mutable bits : Boolean.var Bitstring.Lsb_first.t option + } let var_of_t t = let n = Bigint.of_field t in - { digest= Field.Var.constant t - ; bits= + { digest = Field.Var.constant t + ; bits = Some (Bitstring.Lsb_first.of_list (List.init M.length_in_bits ~f:(fun i -> - Boolean.var_of_value (Bigint.test_bit n i) ))) } + Boolean.var_of_value (Bigint.test_bit n i)))) + } open Let_syntax - let var_to_hash_packed {digest; _} = digest + let var_to_hash_packed { digest; _ } = digest (* TODO: Audit this usage of choose_preimage *) let unpack = @@ -104,8 +103,8 @@ struct let typ : (var, t) Typ.t = Typ.transport_var Typ.field - ~there:(fun {digest; bits= _} -> digest) - ~back:(fun digest -> {digest; bits= None}) + ~there:(fun { digest; bits = _ } -> digest) + ~back:(fun digest -> { digest; bits = None }) [%%endif] end @@ -115,7 +114,7 @@ module T0 = struct module Stable = struct module V1 = struct type t = Field.t - [@@deriving sexp, compare, hash, version {asserted}, bin_io] + [@@deriving sexp, compare, hash, version { asserted }, bin_io] let to_latest = Fn.id end @@ -133,8 +132,7 @@ module T0 = struct Quickcheck.random_value ~seed:(`Deterministic "Data_hash.T0 tests") Field.gen - [%%if - curve_size = 255] + [%%if curve_size = 255] let%test "Binable from stringable V1" = let known_good_digest = "fa43c8180f9f3cef1cf5767592e964c1" in @@ -170,11 +168,9 @@ module Make_full_size (B58_data : Data_hash_intf.Data_hash_descriptor) = struct [%%define_locally Base58_check.(to_base58_check, of_base58_check, of_base58_check_exn)] - [%%define_locally - Base58_check.String_ops.(to_string, of_string)] + [%%define_locally Base58_check.String_ops.(to_string, of_string)] - [%%define_locally - Base58_check.(to_yojson, of_yojson)] + [%%define_locally Base58_check.(to_yojson, of_yojson)] module T = struct type t 
= Field.t [@@deriving sexp, compare, hash] @@ -185,16 +181,15 @@ module Make_full_size (B58_data : Data_hash_intf.Data_hash_descriptor) = struct let of_hash = Fn.id - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] - let var_of_hash_packed digest = {digest; bits= None} + let var_of_hash_packed digest = { digest; bits = None } let if_ cond ~then_ ~else_ = let%map digest = Field.Checked.if_ cond ~then_:then_.digest ~else_:else_.digest in - {digest; bits= None} + { digest; bits = None } [%%endif] end diff --git a/src/lib/data_hash_lib/data_hash.mli b/src/lib/data_hash_lib/data_hash.mli index f9a793ab652..023eebd017d 100644 --- a/src/lib/data_hash_lib/data_hash.mli +++ b/src/lib/data_hash_lib/data_hash.mli @@ -3,4 +3,5 @@ module type Full_size = Data_hash_intf.Full_size module Make_full_size (B58_data : Data_hash_intf.Data_hash_descriptor) : - Full_size [@@warning "-67"] + Full_size +[@@warning "-67"] diff --git a/src/lib/data_hash_lib/data_hash_intf.ml b/src/lib/data_hash_lib/data_hash_intf.ml index f1f4b1ef9ce..5245dfd8f31 100644 --- a/src/lib/data_hash_lib/data_hash_intf.ml +++ b/src/lib/data_hash_lib/data_hash_intf.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Snark_bits diff --git a/src/lib/data_hash_lib/state_hash.ml b/src/lib/data_hash_lib/state_hash.ml index c45afbdfa48..d495e5efce1 100644 --- a/src/lib/data_hash_lib/state_hash.ml +++ b/src/lib/data_hash_lib/state_hash.ml @@ -1,12 +1,10 @@ (* state_hash.ml -- defines the type for the protocol state hash *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Outside_hash_image = Outside_hash_image_nonconsensus.Outside_hash_image module Random_oracle = Random_oracle_nonconsensus.Random_oracle @@ -26,8 +24,7 @@ end) let dummy = of_hash Outside_hash_image.t 
-[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let zero = dummy @@ -54,15 +51,14 @@ module Stable = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) diff --git a/src/lib/data_hash_lib/state_hash.mli b/src/lib/data_hash_lib/state_hash.mli index 04c1a2b8e1b..56f3547263b 100644 --- a/src/lib/data_hash_lib/state_hash.mli +++ b/src/lib/data_hash_lib/state_hash.mli @@ -20,7 +20,7 @@ include Codable.Base58_check_intf with type t := t val raw_hash_bytes : t -> string -val to_bytes : [`Use_to_base58_check_or_raw_hash_bytes] +val to_bytes : [ `Use_to_base58_check_or_raw_hash_bytes ] (* value of type t, not a valid hash *) val dummy : t diff --git a/src/lib/direction/direction.ml b/src/lib/direction/direction.ml index 3986add96d0..f893529fff3 100644 --- a/src/lib/direction/direction.ml +++ b/src/lib/direction/direction.ml @@ -33,4 +33,4 @@ let shrinker = | Left -> None | Right -> - Some (Left, Left) ) ) + Some (Left, Left))) diff --git a/src/lib/distributed_dsl/distributed_dsl.ml b/src/lib/distributed_dsl/distributed_dsl.ml index ca6d1c61505..810a92775cf 100644 --- a/src/lib/distributed_dsl/distributed_dsl.ml +++ b/src/lib/distributed_dsl/distributed_dsl.ml @@ -19,23 +19,25 @@ end module Time_queue = struct type 'action t = - { mutable curr_time: Time.Span.t - ; pending_actions: ('action * Time.Span.t) Pairing_heap.t - ; mutable on_new_action: unit Ivar.t option } + { mutable curr_time : Time.Span.t + ; pending_actions : ('action * Time.Span.t) Pairing_heap.t + ; mutable on_new_action : unit Ivar.t option + } let handle_in_future t ~after action = Option.iter t.on_new_action ~f:(fun ivar -> Ivar.fill_if_empty ivar () ; - 
t.on_new_action <- None ) ; + t.on_new_action <- None) ; Pairing_heap.add t.pending_actions (action, Time.Span.(after + t.curr_time)) let create ~now = - { curr_time= now - ; pending_actions= + { curr_time = now + ; pending_actions = Pairing_heap.create ~cmp:(fun (_, ts) (_, ts') -> Time.Span.compare ts ts') () - ; on_new_action= None } + ; on_new_action = None + } let actions_ready t = match (Pairing_heap.top t.pending_actions, t.on_new_action) with @@ -79,7 +81,7 @@ module Time_queue = struct let%test_unit "time_queue_empty_returns" = Async.Thread_safe.block_on_async_exn (fun () -> let t = create ~now:Time.Span.zero in - tick_forwards t ~f:(fun _ -> return (assert false)) ) + tick_forwards t ~f:(fun _ -> return (assert false))) let%test_unit "time_queue_handles_in_order" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -88,7 +90,7 @@ module Time_queue = struct let%map () = tick_forwards t ~f:(fun next -> Char.Table.add_exn table ~key:next ~data:() ; - return () ) + return ()) in if Char.Table.length table <> List.length actions then failwithf @@ -102,11 +104,11 @@ module Time_queue = struct let t : char t = create ~now:Time.Span.zero in handle_in_future t ~after:(Time.Span.of_int_sec 100) 'a' ; handle_in_future t ~after:(Time.Span.of_int_sec 10) 'b' ; - let%bind () = tick_assert_sees t ['b'] in + let%bind () = tick_assert_sees t [ 'b' ] in handle_in_future t ~after:(Time.Span.of_int_sec 10) 'c' ; handle_in_future t ~after:(Time.Span.of_int_sec 10) 'd' ; - let%bind () = tick_assert_sees t ['c'; 'd'] in - tick_assert_sees t ['a'] ) + let%bind () = tick_assert_sees t [ 'c'; 'd' ] in + tick_assert_sees t [ 'a' ]) end module type Temporal_intf = sig @@ -128,15 +130,15 @@ module type Fake_timer_transport_intf = sig end module type Fake_timer_transport_s = functor - (Message :sig - - type t - end) + (Message : sig + type t + end) (Message_delay : Message_delay_intf with type message := Message.t) (Peer : Node.Peer_intf) - -> Fake_timer_transport_intf - with type 
message := Message.t - and type peer := Peer.t + -> + Fake_timer_transport_intf + with type message := Message.t + and type peer := Peer.t [@@warning "-67"] module Fake_timer_transport (Message : sig @@ -154,20 +156,22 @@ struct type peer = Peer.t type action = - | Timeout of [`Cancelled | `Finished] Ivar.t + | Timeout of [ `Cancelled | `Finished ] Ivar.t | Msg of message * peer type t = - { network: + { network : (message Linear_pipe.Reader.t * message Linear_pipe.Writer.t) Peer.Table.t - ; q: action Time_queue.t - ; timer_stoppers: [`Cancelled | `Finished] Ivar.t Token.Table.t } + ; q : action Time_queue.t + ; timer_stoppers : [ `Cancelled | `Finished ] Ivar.t Token.Table.t + } let create ~now = - { network= Peer.Table.create () - ; q= Time_queue.create ~now - ; timer_stoppers= Token.Table.create () } + { network = Peer.Table.create () + ; q = Time_queue.create ~now + ; timer_stoppers = Token.Table.create () + } let actions_ready t = Time_queue.actions_ready t.q @@ -177,14 +181,14 @@ struct Ivar.fill_if_empty ivar `Finished ; Ivar.read ivar >>| Fn.const () | Msg (m, p) -> ( - match Peer.Table.find t.network p with - | None -> - failwithf "Unknown recipient %s" - (Peer.sexp_of_t p |> Sexp.to_string_hum) - () - | Some (r, w) -> - Linear_pipe.write_or_exn ~capacity:1024 w r m ; - Linear_pipe.values_available r >>| Fn.const () ) ) + match Peer.Table.find t.network p with + | None -> + failwithf "Unknown recipient %s" + (Peer.sexp_of_t p |> Sexp.to_string_hum) + () + | Some (r, w) -> + Linear_pipe.write_or_exn ~capacity:1024 w r m ; + Linear_pipe.values_available r >>| Fn.const () )) let wait t ts = let tok = Ident.next () in @@ -239,52 +243,48 @@ module Trivial_peer : Trivial_peer_intf = struct end [@@@warning "-67"] -module type S = functor - (State :sig - type t [@@deriving equal, sexp, yojson] - end) - (Message :sig - - type t - end) +module type S = functor + (State : sig + type t [@@deriving equal, sexp, yojson] + end) + (Message : sig + type t + end) 
(Message_delay : Message_delay_intf with type message := Message.t) - (Message_label :sig - - type label [@@deriving enum, sexp] + (Message_label : sig + type label [@@deriving enum, sexp] - include Hashable.S with type t = label - end) - (Timer_label :sig + include Hashable.S with type t = label + end) + (Timer_label : sig + type label [@@deriving enum, sexp] - type label [@@deriving enum, sexp] + include Hashable.S with type t = label + end) + (Condition_label : sig + type label [@@deriving enum, sexp, yojson] - include Hashable.S with type t = label - end) - (Condition_label :sig - - type label [@@deriving enum, sexp, yojson] - - include Hashable.S with type t = label - end) + include Hashable.S with type t = label + end) -> sig type t module Timer_transport : Fake_timer_transport_intf - with type message := Message.t - and type peer := Trivial_peer.t + with type message := Message.t + and type peer := Trivial_peer.t module MyNode : Node.S - with type message := Message.t - and type state := State.t - and type transport := Timer_transport.t - and type peer := Trivial_peer.t - and module Message_label := Message_label - and module Timer_label := Timer_label - and module Condition_label := Condition_label - and module Timer := Timer_transport + with type message := Message.t + and type state := State.t + and type transport := Timer_transport.t + and type peer := Trivial_peer.t + and module Message_label := Message_label + and module Timer_label := Timer_label + and module Condition_label := Condition_label + and module Timer := Timer_transport module Identifier : sig type t = Trivial_peer.t @@ -304,6 +304,7 @@ module type S = functor -> stop:unit Deferred.t -> t end + [@@@warning "+67"] module Make (State : sig @@ -312,10 +313,10 @@ end) (Message : sig type t end) (Message_delay : Message_delay_intf with type message := Message.t) - (Message_label : sig - type label [@@deriving enum, sexp] +(Message_label : sig + type label [@@deriving enum, sexp] - include 
Hashable.S with type t = label + include Hashable.S with type t = label end) (Timer_label : sig type label [@@deriving enum, sexp] @@ -329,8 +330,7 @@ struct module Timer_transport = Fake_timer_transport (Message) (Message_delay) (Trivial_peer) module MyNode = - Node.Make (State) (Message) (Trivial_peer) (Timer_transport) - (Message_label) + Node.Make (State) (Message) (Trivial_peer) (Timer_transport) (Message_label) (Timer_label) (Condition_label) (Timer_transport) @@ -341,7 +341,7 @@ struct include MyNode.Identifier end - type t = {nodes: MyNode.t Identifier.Table.t; timer: Timer_transport.t} + type t = { nodes : MyNode.t Identifier.Table.t; timer : Timer_transport.t } type change = Delete of Identifier.t | Add of MyNode.t @@ -350,7 +350,7 @@ struct | Delete ident -> Identifier.Table.remove t.nodes ident | Add n -> - Identifier.Table.add_exn t.nodes ~key:(MyNode.ident n) ~data:n ) + Identifier.Table.add_exn t.nodes ~key:(MyNode.ident n) ~data:n) let rec loop t ~stop ~max_iters = match max_iters with @@ -387,13 +387,14 @@ struct >>| fun () -> ( merge (Deferred.peek a) a_imm , merge (Deferred.peek b) b_imm - , Some () ) ) ] + , Some () ) ) + ] in let node_ready : MyNode.t Deferred.t = let any_ready : MyNode.t Deferred.t = Deferred.any (List.map (Identifier.Table.data t.nodes) ~f:(fun n -> - MyNode.next_ready n >>| Fn.const n )) + MyNode.next_ready n >>| Fn.const n)) in any_ready >>| fun n -> @@ -406,7 +407,7 @@ struct | None, x when MyNode.is_ready x -> Some x | None, _ -> - acc ) + acc) in Option.value maybe_real ~default:n in @@ -443,17 +444,17 @@ struct let messages = Timer_transport.listen timer ~me:i in let msg_commands, handle_commands = cmds_per_node i in MyNode.make_node ~logger:(Logger.create ()) ~transport:timer ~me:i - ~messages ~initial_state ~timer msg_commands handle_commands ) + ~messages ~initial_state ~timer msg_commands handle_commands) in (* Schedule cleanup *) don't_wait_for (let%map () = stop in List.iter nodes ~f:(fun n -> - 
Timer_transport.stop_listening timer ~me:(MyNode.ident n) )) ; + Timer_transport.stop_listening timer ~me:(MyNode.ident n))) ; (* Fill table *) List.iter nodes ~f:(fun n -> - Identifier.Table.add_exn table ~key:(MyNode.ident n) ~data:n ) ; - {nodes= table; timer} + Identifier.Table.add_exn table ~key:(MyNode.ident n) ~data:n) ; + { nodes = table; timer } end let%test_module "Distributed_dsl" = @@ -464,7 +465,7 @@ let%test_module "Distributed_dsl" = | `Success -> () | `Failure s -> - failwith s ) + failwith s) module State = struct type t = Start | Wait_msg | Sent_msg | Got_msg of int | Timeout @@ -540,15 +541,17 @@ let%test_module "Distributed_dsl" = (List.init (count - 1) ~f:(fun i -> i + 1)) (Msg 10) in - Sent_msg ) ; - return Wait_msg ) ] ) + Sent_msg) ; + return Wait_msg) + ] ) in let specRest = let open Machine.MyNode in ( [ msg Send_msg (Fn.const (Fn.const true)) ~f:(fun _t (Msg i) -> function Wait_msg -> return (Got_msg i) - | m -> return m ) ] + | m -> return m) + ] , [ on Init (function Start -> true | _ -> false) ~f:(fun _ _ -> return Wait_msg) @@ -557,8 +560,8 @@ let%test_module "Distributed_dsl" = ~f:(fun t state -> timeout' t Timeout_message (Time.Span.of_sec 20.) 
~f:(fun _t -> function - | Got_msg _ as m -> return m | _ -> return Timeout ) ; - return state ) + | Got_msg _ as m -> return m | _ -> return Timeout) ; + return state) ; on Failure_case (function | Timeout -> @@ -566,22 +569,22 @@ let%test_module "Distributed_dsl" = | Got_msg i when i <= 5 -> true | _ -> - false ) + false) ~f:(fun _ _ -> failwith "All nodes should have received a message containing a \ - number more than five" ) + number more than five") ; on Bigger_than_five (function Got_msg i -> i > 5 | _ -> false) ~f:(fun t state -> cancel t Timeout_message ; Ivar.fill_if_empty finish_ivar `Success ; - return state ) ] ) + return state) + ] ) in let machine = - Machine.create ~count ~initial_state:Start - ~stop:(Deferred.never ()) (fun i -> - if i = 0 then spec0 else specRest ) + Machine.create ~count ~initial_state:Start ~stop:(Deferred.never ()) + (fun i -> if i = 0 then spec0 else specRest) in don't_wait_for (let%map () = @@ -589,6 +592,5 @@ let%test_module "Distributed_dsl" = ~max_iters:(Some 10000) in Ivar.fill_if_empty finish_ivar - (`Failure "Stopped looping without getting to success state")) - ) + (`Failure "Stopped looping without getting to success state"))) end ) diff --git a/src/lib/distributed_dsl/distributed_dsl.mli b/src/lib/distributed_dsl/distributed_dsl.mli index fda3eaf06be..027405ed437 100644 --- a/src/lib/distributed_dsl/distributed_dsl.mli +++ b/src/lib/distributed_dsl/distributed_dsl.mli @@ -26,16 +26,16 @@ module type Fake_timer_transport_intf = sig end module type Fake_timer_transport_s = functor - (Message :sig - - type t - end) + (Message : sig + type t + end) (Message_delay : Message_delay_intf with type message := Message.t) (Peer : Node.Peer_intf) - -> Fake_timer_transport_intf - with type message := Message.t - and type peer := Peer.t - [@@warning "-67"] + -> + Fake_timer_transport_intf + with type message := Message.t + and type peer := Peer.t +[@@warning "-67"] module type Trivial_peer_intf = sig type t = int [@@deriving 
equal, hash, compare, sexp, yojson] @@ -46,52 +46,48 @@ end module Trivial_peer : Trivial_peer_intf [@@@warning "-67"] -module type S = functor - (State :sig - type t [@@deriving equal, sexp, yojson] - end) - (Message :sig - - type t - end) +module type S = functor + (State : sig + type t [@@deriving equal, sexp, yojson] + end) + (Message : sig + type t + end) (Message_delay : Message_delay_intf with type message := Message.t) - (Message_label :sig - - type label [@@deriving enum, sexp] + (Message_label : sig + type label [@@deriving enum, sexp] - include Hashable.S with type t = label - end) - (Timer_label :sig + include Hashable.S with type t = label + end) + (Timer_label : sig + type label [@@deriving enum, sexp] - type label [@@deriving enum, sexp] + include Hashable.S with type t = label + end) + (Condition_label : sig + type label [@@deriving enum, sexp, yojson] - include Hashable.S with type t = label - end) - (Condition_label :sig - - type label [@@deriving enum, sexp, yojson] - - include Hashable.S with type t = label - end) + include Hashable.S with type t = label + end) -> sig type t module Timer_transport : Fake_timer_transport_intf - with type message := Message.t - and type peer := Trivial_peer.t + with type message := Message.t + and type peer := Trivial_peer.t module MyNode : Node.S - with type message := Message.t - and type state := State.t - and type transport := Timer_transport.t - and type peer := Trivial_peer.t - and module Message_label := Message_label - and module Timer_label := Timer_label - and module Condition_label := Condition_label - and module Timer := Timer_transport + with type message := Message.t + and type state := State.t + and type transport := Timer_transport.t + and type peer := Trivial_peer.t + and module Message_label := Message_label + and module Timer_label := Timer_label + and module Condition_label := Condition_label + and module Timer := Timer_transport module Identifier : sig type t = Trivial_peer.t @@ -111,6 +107,7 
@@ module type S = functor -> stop:unit Deferred.t -> t end + [@@@warning "+67"] module Make : S diff --git a/src/lib/distributed_dsl/node.ml b/src/lib/distributed_dsl/node.ml index a50d167d28c..7c9031367e4 100644 --- a/src/lib/distributed_dsl/node.ml +++ b/src/lib/distributed_dsl/node.ml @@ -25,7 +25,7 @@ module type Timer_intf = sig type tok [@@deriving equal] - val wait : t -> Time.Span.t -> tok * [`Cancelled | `Finished] Deferred.t + val wait : t -> Time.Span.t -> tok * [ `Cancelled | `Finished ] Deferred.t (* No-ops if already cancelled *) @@ -113,45 +113,42 @@ module type S = sig end module type F = functor - (State :sig - - type t [@@deriving equal, sexp, yojson] - end) - (Message :sig - - type t - end) + (State : sig + type t [@@deriving equal, sexp, yojson] + end) + (Message : sig + type t + end) (Peer : Peer_intf) (Timer : Timer_intf) - (Message_label :sig - - type label [@@deriving enum, sexp] - - include Hashable.S with type t = label - end) - (Timer_label :sig - - type label [@@deriving enum, sexp] - - include Hashable.S with type t = label - end) - (Condition_label :sig - - type label [@@deriving enum, sexp, yojson] - - include Hashable.S with type t = label - end) - (Transport : - Transport_intf with type message := Message.t and type peer := Peer.t) - -> S - with type message := Message.t - and type state := State.t - and type transport := Transport.t - and type peer := Peer.t - and module Message_label := Message_label - and module Timer_label := Timer_label - and module Condition_label := Condition_label - and module Timer := Timer + (Message_label : sig + type label [@@deriving enum, sexp] + + include Hashable.S with type t = label + end) + (Timer_label : sig + type label [@@deriving enum, sexp] + + include Hashable.S with type t = label + end) + (Condition_label : sig + type label [@@deriving enum, sexp, yojson] + + include Hashable.S with type t = label + end) + (Transport : Transport_intf + with type message := Message.t + and type peer := 
Peer.t) + -> + S + with type message := Message.t + and type state := State.t + and type transport := Transport.t + and type peer := Peer.t + and module Message_label := Message_label + and module Timer_label := Timer_label + and module Condition_label := Condition_label + and module Timer := Timer module Make (State : sig type t [@@deriving equal, sexp, to_yojson] @@ -160,9 +157,9 @@ end) (Message : sig end) (Peer : Peer_intf) (Timer : Timer_intf) (Message_label : sig - type label [@@deriving sexp] + type label [@@deriving sexp] - include Hashable.S with type t = label + include Hashable.S with type t = label end) (Timer_label : sig type label [@@deriving sexp] @@ -173,8 +170,8 @@ end) (Condition_label : sig include Hashable.S with type t = label end) (Transport : Transport_intf - with type message := Message.t - and type peer := Peer.t) = + with type message := Message.t + and type peer := Peer.t) = struct module Identifier = Peer @@ -187,19 +184,20 @@ struct and message_transition = t -> Message.t -> State.t -> State.t Deferred.t and t = - { state: State.t - ; last_state: State.t option - ; conditions: (condition * transition) Condition_label.Table.t - ; message_pipe: Message.t Linear_pipe.Reader.t - ; message_handlers: + { state : State.t + ; last_state : State.t option + ; conditions : (condition * transition) Condition_label.Table.t + ; message_pipe : Message.t Linear_pipe.Reader.t + ; message_handlers : (message_condition * message_transition) Message_label.Table.t - ; triggered_timers_r: transition Linear_pipe.Reader.t - ; triggered_timers_w: transition Linear_pipe.Writer.t - ; timer: Timer.t - ; timers: Timer.tok list Timer_label.Table.t - ; ident: Identifier.t - ; transport: Transport.t - ; logger: Logger.t } + ; triggered_timers_r : transition Linear_pipe.Reader.t + ; triggered_timers_w : transition Linear_pipe.Writer.t + ; timer : Timer.t + ; timers : Timer.tok list Timer_label.Table.t + ; ident : Identifier.t + ; transport : Transport.t + ; logger : 
Logger.t + } type handle_command = Condition_label.t * condition * transition @@ -224,7 +222,7 @@ struct | None -> `Fst tok' | Some tok -> - if Timer.equal_tok tok tok' then `Fst tok' else `Snd tok' ) + if Timer.equal_tok tok tok' then `Fst tok' else `Snd tok') in List.iter to_cancel ~f:(fun tok' -> Timer.cancel t.timer tok') ; add_back_timers t ~key:label ~data:to_put_back @@ -257,7 +255,8 @@ struct Deferred.any [ ready t.message_pipe ; ready t.triggered_timers_r - ; (if state_changed t then return () else Deferred.never ()) ] + ; (if state_changed t then return () else Deferred.never ()) + ] let is_ready t : bool = let b = @@ -267,9 +266,9 @@ struct in b - let make_node ~transport ~logger ~me ~messages ?parent:_ ~initial_state - ~timer message_conditions handle_conditions = - let logger = Logger.extend logger [("dsl_node", Peer.to_yojson me)] in + let make_node ~transport ~logger ~me ~messages ?parent:_ ~initial_state ~timer + message_conditions handle_conditions = + let logger = Logger.extend logger [ ("dsl_node", Peer.to_yojson me) ] in let conditions = Condition_label.Table.create () in List.iter handle_conditions ~f:(fun (l, c, h) -> match Condition_label.Table.add conditions ~key:l ~data:(c, h) with @@ -278,7 +277,7 @@ struct (Condition_label.sexp_of_label l |> Sexp.to_string_hum) () | `Ok -> - () ) ; + ()) ; let message_handlers = Message_label.Table.create () in List.iter message_conditions ~f:(fun (l, c, h) -> match Message_label.Table.add message_handlers ~key:l ~data:(c, h) with @@ -287,26 +286,27 @@ struct (Message_label.sexp_of_label l |> Sexp.to_string_hum) () | `Ok -> - () ) ; + ()) ; let timers = Timer_label.Table.create () in let triggered_timers_r, triggered_timers_w = Linear_pipe.create () in let t = - { state= initial_state - ; last_state= None + { state = initial_state + ; last_state = None ; conditions - ; message_pipe= messages + ; message_pipe = messages ; message_handlers ; triggered_timers_r ; triggered_timers_w ; timer ; timers - ; 
ident= me + ; ident = me ; transport - ; logger } + ; logger + } in t - let with_new_state t state : t = {t with last_state= Some t.state; state} + let with_new_state t state : t = { t with last_state = Some t.state; state } let step t : t Deferred.t = match @@ -322,14 +322,15 @@ struct match matches with | [] -> return (with_new_state t t.state) - | [(label, (_, transition))] -> + | [ (label, (_, transition)) ] -> let%map t' = transition t t.state >>| with_new_state t in [%log' debug t.logger] ~metadata: [ ("source", State.to_yojson t.state) ; ("destination", State.to_yojson t'.state) ; ("peer", Peer.to_yojson t.ident) - ; ("label", Condition_label.label_to_yojson label) ] + ; ("label", Condition_label.label_to_yojson label) + ] "Making transition from $source to $destination at $peer label: \ $label" ; t' @@ -342,19 +343,20 @@ struct | false, Some transition, _ -> ignore ( Linear_pipe.read_now t.triggered_timers_r - : [`Eof | `Nothing_available | `Ok of transition] ) ; + : [ `Eof | `Nothing_available | `Ok of transition ] ) ; let%map t' = transition t t.state >>| with_new_state t in [%log' debug t.logger] ~metadata: [ ("source", State.to_yojson t.state) ; ("destination", State.to_yojson t'.state) - ; ("peer", Peer.to_yojson t.ident) ] + ; ("peer", Peer.to_yojson t.ident) + ] "Making transition from $source to $destination at $peer via timer" ; t' | false, None, Some msg -> ( ignore ( Linear_pipe.read_now t.message_pipe - : [`Eof | `Nothing_available | `Ok of Message.t] ) ; + : [ `Eof | `Nothing_available | `Ok of Message.t ] ) ; let checks = Message_label.Table.to_alist t.message_handlers in let matches = List.filter checks ~f:(fun (_, (cond, _)) -> cond msg t.state) @@ -362,7 +364,7 @@ struct match matches with | [] -> return (with_new_state t t.state) - | [(label, (_, transition))] -> + | [ (label, (_, transition)) ] -> let%map t' = transition t msg t.state >>| with_new_state t in [%log' debug t.logger] !"Making transition from %{sexp:State.t} to 
%{sexp:State.t} at \ @@ -379,11 +381,11 @@ struct | false, None, None -> return (with_new_state t t.state) - let ident {ident; _} = ident + let ident { ident; _ } = ident - let state {state; _} = state + let state { state; _ } = state - let send {transport; _} = Transport.send transport + let send { transport; _ } = Transport.send transport let send_exn t ~recipient msg = match%map send t ~recipient msg with diff --git a/src/lib/distributed_dsl/node.mli b/src/lib/distributed_dsl/node.mli index 78bf0464477..a1141d43d2e 100644 --- a/src/lib/distributed_dsl/node.mli +++ b/src/lib/distributed_dsl/node.mli @@ -25,7 +25,7 @@ module type Timer_intf = sig type tok [@@deriving equal] - val wait : t -> Time.Span.t -> tok * [`Cancelled | `Finished] Deferred.t + val wait : t -> Time.Span.t -> tok * [ `Cancelled | `Finished ] Deferred.t val cancel : t -> tok -> unit end @@ -111,44 +111,41 @@ module type S = sig end module type F = functor - (State :sig - - type t [@@deriving equal, sexp, yojson] - end) - (Message :sig - - type t - end) + (State : sig + type t [@@deriving equal, sexp, yojson] + end) + (Message : sig + type t + end) (Peer : Peer_intf) (Timer : Timer_intf) - (Message_label :sig - - type label [@@deriving enum, sexp] - - include Hashable.S with type t = label - end) - (Timer_label :sig - - type label [@@deriving enum, sexp] - - include Hashable.S with type t = label - end) - (Condition_label :sig - - type label [@@deriving enum, sexp, yojson] - - include Hashable.S with type t = label - end) - (Transport : - Transport_intf with type message := Message.t and type peer := Peer.t) - -> S - with type message := Message.t - and type state := State.t - and type transport := Transport.t - and type peer := Peer.t - and module Message_label := Message_label - and module Timer_label := Timer_label - and module Condition_label := Condition_label - and module Timer := Timer + (Message_label : sig + type label [@@deriving enum, sexp] + + include Hashable.S with type t = label + 
end) + (Timer_label : sig + type label [@@deriving enum, sexp] + + include Hashable.S with type t = label + end) + (Condition_label : sig + type label [@@deriving enum, sexp, yojson] + + include Hashable.S with type t = label + end) + (Transport : Transport_intf + with type message := Message.t + and type peer := Peer.t) + -> + S + with type message := Message.t + and type state := State.t + and type transport := Transport.t + and type peer := Peer.t + and module Message_label := Message_label + and module Timer_label := Timer_label + and module Condition_label := Condition_label + and module Timer := Timer module Make : F diff --git a/src/lib/downloader/downloader.ml b/src/lib/downloader/downloader.ml index d8860a73f01..e73be1d9764 100644 --- a/src/lib/downloader/downloader.ml +++ b/src/lib/downloader/downloader.ml @@ -5,11 +5,12 @@ open Network_peer module Job = struct type ('key, 'attempt, 'a) t = - { key: 'key - ; attempts: 'attempt Peer.Map.t - ; res: - ('a Envelope.Incoming.t * 'attempt Peer.Map.t, [`Finished]) Result.t - Ivar.t } + { key : 'key + ; attempts : 'attempt Peer.Map.t + ; res : + ('a Envelope.Incoming.t * 'attempt Peer.Map.t, [ `Finished ]) Result.t + Ivar.t + } let result t = Ivar.read t.res end @@ -21,14 +22,14 @@ let pred_to_yojson _f _x = `String "" let sexp_opaque_to_yojson _f _x = `String "" module Claimed_knowledge = struct - type 'key t = [`All | `Some of 'key list | `Call of 'key pred[@sexp.opaque]] + type 'key t = [ `All | `Some of 'key list | `Call of 'key pred [@sexp.opaque] ] [@@deriving sexp_of, to_yojson] let to_yojson f t = match t with | `Some ks -> let n = List.length ks in - if n > 5 then to_yojson (fun x -> `Int x) (`Some [n]) + if n > 5 then to_yojson (fun x -> `Int x) (`Some [ n ]) else to_yojson f t | _ -> to_yojson f t @@ -70,7 +71,7 @@ end) : sig val result : t -> ( Result.t Envelope.Incoming.t * Attempt.t Peer.Map.t - , [`Finished] ) + , [ `Finished ] ) Base.Result.t Deferred.t end @@ -83,9 +84,8 @@ end) : sig -> 
trust_system:Trust_system.t -> get:(Peer.t -> Key.t list -> Result.t list Deferred.Or_error.t) -> knowledge_context:Knowledge_context.t Broadcast_pipe.Reader.t - -> knowledge:( Knowledge_context.t - -> Peer.t - -> Key.t Claimed_knowledge.t Deferred.t) + -> knowledge: + (Knowledge_context.t -> Peer.t -> Key.t Claimed_knowledge.t Deferred.t) -> peers:(unit -> Peer.t list Deferred.t) -> preferred:Peer.t list -> t Deferred.t @@ -111,40 +111,42 @@ end = struct module Job = struct type t = (Key.t, Attempt.t, Result.t) Job.t - let to_yojson ({key; attempts; _} : t) : Yojson.Safe.t = + let to_yojson ({ key; attempts; _ } : t) : Yojson.Safe.t = `Assoc [ ("key", Key.to_yojson key) ; ( "attempts" , `Assoc (List.map (Map.to_alist attempts) ~f:(fun (p, a) -> - (Peer.to_multiaddr_string p, Attempt.to_yojson a) )) ) ] + (Peer.to_multiaddr_string p, Attempt.to_yojson a))) ) + ] let result = Job.result end module Make_hash_queue (Key : Hashable.S) = struct module Key_value = struct - type 'a t = {key: Key.t; mutable value: 'a} [@@deriving fields] + type 'a t = { key : Key.t; mutable value : 'a } [@@deriving fields] end (* Hash_queue would be perfect, but it doesn't expose enough for us to make sure the underlying queue is sorted by blockchain_length. 
*) type 'a t = - { queue: 'a Key_value.t Doubly_linked.t - ; table: 'a Key_value.t Doubly_linked.Elt.t Key.Table.t } + { queue : 'a Key_value.t Doubly_linked.t + ; table : 'a Key_value.t Doubly_linked.Elt.t Key.Table.t + } let dequeue t = - Option.map (Doubly_linked.remove_first t.queue) ~f:(fun {key; value} -> - Hashtbl.remove t.table key ; value ) + Option.map (Doubly_linked.remove_first t.queue) ~f:(fun { key; value } -> + Hashtbl.remove t.table key ; value) let enqueue t (e : _ J.t) = if Hashtbl.mem t.table e.key then `Key_already_present else - let kv = {Key_value.key= e.key; value= e} in + let kv = { Key_value.key = e.key; value = e } in let elt = match - Doubly_linked.find_elt t.queue ~f:(fun {value; _} -> - Key.compare e.key value.J.key < 0 ) + Doubly_linked.find_elt t.queue ~f:(fun { value; _ } -> + Key.compare e.key value.J.key < 0) with | None -> (* e is >= everything. Put it at the back. *) @@ -157,7 +159,7 @@ end = struct let lookup t k = Option.map (Hashtbl.find t.table k) ~f:(fun x -> - (Doubly_linked.Elt.value x).value ) + (Doubly_linked.Elt.value x).value) let remove t k = match Hashtbl.find_and_remove t.table k with @@ -170,7 +172,8 @@ end = struct let to_list t = List.map (Doubly_linked.to_list t.queue) ~f:Key_value.value - let create () = {table= Key.Table.create (); queue= Doubly_linked.create ()} + let create () = + { table = Key.Table.create (); queue = Doubly_linked.create () } end module Q = Make_hash_queue (Key) @@ -183,12 +186,15 @@ end = struct end type t = - {claimed: Key.t Claimed_knowledge.t option; tried_and_failed: Key_set.t} + { claimed : Key.t Claimed_knowledge.t option + ; tried_and_failed : Key_set.t + } [@@deriving sexp_of, to_yojson] let clear t = Hash_set.clear t.tried_and_failed - let create () = {claimed= None; tried_and_failed= Key.Hash_set.create ()} + let create () = + { claimed = None; tried_and_failed = Key.Hash_set.create () } let knows t k = if Hash_set.mem t.tried_and_failed k then `No @@ -206,8 +212,9 @@ end = 
struct module Preferred_heap = struct (* The preferred peers, sorted by the last time that they were useful to us. *) type t = - { heap: (Peer.t * Time.t) Pairing_heap.t - ; table: (Peer.t * Time.t) Pairing_heap.Elt.t Peer.Table.t } + { heap : (Peer.t * Time.t) Pairing_heap.t + ; table : (Peer.t * Time.t) Pairing_heap.Elt.t Peer.Table.t + } let cmp (p1, t1) (p2, t2) = (* Later is smaller *) @@ -224,17 +231,16 @@ end = struct go t.heap ; Hashtbl.clear t.table let create () = - {heap= Pairing_heap.create ~cmp (); table= Peer.Table.create ()} + { heap = Pairing_heap.create ~cmp (); table = Peer.Table.create () } let add t (p, time) = Option.iter (Hashtbl.find t.table p) ~f:(fun elt -> - Pairing_heap.remove t.heap elt ) ; + Pairing_heap.remove t.heap elt) ; Hashtbl.set t.table ~key:p ~data:(Pairing_heap.add_removable t.heap (p, time)) let sexp_of_t (t : t) = - List.sexp_of_t [%sexp_of: Peer.t * Time.t] - (Pairing_heap.to_list t.heap) + List.sexp_of_t [%sexp_of: Peer.t * Time.t] (Pairing_heap.to_list t.heap) let of_list xs = let now = Time.now () in @@ -251,19 +257,21 @@ end = struct end type t = - { downloading_peers: Peer.Hash_set.t - ; knowledge_requesting_peers: Peer.Hash_set.t - ; temporary_ignores: + { downloading_peers : Peer.Hash_set.t + ; knowledge_requesting_peers : Peer.Hash_set.t + ; temporary_ignores : ((unit, unit) Clock.Event.t[@sexp.opaque]) Peer.Table.t - ; mutable all_preferred: Preferred_heap.t - ; knowledge: Knowledge.t Peer.Table.t + ; mutable all_preferred : Preferred_heap.t + ; knowledge : Knowledge.t Peer.Table.t (* Written to when something changes. 
*) - ; r: (unit Strict_pipe.Reader.t[@sexp.opaque]) - ; w: + ; r : (unit Strict_pipe.Reader.t[@sexp.opaque]) + ; w : (( unit , Strict_pipe.drop_head Strict_pipe.buffered , unit ) - Strict_pipe.Writer.t[@sexp.opaque]) } + Strict_pipe.Writer.t + [@sexp.opaque]) + } [@@deriving sexp_of] let reset_knowledge t ~all_peers = @@ -271,10 +279,10 @@ end = struct Preferred_heap.clear t.all_preferred ; Hashtbl.filter_mapi_inplace t.knowledge ~f:(fun ~key:p ~data:k -> Hash_set.clear k.tried_and_failed ; - if Set.mem all_peers p then Some {k with claimed= None} else None ) ; + if Set.mem all_peers p then Some { k with claimed = None } else None) ; Set.iter all_peers ~f:(fun p -> if not (Hashtbl.mem t.knowledge p) then - Hashtbl.add_exn t.knowledge ~key:p ~data:(Knowledge.create ()) ) ; + Hashtbl.add_exn t.knowledge ~key:p ~data:(Knowledge.create ())) ; Strict_pipe.Writer.write t.w () let to_yojson @@ -283,21 +291,22 @@ end = struct ; knowledge_requesting_peers ; temporary_ignores ; downloading_peers - ; r= _ - ; w= _ } = + ; r = _ + ; w = _ + } = let list xs = - `Assoc [("length", `Int (List.length xs)); ("elts", `List xs)] + `Assoc [ ("length", `Int (List.length xs)); ("elts", `List xs) ] in let f q = Knowledge.to_yojson q in `Assoc [ ( "all" , `Assoc (List.map (Hashtbl.to_alist knowledge) ~f:(fun (p, s) -> - (Peer.to_multiaddr_string p, f s) )) ) + (Peer.to_multiaddr_string p, f s))) ) ; ( "preferred" , `List (List.map (Preferred_heap.to_list all_preferred) ~f:(fun p -> - `String (Peer.to_multiaddr_string p) )) ) + `String (Peer.to_multiaddr_string p))) ) ; ( "temporary_ignores" , list (List.map ~f:Peer.to_yojson (Hashtbl.keys temporary_ignores)) ) @@ -308,7 +317,8 @@ end = struct ; ( "knowledge_requesting_peers" , list (List.map ~f:Peer.to_yojson - (Hash_set.to_list knowledge_requesting_peers)) ) ] + (Hash_set.to_list knowledge_requesting_peers)) ) + ] let create ~preferred ~all_peers = let knowledge = @@ -320,24 +330,26 @@ end = struct Strict_pipe.create 
~name:"useful_peers-available" ~warn_on_drop:false (Buffered (`Capacity 0, `Overflow Drop_head)) in - { downloading_peers= Peer.Hash_set.create () - ; knowledge_requesting_peers= Peer.Hash_set.create () - ; temporary_ignores= Peer.Table.create () + { downloading_peers = Peer.Hash_set.create () + ; knowledge_requesting_peers = Peer.Hash_set.create () + ; temporary_ignores = Peer.Table.create () ; knowledge ; r ; w - ; all_preferred= Preferred_heap.of_list preferred } + ; all_preferred = Preferred_heap.of_list preferred + } let tear_down { downloading_peers ; temporary_ignores ; knowledge_requesting_peers ; knowledge - ; r= _ + ; r = _ ; w - ; all_preferred } = + ; all_preferred + } = Hashtbl.iter temporary_ignores ~f:(fun e -> - Clock.Event.abort_if_possible e () ) ; + Clock.Event.abort_if_possible e ()) ; Hashtbl.clear temporary_ignores ; Hash_set.clear downloading_peers ; Hash_set.clear knowledge_requesting_peers ; @@ -347,11 +359,11 @@ end = struct Strict_pipe.Writer.close w module Knowledge_summary = struct - type t = {no_information: int; no: int; claims_to: int} + type t = { no_information : int; no : int; claims_to : int } [@@deriving fields] (* Score needs revising -- should be more lexicographic *) - let score {no_information; no= _; claims_to} = + let score { no_information; no = _; claims_to } = Float.of_int claims_to +. (0.1 *. 
Float.of_int no_information) end @@ -359,10 +371,10 @@ end = struct Sequence.fold xs ~init:[] ~f:(fun acc x -> match acc with | [] -> - [x] + [ x ] | best :: _ -> let c = compare best x in - if c = 0 then x :: acc else if c < 0 then [x] else acc ) + if c = 0 then x :: acc else if c < 0 then [ x ] else acc) |> List.rev let useful_peer t ~pending_jobs = @@ -373,10 +385,10 @@ end = struct | None -> acc | Some k -> - (p, k) :: acc )) + (p, k) :: acc)) @ Hashtbl.fold t.knowledge ~init:[] ~f:(fun ~key:p ~data:k acc -> if not (Preferred_heap.mem t.all_preferred p) then (p, k) :: acc - else acc ) + else acc) in (* Algorithm: @@ -397,7 +409,7 @@ end = struct | `Claims_to -> Some ((p, k), `Claims_to) | `No_information -> - Some ((p, k), `No_information) ) + Some ((p, k), `No_information)) |> maxes ~compare:(fun (_, c1) (_, c2) -> match (c1, c2) with | `Claims_to, `Claims_to | `No_information, `No_information @@ -406,7 +418,7 @@ end = struct | `Claims_to, `No_information -> 1 | `No_information, `Claims_to -> - -1 ) + -1) |> List.map ~f:fst in let ts = @@ -421,7 +433,10 @@ end = struct let summary, js = List.fold ~init: - ( {Knowledge_summary.no_information= 0; no= 0; claims_to= 0} + ( { Knowledge_summary.no_information = 0 + ; no = 0 + ; claims_to = 0 + } , [] ) pending_jobs ~f:(fun (acc, js) j -> @@ -435,9 +450,9 @@ end = struct | `No_information -> (no_information, j :: js) in - (Field.map field acc ~f:(( + ) 1), js) ) + (Field.map field acc ~f:(( + ) 1), js)) in - ((p, List.rev js), Knowledge_summary.score summary) ) + ((p, List.rev js), Knowledge_summary.score summary)) in let useful_exists = List.exists knowledge ~f:(fun (_, s) -> Float.(s > 0.)) @@ -447,7 +462,7 @@ end = struct (List.filter knowledge ~f:(fun ((p, _), _) -> (not (Hashtbl.mem t.temporary_ignores p)) && (not (Hash_set.mem t.downloading_peers p)) - && not (Hash_set.mem t.knowledge_requesting_peers p) )) + && not (Hash_set.mem t.knowledge_requesting_peers p))) ~compare:(fun (_, s1) (_, s2) -> Float.compare 
s1 s2) in match best with @@ -457,34 +472,39 @@ end = struct if Float.(score <= 0.) then `Stalled else `Useful (p, k) type update = - | Refreshed_peers of {all_peers: Peer.Set.t} + | Refreshed_peers of { all_peers : Peer.Set.t } | Download_finished of - Peer.t * [`Successful of Key.t list] * [`Unsuccessful of Key.t list] + Peer.t + * [ `Successful of Key.t list ] + * [ `Unsuccessful of Key.t list ] | Download_starting of Peer.t | Job_cancelled of Key.t - | Add_knowledge of {peer: Peer.t; claimed: Key.t list; out_of_band: bool} + | Add_knowledge of + { peer : Peer.t; claimed : Key.t list; out_of_band : bool } | Knowledge_request_starting of Peer.t | Knowledge of - { peer: Peer.t - ; claimed: Key.t Claimed_knowledge.t - ; active_jobs: Job.t list - ; out_of_band: bool } + { peer : Peer.t + ; claimed : Key.t Claimed_knowledge.t + ; active_jobs : Job.t list + ; out_of_band : bool + } let jobs_no_longer_needed t ks = Hashtbl.iter t.knowledge ~f:(fun s -> - List.iter ks ~f:(Hash_set.remove s.tried_and_failed) ) + List.iter ks ~f:(Hash_set.remove s.tried_and_failed)) let ignore_period = Time.Span.of_min 2. 
let update t u = match u with - | Add_knowledge {peer; claimed; out_of_band} -> + | Add_knowledge { peer; claimed; out_of_band } -> if not out_of_band then Hash_set.remove t.knowledge_requesting_peers peer ; Hashtbl.update t.knowledge peer ~f:(function | None -> - { Knowledge.claimed= Some (`Some claimed) - ; tried_and_failed= Key.Hash_set.create () } + { Knowledge.claimed = Some (`Some claimed) + ; tried_and_failed = Key.Hash_set.create () + } | Some k -> let claimed = match k.claimed with @@ -500,10 +520,10 @@ end = struct let s = Key.Hash_set.of_list claimed in `Call (fun key -> f key || Hash_set.mem s key) in - {k with claimed= Some claimed} ) + { k with claimed = Some claimed }) | Knowledge_request_starting peer -> Hash_set.add t.knowledge_requesting_peers peer - | Knowledge {peer; claimed; active_jobs; out_of_band} -> + | Knowledge { peer; claimed; active_jobs; out_of_band } -> if not out_of_band then Hash_set.remove t.knowledge_requesting_peers peer ; let tried_and_failed = @@ -511,7 +531,7 @@ end = struct match Hashtbl.find t.knowledge peer with | None -> Key.Hash_set.create () - | Some {tried_and_failed; _} -> + | Some { tried_and_failed; _ } -> tried_and_failed in List.iter active_jobs ~f:(fun j -> @@ -519,20 +539,18 @@ end = struct | None -> () | Some a -> - if not (Attempt.worth_retrying a) then Hash_set.add s j.key - ) ; + if not (Attempt.worth_retrying a) then Hash_set.add s j.key) ; s in Hashtbl.set t.knowledge ~key:peer - ~data:{Knowledge.claimed= Some claimed; tried_and_failed} + ~data:{ Knowledge.claimed = Some claimed; tried_and_failed } | Job_cancelled h -> - jobs_no_longer_needed t [h] ; + jobs_no_longer_needed t [ h ] ; Hashtbl.iter t.knowledge ~f:(fun s -> - Hash_set.remove s.tried_and_failed h ) + Hash_set.remove s.tried_and_failed h) | Download_starting peer -> Hash_set.add t.downloading_peers peer - | Download_finished (peer0, `Successful succs, `Unsuccessful unsuccs) - -> ( + | Download_finished (peer0, `Successful succs, `Unsuccessful 
unsuccs) -> ( (let cancel = Option.iter ~f:(fun e -> Clock.Event.abort_if_possible e ()) in @@ -543,8 +561,8 @@ end = struct (fun () -> Hashtbl.remove t.temporary_ignores peer0 ; if not (Strict_pipe.Writer.is_closed t.w) then - Strict_pipe.Writer.write t.w () ) - () ) + Strict_pipe.Writer.write t.w ()) + ()) else ( Hashtbl.find_and_remove t.temporary_ignores peer0 |> cancel ; Preferred_heap.add t.all_preferred (peer0, Time.now ()) )) ; @@ -553,16 +571,17 @@ end = struct match Hashtbl.find t.knowledge peer0 with | None -> () - | Some {tried_and_failed; claimed= _} -> + | Some { tried_and_failed; claimed = _ } -> List.iter unsuccs ~f:(Hash_set.add tried_and_failed) ) - | Refreshed_peers {all_peers} -> + | Refreshed_peers { all_peers } -> Hashtbl.filter_keys_inplace t.knowledge ~f:(Set.mem all_peers) ; Set.iter all_peers ~f:(fun p -> if not (Hashtbl.mem t.knowledge p) then Hashtbl.add_exn t.knowledge ~key:p ~data: - { Knowledge.claimed= None - ; tried_and_failed= Key.Hash_set.create () } ) + { Knowledge.claimed = None + ; tried_and_failed = Key.Hash_set.create () + }) let update t u : unit = update t u ; @@ -571,32 +590,33 @@ end = struct end type t = - { mutable next_flush: (unit, unit) Clock.Event.t option - ; mutable all_peers: Peer.Set.t - ; pending: Job.t Q.t - ; downloading: (Peer.t * Job.t * Time.t) Key.Table.t - ; useful_peers: Useful_peers.t - ; flush_r: unit Strict_pipe.Reader.t (* Single reader *) - ; flush_w: + { mutable next_flush : (unit, unit) Clock.Event.t option + ; mutable all_peers : Peer.Set.t + ; pending : Job.t Q.t + ; downloading : (Peer.t * Job.t * Time.t) Key.Table.t + ; useful_peers : Useful_peers.t + ; flush_r : unit Strict_pipe.Reader.t (* Single reader *) + ; flush_w : ( unit , Strict_pipe.drop_head Strict_pipe.buffered , unit ) Strict_pipe.Writer.t (* buffer of length 0 *) - ; get: Peer.t -> Key.t list -> Result.t list Deferred.Or_error.t - ; max_batch_size: int + ; get : Peer.t -> Key.t list -> Result.t list Deferred.Or_error.t + ; 
max_batch_size : int (* A peer is useful if there is a job in the pending queue which has not - been attempted with that peer. *) - ; got_new_peers_w: + been attempted with that peer. *) + ; got_new_peers_w : ( unit , Strict_pipe.drop_head Strict_pipe.buffered , unit ) Strict_pipe.Writer.t (* buffer of length 0 *) - ; got_new_peers_r: unit Strict_pipe.Reader.t - ; logger: Logger.t - ; trust_system: Trust_system.t - ; stop: unit Deferred.t } + ; got_new_peers_r : unit Strict_pipe.Reader.t + ; logger : Logger.t + ; trust_system : Trust_system.t + ; stop : unit Deferred.t + } let total_jobs (t : t) = Q.length t.pending + Hashtbl.length t.downloading @@ -607,7 +627,8 @@ end = struct [ Q.to_list t.pending |> List.map ~f:(fun j -> j.key) |> Key.Set.of_list - ; Key.Set.of_hashtbl_keys t.downloading ]) + ; Key.Set.of_hashtbl_keys t.downloading + ]) |> [%test_eq: int] (total_jobs t) let check_invariant_r = ref check_invariant @@ -620,20 +641,20 @@ end = struct try !check_invariant_r t with e -> [%log' debug t.logger] - ~metadata:[("exn", `String (Exn.to_string e))] + ~metadata:[ ("exn", `String (Exn.to_string e)) ] "job_finished $exn" let kill_job _t j = Ivar.fill_if_empty j.J.res (Error `Finished) let flush_soon t = Option.iter t.next_flush ~f:(fun e -> Clock.Event.abort_if_possible e ()) ; - t.next_flush - <- Some - (Clock.Event.run_after max_wait - (fun () -> - if not (Strict_pipe.Writer.is_closed t.flush_w) then - Strict_pipe.Writer.write t.flush_w () ) - ()) + t.next_flush <- + Some + (Clock.Event.run_after max_wait + (fun () -> + if not (Strict_pipe.Writer.is_closed t.flush_w) then + Strict_pipe.Writer.write t.flush_w ()) + ()) let cancel t h = let job = @@ -655,7 +676,7 @@ end = struct let enqueue t e = Q.enqueue t.pending e let enqueue_exn t e = - assert ([%equal: [`Ok | `Key_already_present]] (enqueue t e) `Ok) + assert ([%equal: [ `Ok | `Key_already_present ]] (enqueue t e) `Ok) let active_jobs t = Q.to_list t.pending @@ -666,30 +687,31 @@ end = struct let peers' 
= Peer.Set.of_list peers in let new_peers = Set.diff peers' t.all_peers in Useful_peers.update t.useful_peers - (Refreshed_peers {all_peers= peers'}) ; + (Refreshed_peers { all_peers = peers' }) ; if (not (Set.is_empty new_peers)) && not (Strict_pipe.Writer.is_closed t.got_new_peers_w) then Strict_pipe.Writer.write t.got_new_peers_w () ; t.all_peers <- Peer.Set.of_list peers ; - Deferred.unit ) + Deferred.unit) |> don't_wait_for let tear_down ( { next_flush - ; all_peers= _ + ; all_peers = _ ; flush_w - ; get= _ + ; get = _ ; got_new_peers_w - ; flush_r= _ + ; flush_r = _ ; useful_peers - ; got_new_peers_r= _ + ; got_new_peers_r = _ ; pending ; downloading - ; max_batch_size= _ - ; logger= _ - ; trust_system= _ - ; stop= _ } as t ) = + ; max_batch_size = _ + ; logger = _ + ; trust_system = _ + ; stop = _ + } as t ) = let rec clear_queue q = match Q.dequeue q with | None -> @@ -712,7 +734,8 @@ end = struct ( ("length", `Int n) :: ( if n > 8 then [] - else [("elts", `List (List.map xs ~f:(fun j -> Key.to_yojson j.J.key)))] + else + [ ("elts", `List (List.map xs ~f:(fun j -> Key.to_yojson j.J.key))) ] ) ) in let keys = List.map xs ~f:(fun x -> x.J.key) in @@ -720,7 +743,10 @@ end = struct let e = Error.to_string_hum e in [%log' debug t.logger] "Downloading from $peer failed ($error) on $keys" ~metadata: - [("peer", Peer.to_yojson peer); ("error", `String e); ("keys", f xs)] ; + [ ("peer", Peer.to_yojson peer) + ; ("error", `String e) + ; ("keys", f xs) + ] ; if Option.is_some punish then (* TODO: Make this an insta ban *) Trust_system.( @@ -730,12 +756,12 @@ end = struct List.iter xs ~f:(fun x -> enqueue_exn t { x with - attempts= Map.set x.attempts ~key:peer ~data:Attempt.download } - ) ; + attempts = Map.set x.attempts ~key:peer ~data:Attempt.download + }) ; flush_soon t in List.iter xs ~f:(fun x -> - Hashtbl.set t.downloading ~key:x.key ~data:(peer, x, Time.now ()) ) ; + Hashtbl.set t.downloading ~key:x.key ~data:(peer, x, Time.now ())) ; Useful_peers.update 
t.useful_peers (Download_starting peer) ; let download_deferred = t.get peer keys in upon download_deferred (fun res -> @@ -748,14 +774,13 @@ end = struct let succ = List.filter_map rs ~f:(fun r -> let k = Result.key r in - if Hash_set.mem all k then Some k else None ) + if Hash_set.mem all k then Some k else None) in List.iter succ ~f:(Hash_set.remove all) ; (succ, Hash_set.to_list all) in Useful_peers.update t.useful_peers - (Download_finished (peer, `Successful succs, `Unsuccessful unsuccs)) - ) ; + (Download_finished (peer, `Successful succs, `Unsuccessful unsuccs))) ; let%map res = Deferred.choose [ Deferred.choice download_deferred (fun x -> `Not_stopped x) @@ -763,54 +788,58 @@ end = struct ; Deferred.choice (* This happens if all the jobs are cancelled. *) (Deferred.List.map xs ~f:(fun x -> Ivar.read x.res)) - (fun _ -> `Stopped) ] + (fun _ -> `Stopped) + ] in List.iter xs ~f:(fun j -> Hashtbl.remove t.downloading j.key) ; match res with | `Stopped -> List.iter xs ~f:(kill_job t) | `Not_stopped r -> ( - match r with - | Error e -> - fail e - | Ok rs -> - [%log' debug t.logger] "result is $result" - ~metadata: - [ ("result", f xs) - ; ("peer", `String (Peer.to_multiaddr_string peer)) ] ; - let received_at = Time.now () in - let jobs = - Key.Table.of_alist_exn (List.map xs ~f:(fun x -> (x.key, x))) - in - List.iter rs ~f:(fun r -> - match Hashtbl.find jobs (Result.key r) with - | None -> - (* Got something we didn't ask for. 
*) - Trust_system.( - record t.trust_system t.logger peer - Actions.(Violated_protocol, None)) - |> don't_wait_for - | Some j -> - Hashtbl.remove jobs j.key ; - job_finished t j - (Ok - ( { Envelope.Incoming.data= r - ; received_at - ; sender= Remote peer } - , j.attempts )) ) ; - (* Anything left in jobs, we did not get results for :( *) - Hashtbl.iter jobs ~f:(fun x -> - Hashtbl.remove t.downloading x.J.key ; - enqueue_exn t - { x with - attempts= Map.set x.attempts ~key:peer ~data:Attempt.download - } ) ; - flush_soon t ) + match r with + | Error e -> + fail e + | Ok rs -> + [%log' debug t.logger] "result is $result" + ~metadata: + [ ("result", f xs) + ; ("peer", `String (Peer.to_multiaddr_string peer)) + ] ; + let received_at = Time.now () in + let jobs = + Key.Table.of_alist_exn (List.map xs ~f:(fun x -> (x.key, x))) + in + List.iter rs ~f:(fun r -> + match Hashtbl.find jobs (Result.key r) with + | None -> + (* Got something we didn't ask for. *) + Trust_system.( + record t.trust_system t.logger peer + Actions.(Violated_protocol, None)) + |> don't_wait_for + | Some j -> + Hashtbl.remove jobs j.key ; + job_finished t j + (Ok + ( { Envelope.Incoming.data = r + ; received_at + ; sender = Remote peer + } + , j.attempts ))) ; + (* Anything left in jobs, we did not get results for :( *) + Hashtbl.iter jobs ~f:(fun x -> + Hashtbl.remove t.downloading x.J.key ; + enqueue_exn t + { x with + attempts = + Map.set x.attempts ~key:peer ~data:Attempt.download + }) ; + flush_soon t ) let to_yojson t : Yojson.Safe.t = check_invariant t ; let list xs = - `Assoc [("length", `Int (List.length xs)); ("elts", `List xs)] + `Assoc [ ("length", `Int (List.length xs)); ("elts", `List xs) ] in let now = Time.now () in let f q = list (List.map ~f:Job.to_yojson (Q.to_list q)) in @@ -828,7 +857,9 @@ end = struct ; ( "time_since_start" , `String (Time.Span.to_string_hum (Time.diff now start)) ) - ; ("peer", `String (Peer.to_multiaddr_string p)) ] )) ) ] + ; ("peer", `String 
(Peer.to_multiaddr_string p)) + ])) ) + ] let post_stall_retry_delay = Time.Span.of_min 1. @@ -860,7 +891,7 @@ end = struct (Strict_pipe.Reader.to_linear_pipe p).pipe [%here] in match%bind - Deferred.choose [read t.flush_r; read t.useful_peers.r] + Deferred.choose [ read t.flush_r; read t.useful_peers.r ] with | `Eof -> [%log' debug t.logger] "Downloader: flush eof" ; @@ -870,8 +901,8 @@ end = struct step t ) | `Stalled -> [%log' debug t.logger] - "Downloader: all stalled. Resetting knowledge, waiting %s and \ - then retrying." + "Downloader: all stalled. Resetting knowledge, waiting %s and then \ + retrying." (Time.Span.to_string_hum post_stall_retry_delay) ; Useful_peers.reset_knowledge t.useful_peers ~all_peers:t.all_peers ; let%bind () = after post_stall_retry_delay in @@ -882,7 +913,8 @@ end = struct [%log' debug t.logger] "Downloader: downloading $n from $peer" ~metadata: [ ("n", `Int (List.length to_download)) - ; ("peer", Peer.to_yojson peer) ] ; + ; ("peer", Peer.to_yojson peer) + ] ; List.iter to_download ~f:(fun j -> Q.remove t.pending j.key) ; match to_download with | [] -> @@ -893,11 +925,12 @@ end = struct let add_knowledge t peer claimed = Useful_peers.update t.useful_peers - (Add_knowledge {peer; claimed; out_of_band= true}) + (Add_knowledge { peer; claimed; out_of_band = true }) let update_knowledge t peer claimed = Useful_peers.update t.useful_peers - (Knowledge {peer; claimed; active_jobs= active_jobs t; out_of_band= true}) + (Knowledge + { peer; claimed; active_jobs = active_jobs t; out_of_band = true }) let mark_preferred t peer ~now = Useful_peers.Preferred_heap.add t.useful_peers.all_preferred (peer, now) @@ -912,20 +945,21 @@ end = struct let flush_r, flush_w = pipe ~name:"flush" 0 in let got_new_peers_r, got_new_peers_w = pipe ~name:"got_new_peers" 0 in let t = - { all_peers= Peer.Set.of_list all_peers - ; pending= Q.create () - ; next_flush= None + { all_peers = Peer.Set.of_list all_peers + ; pending = Q.create () + ; next_flush = None ; 
flush_r ; flush_w ; got_new_peers_r ; got_new_peers_w - ; useful_peers= Useful_peers.create ~all_peers ~preferred + ; useful_peers = Useful_peers.create ~all_peers ~preferred ; get ; max_batch_size - ; logger= Logger.create () + ; logger = Logger.create () ; trust_system - ; downloading= Key.Table.create () - ; stop } + ; downloading = Key.Table.create () + ; stop + } in let peers = let r, w = Broadcast_pipe.create [] in @@ -934,7 +968,7 @@ end = struct peers () >>= fun ps -> try Broadcast_pipe.Writer.write w ps - with Broadcast_pipe.Already_closed _ -> Deferred.unit ) ; + with Broadcast_pipe.Already_closed _ -> Deferred.unit) ; r in let deferred_ok = Deferred.return `Ok in @@ -953,7 +987,8 @@ end = struct ; choice (Pipe.values_available (Strict_pipe.Reader.to_linear_pipe t.useful_peers.r).pipe) - Fn.id ] + Fn.id + ] with | `Eof -> deferred_finished @@ -973,7 +1008,7 @@ end = struct | `Ok -> if not (Strict_pipe.Writer.is_closed request_w) then Strict_pipe.Writer.write request_w peer - else Deferred.unit ) + else Deferred.unit) in let ps : unit Ivar.t Peer.Table.t = Peer.Table.create () in Broadcast_pipe.Reader.iter peers ~f:(fun peers -> @@ -981,13 +1016,13 @@ end = struct Hashtbl.filteri_inplace ps ~f:(fun ~key:p ~data:finished -> let keep = Hash_set.mem peers p in if not keep then Ivar.fill_if_empty finished () ; - keep ) ; + keep) ; Hash_set.iter peers ~f:(fun p -> if not (Hashtbl.mem ps p) then ( let finished = Ivar.create () in refresh_knowledge (Ivar.read finished) p ; - Hashtbl.add_exn ps ~key:p ~data:finished ) ) ; - Deferred.unit ) + Hashtbl.add_exn ps ~key:p ~data:finished )) ; + Deferred.unit) |> don't_wait_for ; let throttle = Throttle.create ~continue_on_error:true ~max_concurrent_jobs:8 @@ -1000,8 +1035,8 @@ end = struct Ivar.fill_if_empty finished () ; let finished = Ivar.create () in refresh_knowledge (Ivar.read finished) p ; - finished ) ; - Deferred.unit ) + finished) ; + Deferred.unit) |> don't_wait_for ; Strict_pipe.Reader.iter request_r ~f:(fun 
peer -> (* TODO: The pipe/clock logic is not quite right, but it is good enough. *) @@ -1016,18 +1051,19 @@ end = struct let%map k = get_knowledge ctx peer in Useful_peers.update t.useful_peers (Knowledge - { out_of_band= false + { out_of_band = false ; peer - ; claimed= k - ; active_jobs= active_jobs t })) ) + ; claimed = k + ; active_jobs = active_jobs t + }))) |> don't_wait_for in don't_wait_for (step t) ; upon stop (fun () -> tear_down t) ; every ~stop (Time.Span.of_sec 30.) (fun () -> [%log' debug t.logger] - ~metadata:[("jobs", to_yojson t)] - "Downloader jobs" ) ; + ~metadata:[ ("jobs", to_yojson t) ] + "Downloader jobs") ; refresh_peers t peers ; t @@ -1041,6 +1077,6 @@ end = struct x | None, None -> flush_soon t ; - let e = {J.key; attempts; res= Ivar.create ()} in + let e = { J.key; attempts; res = Ivar.create () } in enqueue_exn t e ; e end diff --git a/src/lib/empty_hashes/empty_hashes.ml b/src/lib/empty_hashes/empty_hashes.ml index 6796eae1452..2fb29f38349 100644 --- a/src/lib/empty_hashes/empty_hashes.ml +++ b/src/lib/empty_hashes/empty_hashes.ml @@ -17,10 +17,10 @@ let cache hash_mod ~init_hash depth = if Int.equal i 0 then !last_hash else ( last_hash := merge_hash hash_mod (i - 1) !last_hash ; - !last_hash ) ) + !last_hash )) let extensible_cache hash_mod ~init_hash = - let empty_hashes = ref [|init_hash|] in + let empty_hashes = ref [| init_hash |] in fun i -> let prev = !empty_hashes in let height = Array.length prev - 1 in @@ -31,5 +31,5 @@ let extensible_cache hash_mod ~init_hash = Array.append prev (Array.init deficit ~f:(fun i -> last_hash := merge_hash hash_mod (i + height) !last_hash ; - !last_hash )) ) ; + !last_hash)) ) ; !empty_hashes.(i) diff --git a/src/lib/error_json/error_json.ml b/src/lib/error_json/error_json.ml index 276c99b9af4..83916212fed 100644 --- a/src/lib/error_json/error_json.ml +++ b/src/lib/error_json/error_json.ml @@ -19,7 +19,7 @@ let rec sexp_of_yojson (json : Yojson.Safe.t) : (Sexp.t, string) Result.t = | Ok sexp -> 
Continue (sexp :: sexps) | Error str -> - Stop (Error str) ) + Stop (Error str)) in Result.map ~f:(fun l -> Sexp.List (List.rev l)) rev_sexps | _ -> @@ -34,37 +34,39 @@ type info_data = (* Used to encode sub-lists of infos *) type info_tag = - {tag: string; data: Sexp.t option; loc: Source_code_position.t option} + { tag : string; data : Sexp.t option; loc : Source_code_position.t option } type 'a info_repr = - {base: 'a; rev_tags: info_tag list; backtrace: string option} + { base : 'a; rev_tags : info_tag list; backtrace : string option } let info_repr_to_yojson (info : info_data info_repr) : Yojson.Safe.t = let base_pairs = match info.base with | Sexp sexp -> - [("sexp", sexp_to_yojson sexp)] + [ ("sexp", sexp_to_yojson sexp) ] | String str -> - [("string", `String str)] + [ ("string", `String str) ] | Exn exn -> [ ( "exn_name" - , `String Stdlib.Obj.Extension_constructor.(name @@ of_val exn)) - ; ("exn", sexp_to_yojson (Sexplib.Conv.sexp_of_exn exn)) ] + , `String Stdlib.Obj.Extension_constructor.(name @@ of_val exn) ) + ; ("exn", sexp_to_yojson (Sexplib.Conv.sexp_of_exn exn)) + ] | Of_list (Some trunc_after, length, json) -> [ ("multiple", json) ; ("length", `Int length) - ; ("truncated_after", `Int trunc_after) ] + ; ("truncated_after", `Int trunc_after) + ] | Of_list (None, length, json) -> - [("multiple", json); ("length", `Int length)] + [ ("multiple", json); ("length", `Int length) ] in let tags = - let tag_to_json {tag; data; loc} = + let tag_to_json { tag; data; loc } = let jsons = match loc with | None -> [] | Some loc -> - [("loc", `String (Source_code_position.to_string loc))] + [ ("loc", `String (Source_code_position.to_string loc)) ] in let jsons = match data with @@ -79,7 +81,7 @@ let info_repr_to_yojson (info : info_data info_repr) : Yojson.Safe.t = | [] -> [] | _ :: _ -> - [("tags", `List (List.rev_map ~f:tag_to_json info.rev_tags))] + [ ("tags", `List (List.rev_map ~f:tag_to_json info.rev_tags)) ] in let backtrace = match info.backtrace with @@ 
-90,7 +92,8 @@ let info_repr_to_yojson (info : info_data info_repr) : Yojson.Safe.t = [ ( "backtrace" , `List (List.map ~f:(fun s -> `String s) (String.split_lines backtrace)) - ) ] + ) + ] in `Assoc (base_pairs @ tags @ backtrace) @@ -103,23 +106,26 @@ let rec info_internal_repr_to_yojson_aux (info : Info.Internal_repr.t) (acc : unit info_repr) : info_data info_repr = match info with | Could_not_construct sexp -> - {acc with base= Sexp (List [Atom "Could_not_construct"; sexp])} + { acc with base = Sexp (List [ Atom "Could_not_construct"; sexp ]) } | Sexp sexp -> - {acc with base= Sexp sexp} + { acc with base = Sexp sexp } | String str -> - {acc with base= String str} + { acc with base = String str } | Exn exn -> - {acc with base= Exn exn} + { acc with base = Exn exn } | Tag_sexp (tag, sexp, loc) -> { acc with - base= Sexp sexp - ; rev_tags= {tag; data= None; loc} :: acc.rev_tags } + base = Sexp sexp + ; rev_tags = { tag; data = None; loc } :: acc.rev_tags + } | Tag_t (tag, info) -> info_internal_repr_to_yojson_aux info - {acc with rev_tags= {tag; data= None; loc= None} :: acc.rev_tags} + { acc with rev_tags = { tag; data = None; loc = None } :: acc.rev_tags } | Tag_arg (tag, data, info) -> info_internal_repr_to_yojson_aux info - {acc with rev_tags= {tag; data= Some data; loc= None} :: acc.rev_tags} + { acc with + rev_tags = { tag; data = Some data; loc = None } :: acc.rev_tags + } | Of_list (trunc_after, infos) -> let rec rev_take i acc_len infos acc_infos = match (i, infos) with @@ -140,14 +146,14 @@ let rec info_internal_repr_to_yojson_aux (info : Info.Internal_repr.t) rev_take trunc_after 0 infos [] in let json_infos = `List (List.rev rev_json_infos) in - {acc with base= Of_list (trunc_after, length, json_infos)} + { acc with base = Of_list (trunc_after, length, json_infos) } | With_backtrace (info, backtrace) -> - info_internal_repr_to_yojson_aux info {acc with backtrace= Some backtrace} + info_internal_repr_to_yojson_aux info + { acc with backtrace = Some 
backtrace } -and info_internal_repr_to_yojson (info : Info.Internal_repr.t) : Yojson.Safe.t - = +and info_internal_repr_to_yojson (info : Info.Internal_repr.t) : Yojson.Safe.t = info_internal_repr_to_yojson_aux info - {base= (); rev_tags= []; backtrace= None} + { base = (); rev_tags = []; backtrace = None } |> info_repr_to_yojson let info_to_yojson (info : Info.t) : Yojson.Safe.t = @@ -158,4 +164,4 @@ let error_to_yojson (err : Error.t) : Yojson.Safe.t = | `Assoc assocs -> `Assoc (("commit_id", `String Mina_version.commit_id) :: assocs) | json -> - `Assoc [("commit_id", `String Mina_version.commit_id); ("error", json)] + `Assoc [ ("commit_id", `String Mina_version.commit_id); ("error", json) ] diff --git a/src/lib/exit_handlers/exit_handlers.ml b/src/lib/exit_handlers/exit_handlers.ml index f45d9d6e0a7..eb9135f4197 100644 --- a/src/lib/exit_handlers/exit_handlers.ml +++ b/src/lib/exit_handlers/exit_handlers.ml @@ -7,17 +7,18 @@ open Async_unix (* register a thunk to be called at exit; log registration and execution *) let register_handler ~logger ~description (f : unit -> unit) = [%log info] "Registering exit handler: $description" - ~metadata:[("description", `String description)] ; + ~metadata:[ ("description", `String description) ] ; let logging_thunk () = [%log info] "Running exit handler: $description" - ~metadata:[("description", `String description)] ; + ~metadata:[ ("description", `String description) ] ; (* if there's an exception, log it, allow other handlers to run *) try f () with exn -> [%log info] "When running exit handler: $description, got exception $exn" ~metadata: [ ("description", `String description) - ; ("exn", `String (Exn.to_string exn)) ] + ; ("exn", `String (Exn.to_string exn)) + ] in Stdlib.at_exit logging_thunk @@ -25,10 +26,10 @@ let register_handler ~logger ~description (f : unit -> unit) = let register_async_shutdown_handler ~logger ~description (f : unit -> unit Deferred.t) = [%log info] "Registering async shutdown handler: 
$description" - ~metadata:[("description", `String description)] ; + ~metadata:[ ("description", `String description) ] ; let logging_thunk () = [%log info] "Running async shutdown handler: $description" - ~metadata:[("description", `String description)] ; + ~metadata:[ ("description", `String description) ] ; let open Deferred.Let_syntax in let%map () = match%map Monitor.try_with ~here:[%here] ~extract_exn:true f with @@ -40,7 +41,8 @@ let register_async_shutdown_handler ~logger ~description $exn" ~metadata: [ ("description", `String description) - ; ("exn", `String (Exn.to_string exn)) ] + ; ("exn", `String (Exn.to_string exn)) + ] in () in diff --git a/src/lib/fake_network/fake_network.ml b/src/lib/fake_network/fake_network.ml index 8b2d78bac93..da582b019e9 100644 --- a/src/lib/fake_network/fake_network.ml +++ b/src/lib/fake_network/fake_network.ml @@ -11,15 +11,20 @@ type 'n num_peers = 'n Peano.gt_1 (* TODO: make transition frontier a mutable option *) type peer_state = - { frontier: Transition_frontier.t - ; consensus_local_state: Consensus.Data.Local_state.t } + { frontier : Transition_frontier.t + ; consensus_local_state : Consensus.Data.Local_state.t + } type peer_network = - {peer: Network_peer.Peer.t; state: peer_state; network: Mina_networking.t} + { peer : Network_peer.Peer.t + ; state : peer_state + ; network : Mina_networking.t + } type nonrec 'n t = - { fake_gossip_network: Gossip_net.Fake.network - ; peer_networks: (peer_network, 'n) Vect.t } + { fake_gossip_network : Gossip_net.Fake.network + ; peer_networks : (peer_network, 'n) Vect.t + } constraint 'n = _ num_peers module Constants = struct @@ -46,7 +51,7 @@ let setup (type n) ?(logger = Logger.null ()) (Peer.Id.unsafe_of_string (sprintf "fake peer at port %d" libp2p_port)) in - ((Int32.( + ) Int32.one ip, libp2p_port + 1), peer) ) + ((Int32.( + ) Int32.one ip, libp2p_port + 1), peer)) in let fake_gossip_network = Gossip_net.Fake.create_network (Vect.to_list peers) @@ -57,17 +62,20 @@ let setup 
(type n) ?(logger = Logger.null ()) ; trust_system ; time_controller ; consensus_local_state - ; is_seed= Vect.is_empty peers - ; genesis_ledger_hash= + ; is_seed = Vect.is_empty peers + ; genesis_ledger_hash = Ledger.merkle_root (Lazy.force (Precomputed_values.genesis_ledger precomputed_values)) - ; constraint_constants= precomputed_values.constraint_constants - ; creatable_gossip_net= + ; constraint_constants = precomputed_values.constraint_constants + ; creatable_gossip_net = Gossip_net.Any.Creatable ( (module Gossip_net.Fake) , Gossip_net.Fake.create_instance fake_gossip_network peer ) - ; log_gossip_heard= - {snark_pool_diff= true; transaction_pool_diff= true; new_state= true} + ; log_gossip_heard = + { snark_pool_diff = true + ; transaction_pool_diff = true + ; new_state = true + } } in let peer_networks = @@ -105,7 +113,7 @@ let setup (type n) ?(logger = Logger.null ()) ( scan_state , expected_merkle_root , pending_coinbases - , protocol_states )) ) + , protocol_states ))) ~get_some_initial_peers:(fun _ -> Deferred.return []) ~answer_sync_ledger_query:(fun query_env -> let ledger_hash, _ = Envelope.Incoming.data query_env in @@ -121,31 +129,31 @@ let setup (type n) ?(logger = Logger.null ()) (Error.createf !"%s for ledger_hash: %{sexp:Ledger_hash.t}" Mina_networking.refused_answer_query_string - ledger_hash)) ) + ledger_hash))) ~get_ancestry:(fun query_env -> Deferred.return (Sync_handler.Root.prove ~consensus_constants: precomputed_values.consensus_constants ~logger ~frontier - (Envelope.Incoming.data query_env)) ) + (Envelope.Incoming.data query_env))) ~get_best_tip:(fun _ -> failwith "Get_best_tip unimplemented") ~get_node_status:(fun _ -> - failwith "Get_node_status unimplemented" ) + failwith "Get_node_status unimplemented") ~get_transition_knowledge:(fun _query -> - Deferred.return (Sync_handler.best_tip_path ~frontier) ) + Deferred.return (Sync_handler.best_tip_path ~frontier)) ~get_transition_chain_proof:(fun query_env -> Deferred.return 
(Transition_chain_prover.prove ~frontier - (Envelope.Incoming.data query_env)) ) + (Envelope.Incoming.data query_env))) ~get_transition_chain:(fun query_env -> Deferred.return (Sync_handler.get_transition_chain ~frontier - (Envelope.Incoming.data query_env)) ) ) + (Envelope.Incoming.data query_env)))) in - {peer; state; network} ) + { peer; state; network }) in - {fake_gossip_network; peer_networks} + { fake_gossip_network; peer_networks } module Generator = struct open Quickcheck @@ -162,9 +170,7 @@ module Generator = struct Filename.temp_dir_name ^/ "epoch_ledger" ^ (Uuid_unix.create () |> Uuid.to_string) in - let genesis_ledger = - Precomputed_values.genesis_ledger precomputed_values - in + let genesis_ledger = Precomputed_values.genesis_ledger precomputed_values in let consensus_local_state = Consensus.Data.Local_state.create Public_key.Compressed.Set.empty ~genesis_ledger @@ -178,7 +184,7 @@ module Generator = struct Transition_frontier.For_tests.gen ~precomputed_values ~verifier ~consensus_local_state ~max_length:max_frontier_length ~size:0 () in - {frontier; consensus_local_state} + { frontier; consensus_local_state } let peer_with_branch ~frontier_branch_size ~precomputed_values ~verifier ~max_frontier_length = @@ -186,9 +192,7 @@ module Generator = struct Filename.temp_dir_name ^/ "epoch_ledger" ^ (Uuid_unix.create () |> Uuid.to_string) in - let genesis_ledger = - Precomputed_values.genesis_ledger precomputed_values - in + let genesis_ledger = Precomputed_values.genesis_ledger precomputed_values in let consensus_local_state = Consensus.Data.Local_state.create Public_key.Compressed.Set.empty ~genesis_ledger @@ -205,14 +209,14 @@ module Generator = struct in Async.Thread_safe.block_on_async_exn (fun () -> Deferred.List.iter branch - ~f:(Transition_frontier.add_breadcrumb_exn frontier) ) ; - {frontier; consensus_local_state} + ~f:(Transition_frontier.add_breadcrumb_exn frontier)) ; + { frontier; consensus_local_state } let gen ~precomputed_values ~verifier 
~max_frontier_length configs = let open Quickcheck.Generator.Let_syntax in let%map states = Vect.Quickcheck_generator.map configs ~f:(fun config -> - config ~precomputed_values ~verifier ~max_frontier_length ) + config ~precomputed_values ~verifier ~max_frontier_length) in setup ~precomputed_values states end diff --git a/src/lib/fake_network/fake_network.mli b/src/lib/fake_network/fake_network.mli index 6854eac9c40..57a6bb37dae 100644 --- a/src/lib/fake_network/fake_network.mli +++ b/src/lib/fake_network/fake_network.mli @@ -5,15 +5,20 @@ open Gadt_lib type 'n num_peers = 'n Peano.gt_1 type peer_state = - { frontier: Transition_frontier.t - ; consensus_local_state: Consensus.Data.Local_state.t } + { frontier : Transition_frontier.t + ; consensus_local_state : Consensus.Data.Local_state.t + } type peer_network = - {peer: Network_peer.Peer.t; state: peer_state; network: Mina_networking.t} + { peer : Network_peer.Peer.t + ; state : peer_state + ; network : Mina_networking.t + } type nonrec 'n t = - { fake_gossip_network: Mina_networking.Gossip_net.Fake.network - ; peer_networks: (peer_network, 'n) Vect.t } + { fake_gossip_network : Mina_networking.Gossip_net.Fake.network + ; peer_networks : (peer_network, 'n) Vect.t + } constraint 'n = _ num_peers val setup : diff --git a/src/lib/file_system/file_system.ml b/src/lib/file_system/file_system.ml index a0c5ce08212..e719ad49929 100644 --- a/src/lib/file_system/file_system.ml +++ b/src/lib/file_system/file_system.ml @@ -2,14 +2,14 @@ open Core open Async let dir_exists dir = - let%bind access_res = Unix.access dir [`Exists] in + let%bind access_res = Unix.access dir [ `Exists ] in if Result.is_ok access_res then let%map stat = Unix.stat dir in Unix.File_kind.equal (Unix.Stats.kind stat) `Directory else return false let remove_dir dir = - let%bind _ = Process.run_exn ~prog:"rm" ~args:["-rf"; dir] () in + let%bind _ = Process.run_exn ~prog:"rm" ~args:[ "-rf"; dir ] () in Deferred.unit let rec rmrf path = @@ -19,7 +19,7 @@ 
let rec rmrf path = |> Array.iter ~f:(fun name -> rmrf (Filename.concat path name)) ; Core.Unix.rmdir path | _ -> - if [%equal: [`Yes | `No | `Unknown]] (Core.Sys.file_exists path) `Yes + if [%equal: [ `Yes | `No | `Unknown ]] (Core.Sys.file_exists path) `Yes then Core.Sys.remove path let try_finally ~(f : unit -> 'a Deferred.t) @@ -59,11 +59,11 @@ let clear_dir toplevel_dir = in let dirs = if String.equal dirname toplevel_dir then List.concat dirs - else List.append (List.concat dirs) [fullname] + else List.append (List.concat dirs) [ fullname ] in (dirs, List.concat files) | _ -> - Deferred.return ([], [fullname]) + Deferred.return ([], [ fullname ]) in let%bind dirs, files = all_files toplevel_dir "" in let%bind () = Deferred.List.iter files ~f:(fun file -> Sys.remove file) in diff --git a/src/lib/filtered_external_transition/filtered_external_transition.ml b/src/lib/filtered_external_transition/filtered_external_transition.ml index 44c2082e0b6..d4a44c9be0b 100644 --- a/src/lib/filtered_external_transition/filtered_external_transition.ml +++ b/src/lib/filtered_external_transition/filtered_external_transition.ml @@ -19,17 +19,18 @@ module Transactions = struct module Stable = struct module V1 = struct type t = - { commands: + { commands : ( User_command.Stable.V1.t , Transaction_hash.Stable.V1.t ) With_hash.Stable.V1.t With_status.Stable.V1.t list - ; fee_transfers: + ; fee_transfers : (Fee_transfer.Single.Stable.V1.t * Fee_transfer_type.Stable.V1.t) list - ; coinbase: Currency.Amount.Stable.V1.t - ; coinbase_receiver: Public_key.Compressed.Stable.V1.t option } + ; coinbase : Currency.Amount.Stable.V1.t + ; coinbase_receiver : Public_key.Compressed.Stable.V1.t option + } let to_latest = Fn.id end @@ -41,9 +42,10 @@ module Protocol_state = struct module Stable = struct module V1 = struct type t = - { previous_state_hash: State_hash.Stable.V1.t - ; blockchain_state: Mina_state.Blockchain_state.Value.Stable.V1.t - ; consensus_state: 
Consensus.Data.Consensus_state.Value.Stable.V1.t } + { previous_state_hash : State_hash.Stable.V1.t + ; blockchain_state : Mina_state.Blockchain_state.Value.Stable.V1.t + ; consensus_state : Consensus.Data.Consensus_state.Value.Stable.V1.t + } let to_latest = Fn.id end @@ -54,19 +56,20 @@ end module Stable = struct module V1 = struct type t = - { creator: Public_key.Compressed.Stable.V1.t - ; winner: Public_key.Compressed.Stable.V1.t - ; protocol_state: Protocol_state.Stable.V1.t - ; transactions: Transactions.Stable.V1.t - ; snark_jobs: Transaction_snark_work.Info.Stable.V1.t list - ; proof: Proof.Stable.V1.t } + { creator : Public_key.Compressed.Stable.V1.t + ; winner : Public_key.Compressed.Stable.V1.t + ; protocol_state : Protocol_state.Stable.V1.t + ; transactions : Transactions.Stable.V1.t + ; snark_jobs : Transaction_snark_work.Info.Stable.V1.t list + ; proof : Proof.Stable.V1.t + } let to_latest = Fn.id end end] let participants ~next_available_token - {transactions= {commands; fee_transfers; _}; creator; winner; _} = + { transactions = { commands; fee_transfers; _ }; creator; winner; _ } = let open Account_id.Set in let _next_available_token, user_command_set = List.fold commands ~init:(next_available_token, empty) @@ -76,11 +79,11 @@ let participants ~next_available_token , union set ( of_list @@ User_command.accounts_accessed ~next_available_token - user_command.data.data ) ) ) + user_command.data.data ) )) in let fee_transfer_participants = List.fold fee_transfers ~init:empty ~f:(fun set (ft, _) -> - add set (Fee_transfer.Single.receiver ft) ) + add set (Fee_transfer.Single.receiver ft)) in add (add @@ -89,22 +92,22 @@ let participants ~next_available_token (Account_id.create winner Token_id.default) let participant_pks - {transactions= {commands; fee_transfers; _}; creator; winner; _} = + { transactions = { commands; fee_transfers; _ }; creator; winner; _ } = let open Public_key.Compressed.Set in let user_command_set = List.fold commands ~init:empty 
~f:(fun set user_command -> union set @@ of_list @@ List.map ~f:Account_id.public_key - @@ User_command.accounts_accessed - ~next_available_token:Token_id.invalid user_command.data.data ) + @@ User_command.accounts_accessed ~next_available_token:Token_id.invalid + user_command.data.data) in let fee_transfer_participants = List.fold fee_transfers ~init:empty ~f:(fun set (ft, _) -> - add set ft.receiver_pk ) + add set ft.receiver_pk) in add (add (union user_command_set fee_transfer_participants) creator) winner -let commands {transactions= {Transactions.commands; _}; _} = commands +let commands { transactions = { Transactions.commands; _ }; _ } = commands let validate_transactions ((transition_with_hash, _validity) as transition) = let staged_ledger_diff = @@ -126,11 +129,12 @@ let of_transition external_transition tracked_participants let creator = block_producer external_transition in let winner = block_winner external_transition in let protocol_state = - { Protocol_state.previous_state_hash= parent_hash external_transition - ; blockchain_state= + { Protocol_state.previous_state_hash = parent_hash external_transition + ; blockchain_state = External_transition.Validated.blockchain_state external_transition - ; consensus_state= - External_transition.Validated.consensus_state external_transition } + ; consensus_state = + External_transition.Validated.consensus_state external_transition + } in let next_available_token = protocol_state.blockchain_state.snarked_next_available_token @@ -138,21 +142,22 @@ let of_transition external_transition tracked_participants let transactions, _next_available_token = List.fold calculated_transactions ~init: - ( { Transactions.commands= [] - ; fee_transfers= [] - ; coinbase= Currency.Amount.zero - ; coinbase_receiver= None } + ( { Transactions.commands = [] + ; fee_transfers = [] + ; coinbase = Currency.Amount.zero + ; coinbase_receiver = None + } , next_available_token ) ~f:(fun (acc_transactions, next_available_token) -> function - | 
{data= Command (Snapp_command _); _} -> failwith "Not implemented" - | {data= Command command; status} -> ( + | { data = Command (Snapp_command _); _ } -> failwith "Not implemented" + | { data = Command command; status } -> ( let command = (command :> User_command.t) in let should_include_transaction command participants = List.exists (User_command.accounts_accessed ~next_available_token command) ~f:(fun account_id -> Public_key.Compressed.Set.mem participants - (Account_id.public_key account_id) ) + (Account_id.public_key account_id)) in match tracked_participants with | `Some interested_participants @@ -160,20 +165,22 @@ let of_transition external_transition tracked_participants (should_include_transaction command interested_participants) -> ( acc_transactions - , User_command.next_available_token command - next_available_token ) + , User_command.next_available_token command next_available_token + ) | `All | `Some _ -> (* Should include this command. *) ( { acc_transactions with - commands= - { With_status.data= - { With_hash.data= command - ; hash= Transaction_hash.hash_command command } - ; status } - :: acc_transactions.commands } - , User_command.next_available_token command - next_available_token ) ) - | {data= Fee_transfer fee_transfer; _} -> + commands = + { With_status.data = + { With_hash.data = command + ; hash = Transaction_hash.hash_command command + } + ; status + } + :: acc_transactions.commands + } + , User_command.next_available_token command next_available_token + ) ) | { data = Fee_transfer fee_transfer; _ } -> let fee_transfer_list = List.map (Mina_base.Fee_transfer.to_list fee_transfer) ~f:(fun f -> (f, Fee_transfer_type.Fee_transfer)) @@ -184,21 +191,20 @@ let of_transition external_transition tracked_participants fee_transfer_list | `Some interested_participants -> List.filter - ~f:(fun ({receiver_pk= pk; _}, _) -> - Public_key.Compressed.Set.mem interested_participants pk - ) + ~f:(fun ({ receiver_pk = pk; _ }, _) -> + 
Public_key.Compressed.Set.mem interested_participants pk) fee_transfer_list in ( { acc_transactions with - fee_transfers= fee_transfers @ acc_transactions.fee_transfers + fee_transfers = fee_transfers @ acc_transactions.fee_transfers } , next_available_token ) - | {data= Coinbase {Coinbase.amount; fee_transfer; receiver}; _} -> + | { data = Coinbase { Coinbase.amount; fee_transfer; receiver }; _ } -> let fee_transfer = Option.map ~f:(fun ft -> ( Coinbase_fee_transfer.to_fee_transfer ft - , Fee_transfer_type.Fee_transfer_via_coinbase ) ) + , Fee_transfer_type.Fee_transfer_via_coinbase )) fee_transfer in let fee_transfers = @@ -208,20 +214,20 @@ let of_transition external_transition tracked_participants in ( { acc_transactions with fee_transfers - ; coinbase_receiver= Some receiver - ; coinbase= + ; coinbase_receiver = Some receiver + ; coinbase = Currency.Amount.( - Option.value_exn (add amount acc_transactions.coinbase)) } - , next_available_token ) ) + Option.value_exn (add amount acc_transactions.coinbase)) + } + , next_available_token )) in let snark_jobs = List.map ( Staged_ledger_diff.completed_works - @@ External_transition.Validated.staged_ledger_diff external_transition - ) + @@ External_transition.Validated.staged_ledger_diff external_transition ) ~f:Transaction_snark_work.info in let proof = External_transition.Validated.protocol_state_proof external_transition in - {creator; winner; protocol_state; transactions; snark_jobs; proof} + { creator; winner; protocol_state; transactions; snark_jobs; proof } diff --git a/src/lib/gadt_lib/vect.ml b/src/lib/gadt_lib/vect.ml index 6a344bf2d9b..0cc1ee66178 100644 --- a/src/lib/gadt_lib/vect.ml +++ b/src/lib/gadt_lib/vect.ml @@ -17,8 +17,8 @@ let rec to_list : type n. ('a, n) t -> 'a list = function let rec map : type n. f:('a -> 'b) -> ('a, n) t -> ('b, n) t = fun ~f ls -> match ls with [] -> [] | h :: t -> f h :: map ~f t -let rec map2 : type n. 
- f:('a -> 'b -> 'c) -> ('a, n) t -> ('b, n) t -> ('c, n) t = +let rec map2 : type n. f:('a -> 'b -> 'c) -> ('a, n) t -> ('b, n) t -> ('c, n) t + = fun ~f ls_a ls_b -> match (ls_a, ls_b) with | [], [] -> @@ -30,8 +30,8 @@ let rec fold : type n. init:'b -> f:('b -> 'a -> 'b) -> ('a, n) t -> 'b = fun ~init ~f ls -> match ls with [] -> init | h :: t -> fold ~init:(f init h) ~f t -let rec fold_map : type n. - init:'b -> f:('b -> 'a -> 'b * 'c) -> ('a, n) t -> 'b * ('c, n) t = +let rec fold_map : + type n. init:'b -> f:('b -> 'a -> 'b * 'c) -> ('a, n) t -> 'b * ('c, n) t = fun ~init ~f ls -> match ls with | [] -> @@ -45,8 +45,8 @@ module Quickcheck_generator = struct open Core_kernel.Quickcheck open Generator.Let_syntax - let rec map : type n. - f:('a -> 'b Generator.t) -> ('a, n) t -> ('b, n) t Generator.t = + let rec map : + type n. f:('a -> 'b Generator.t) -> ('a, n) t -> ('b, n) t Generator.t = fun ~f ls -> match ls with | [] -> diff --git a/src/lib/genesis_constants/genesis_constants.ml b/src/lib/genesis_constants/genesis_constants.ml index 2889774d5e6..6f3e8e9e380 100644 --- a/src/lib/genesis_constants/genesis_constants.ml +++ b/src/lib/genesis_constants/genesis_constants.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel @@ -25,8 +24,7 @@ module Proof_level = struct | s -> failwithf "unrecognised proof level %s" s () - [%%inject - "compiled", proof_level] + [%%inject "compiled", proof_level] let compiled = of_string compiled @@ -38,9 +36,10 @@ module Fork_constants = struct module Stable = struct module V1 = struct type t = - { previous_state_hash: Pickles.Backend.Tick.Field.Stable.V1.t - ; previous_length: Mina_numbers.Length.Stable.V1.t - ; previous_global_slot: Mina_numbers.Global_slot.Stable.V1.t } + { previous_state_hash : Pickles.Backend.Tick.Field.Stable.V1.t + ; previous_length : Mina_numbers.Length.Stable.V1.t + ; previous_global_slot : Mina_numbers.Global_slot.Stable.V1.t + } [@@deriving sexp, equal, 
compare, yojson] let to_latest = Fn.id @@ -60,16 +59,17 @@ module Constraint_constants = struct module Stable = struct module V1 = struct type t = - { sub_windows_per_window: int - ; ledger_depth: int - ; work_delay: int - ; block_window_duration_ms: int - ; transaction_capacity_log_2: int - ; pending_coinbase_depth: int - ; coinbase_amount: Currency.Amount.Stable.V1.t - ; supercharged_coinbase_factor: int - ; account_creation_fee: Currency.Fee.Stable.V1.t - ; fork: Fork_constants.Stable.V1.t option } + { sub_windows_per_window : int + ; ledger_depth : int + ; work_delay : int + ; block_window_duration_ms : int + ; transaction_capacity_log_2 : int + ; pending_coinbase_depth : int + ; coinbase_amount : Currency.Amount.Stable.V1.t + ; supercharged_coinbase_factor : int + ; account_creation_fee : Currency.Fee.Stable.V1.t + ; fork : Fork_constants.Stable.V1.t option + } [@@deriving sexp, equal, compare, yojson] let to_latest = Fn.id @@ -77,55 +77,52 @@ module Constraint_constants = struct end] let to_snark_keys_header (t : t) : Snark_keys_header.Constraint_constants.t = - { sub_windows_per_window= t.sub_windows_per_window - ; ledger_depth= t.ledger_depth - ; work_delay= t.work_delay - ; block_window_duration_ms= t.block_window_duration_ms - ; transaction_capacity= Log_2 t.transaction_capacity_log_2 - ; pending_coinbase_depth= t.pending_coinbase_depth - ; coinbase_amount= Currency.Amount.to_uint64 t.coinbase_amount - ; supercharged_coinbase_factor= t.supercharged_coinbase_factor - ; account_creation_fee= Currency.Fee.to_uint64 t.account_creation_fee - ; fork= + { sub_windows_per_window = t.sub_windows_per_window + ; ledger_depth = t.ledger_depth + ; work_delay = t.work_delay + ; block_window_duration_ms = t.block_window_duration_ms + ; transaction_capacity = Log_2 t.transaction_capacity_log_2 + ; pending_coinbase_depth = t.pending_coinbase_depth + ; coinbase_amount = Currency.Amount.to_uint64 t.coinbase_amount + ; supercharged_coinbase_factor = 
t.supercharged_coinbase_factor + ; account_creation_fee = Currency.Fee.to_uint64 t.account_creation_fee + ; fork = ( match t.fork with - | Some {previous_length; previous_state_hash; previous_global_slot} -> + | Some { previous_length; previous_state_hash; previous_global_slot } -> Some - { previous_length= Unsigned.UInt32.to_int previous_length - ; previous_state_hash= + { previous_length = Unsigned.UInt32.to_int previous_length + ; previous_state_hash = Pickles.Backend.Tick.Field.to_string previous_state_hash - ; previous_global_slot= - Unsigned.UInt32.to_int previous_global_slot } + ; previous_global_slot = + Unsigned.UInt32.to_int previous_global_slot + } | None -> - None ) } + None ) + } (* Generate the compile-time constraint constants, using a signature to hide the optcomp constants that we import. *) include ( struct - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] - [%%inject - "sub_windows_per_window", sub_windows_per_window] + [%%inject "sub_windows_per_window", sub_windows_per_window] - [%%else] + [%%else] - (* Invalid value, this should not be used by nonconsensus nodes. *) - let sub_windows_per_window = -1 + (* Invalid value, this should not be used by nonconsensus nodes. *) + let sub_windows_per_window = -1 - [%%endif] + [%%endif] - [%%inject - "ledger_depth", ledger_depth] + [%%inject "ledger_depth", ledger_depth] - [%%inject - "coinbase_amount_string", coinbase] + [%%inject "coinbase_amount_string", coinbase] - [%%inject - "account_creation_fee_string", account_creation_fee_int] + [%%inject "account_creation_fee_string", account_creation_fee_int] - (** All the proofs before the last [work_delay] blocks must be + (** All the proofs before the last [work_delay] blocks must be completed to add transactions. [work_delay] is the minimum number of blocks and will increase if the throughput is less. 
- If [work_delay = 0], all the work that was added to the scan @@ -136,94 +133,86 @@ module Constraint_constants = struct completing the proofs. *) - [%%inject - "work_delay", scan_state_work_delay] + [%%inject "work_delay", scan_state_work_delay] - [%%inject - "block_window_duration_ms", block_window_duration] + [%%inject "block_window_duration_ms", block_window_duration] - [%%if - scan_state_with_tps_goal] + [%%if scan_state_with_tps_goal] - [%%inject - "tps_goal_x10", scan_state_tps_goal_x10] + [%%inject "tps_goal_x10", scan_state_tps_goal_x10] - let max_coinbases = 2 + let max_coinbases = 2 - (* block_window_duration is in milliseconds, so divide by 1000 divide - by 10 again because we have tps * 10 - *) - let max_user_commands_per_block = - tps_goal_x10 * block_window_duration_ms / (1000 * 10) + (* block_window_duration is in milliseconds, so divide by 1000 divide + by 10 again because we have tps * 10 + *) + let max_user_commands_per_block = + tps_goal_x10 * block_window_duration_ms / (1000 * 10) - (** Log of the capacity of transactions per transition. + (** Log of the capacity of transactions per transition. - 1 will only work if we don't have prover fees. - 2 will work with prover fees, but not if we want a transaction included in every block. - At least 3 ensures a transaction per block and the staged-ledger unit tests pass. 
*) - let transaction_capacity_log_2 = - 1 - + Core_kernel.Int.ceil_log2 - (max_user_commands_per_block + max_coinbases) - - [%%else] - - [%%inject - "transaction_capacity_log_2", scan_state_transaction_capacity_log_2] - - [%%endif] - - [%%inject - "supercharged_coinbase_factor", supercharged_coinbase_factor] - - let pending_coinbase_depth = - Core_kernel.Int.ceil_log2 - (((transaction_capacity_log_2 + 1) * (work_delay + 1)) + 1) - - [%%ifndef - fork_previous_length] - - let fork = None - - [%%else] - - [%%inject - "fork_previous_length", fork_previous_length] - - [%%inject - "fork_previous_state_hash", fork_previous_state_hash] - - [%%inject - "fork_previous_global_slot", fork_previous_global_slot] - - let fork = - Some - { Fork_constants.previous_length= - Mina_numbers.Length.of_int fork_previous_length - ; previous_state_hash= - Data_hash_lib.State_hash.of_base58_check_exn - fork_previous_state_hash - ; previous_global_slot= - Mina_numbers.Global_slot.of_int fork_previous_global_slot } - - [%%endif] - - let compiled = - { sub_windows_per_window - ; ledger_depth - ; work_delay - ; block_window_duration_ms - ; transaction_capacity_log_2 - ; pending_coinbase_depth - ; coinbase_amount= - Currency.Amount.of_formatted_string coinbase_amount_string - ; supercharged_coinbase_factor - ; account_creation_fee= - Currency.Fee.of_formatted_string account_creation_fee_string - ; fork } - end : + let transaction_capacity_log_2 = + 1 + + Core_kernel.Int.ceil_log2 (max_user_commands_per_block + max_coinbases) + + [%%else] + + [%%inject + "transaction_capacity_log_2", scan_state_transaction_capacity_log_2] + + [%%endif] + + [%%inject "supercharged_coinbase_factor", supercharged_coinbase_factor] + + let pending_coinbase_depth = + Core_kernel.Int.ceil_log2 + (((transaction_capacity_log_2 + 1) * (work_delay + 1)) + 1) + + [%%ifndef fork_previous_length] + + let fork = None + + [%%else] + + [%%inject "fork_previous_length", fork_previous_length] + + [%%inject 
"fork_previous_state_hash", fork_previous_state_hash] + + [%%inject "fork_previous_global_slot", fork_previous_global_slot] + + let fork = + Some + { Fork_constants.previous_length = + Mina_numbers.Length.of_int fork_previous_length + ; previous_state_hash = + Data_hash_lib.State_hash.of_base58_check_exn + fork_previous_state_hash + ; previous_global_slot = + Mina_numbers.Global_slot.of_int fork_previous_global_slot + } + + [%%endif] + + let compiled = + { sub_windows_per_window + ; ledger_depth + ; work_delay + ; block_window_duration_ms + ; transaction_capacity_log_2 + ; pending_coinbase_depth + ; coinbase_amount = + Currency.Amount.of_formatted_string coinbase_amount_string + ; supercharged_coinbase_factor + ; account_creation_fee = + Currency.Fee.of_formatted_string account_creation_fee_string + ; fork + } + end : sig val compiled : t end ) @@ -245,15 +234,15 @@ let validate_time time_str = match Result.try_with (fun () -> Option.value_map ~default:(Time.now ()) ~f:genesis_timestamp_of_string - time_str ) + time_str) with | Ok time -> Ok (of_time time) | Error _ -> Error "Invalid timestamp. Please specify timestamp in \"%Y-%m-%d \ - %H:%M:%S%z\". For example, \"2019-01-30 12:00:00-0800\" for \ - UTC-08:00 timezone" + %H:%M:%S%z\". 
For example, \"2019-01-30 12:00:00-0800\" for UTC-08:00 \ + timezone" let genesis_timestamp_to_string time = Int64.to_float time |> Time.Span.of_ms |> Time.of_span_since_epoch @@ -267,11 +256,12 @@ module Protocol = struct module Stable = struct module V1 = struct type ('length, 'delta, 'genesis_state_timestamp) t = - { k: 'length - ; slots_per_epoch: 'length - ; slots_per_sub_window: 'length - ; delta: 'delta - ; genesis_state_timestamp: 'genesis_state_timestamp } + { k : 'length + ; slots_per_epoch : 'length + ; slots_per_sub_window : 'length + ; delta : 'delta + ; genesis_state_timestamp : 'genesis_state_timestamp + } [@@deriving equal, ord, hash, sexp, yojson, hlist, fields] end end] @@ -297,7 +287,8 @@ module Protocol = struct (Time.of_span_since_epoch (Time.Span.of_ms (Int64.to_float t.genesis_state_timestamp))) - ~zone:Time.Zone.utc) ) ] + ~zone:Time.Zone.utc) ) + ] let of_yojson = function | `Assoc @@ -305,17 +296,19 @@ module Protocol = struct ; ("slots_per_epoch", `Int slots_per_epoch) ; ("slots_per_sub_window", `Int slots_per_sub_window) ; ("delta", `Int delta) - ; ("genesis_state_timestamp", `String time_str) ] -> ( - match validate_time time_str with - | Ok genesis_state_timestamp -> - Ok - { Poly.k - ; slots_per_epoch - ; slots_per_sub_window - ; delta - ; genesis_state_timestamp } - | Error e -> - Error (sprintf !"Genesis_constants.Protocol.of_yojson: %s" e) ) + ; ("genesis_state_timestamp", `String time_str) + ] -> ( + match validate_time time_str with + | Ok genesis_state_timestamp -> + Ok + { Poly.k + ; slots_per_epoch + ; slots_per_sub_window + ; delta + ; genesis_state_timestamp + } + | Error e -> + Error (sprintf !"Genesis_constants.Protocol.of_yojson: %s" e) ) | _ -> Error "Genesis_constants.Protocol.of_yojson: unexpected JSON" @@ -326,15 +319,16 @@ module Protocol = struct type t = (int, int, string) Poly.Stable.V1.t [@@deriving sexp] end in let t' : T.t = - { k= t.k - ; delta= t.delta - ; slots_per_epoch= t.slots_per_epoch - ; 
slots_per_sub_window= t.slots_per_sub_window - ; genesis_state_timestamp= + { k = t.k + ; delta = t.delta + ; slots_per_epoch = t.slots_per_epoch + ; slots_per_sub_window = t.slots_per_sub_window + ; genesis_state_timestamp = Time.to_string_abs (Time.of_span_since_epoch (Time.Span.of_ms (Int64.to_float t.genesis_state_timestamp))) - ~zone:Time.Zone.utc } + ~zone:Time.Zone.utc + } in T.sexp_of_t t' end @@ -342,12 +336,13 @@ module Protocol = struct module Tests = struct let%test "protocol constants serialization v1" = let t : V1.t = - { k= 1 - ; delta= 100 - ; slots_per_sub_window= 10 - ; slots_per_epoch= 1000 - ; genesis_state_timestamp= - Time.of_string "2019-10-08 17:51:23.050849Z" |> of_time } + { k = 1 + ; delta = 100 + ; slots_per_sub_window = 10 + ; slots_per_epoch = 1000 + ; genesis_state_timestamp = + Time.of_string "2019-10-08 17:51:23.050849Z" |> of_time + } in (*from the print statement in Serialization.check_serialization*) let known_good_digest = "28b7c3bb5f94351f0afa6ebd83078730" in @@ -357,16 +352,16 @@ module Protocol = struct end end] - [%%define_locally - Stable.Latest.(to_yojson)] + [%%define_locally Stable.Latest.(to_yojson)] end module T = struct (* bin_io is for printing chain id inputs *) type t = - { protocol: Protocol.Stable.Latest.t - ; txpool_max_size: int - ; num_accounts: int option } + { protocol : Protocol.Stable.Latest.t + ; txpool_max_size : int + ; num_accounts : int option + } [@@deriving to_yojson, bin_io_unversioned] let hash (t : t) = @@ -377,7 +372,8 @@ module T = struct ; t.protocol.slots_per_epoch ; t.protocol.slots_per_sub_window ; t.protocol.delta - ; t.txpool_max_size ] + ; t.txpool_max_size + ] ~f:Int.to_string |> String.concat ~sep:"" ) ^ Core.Time.to_string_abs ~zone:Time.Zone.utc @@ -390,34 +386,29 @@ end include T -[%%inject -"genesis_state_timestamp_string", genesis_state_timestamp] +[%%inject "genesis_state_timestamp_string", genesis_state_timestamp] -[%%inject -"k", k] +[%%inject "k", k] -[%%inject 
-"slots_per_epoch", slots_per_epoch] +[%%inject "slots_per_epoch", slots_per_epoch] -[%%inject -"slots_per_sub_window", slots_per_sub_window] +[%%inject "slots_per_sub_window", slots_per_sub_window] -[%%inject -"delta", delta] +[%%inject "delta", delta] -[%%inject -"pool_max_size", pool_max_size] +[%%inject "pool_max_size", pool_max_size] let compiled : t = - { protocol= + { protocol = { k ; slots_per_epoch ; slots_per_sub_window ; delta - ; genesis_state_timestamp= + ; genesis_state_timestamp = genesis_timestamp_of_string genesis_state_timestamp_string |> of_time } - ; txpool_max_size= pool_max_size - ; num_accounts= None } + ; txpool_max_size = pool_max_size + ; num_accounts = None + } let for_unit_tests = compiled diff --git a/src/lib/genesis_ledger/fake_accounts.ml b/src/lib/genesis_ledger/fake_accounts.ml index ce57fc16713..87aaefd250e 100644 --- a/src/lib/genesis_ledger/fake_accounts.ml +++ b/src/lib/genesis_ledger/fake_accounts.ml @@ -4,7 +4,7 @@ open Core_kernel open Signature_lib let make_account pk balance = - Intf.Public_accounts.{pk; balance; delegate= None; timing= Untimed} + Intf.Public_accounts.{ pk; balance; delegate = None; timing = Untimed } let balance_gen = Quickcheck.Generator.of_list (List.range 10 500) diff --git a/src/lib/genesis_ledger/fuzz_ledger.ml b/src/lib/genesis_ledger/fuzz_ledger.ml index 2e6d1ed8035..3492d2e449f 100644 --- a/src/lib/genesis_ledger/fuzz_ledger.ml +++ b/src/lib/genesis_ledger/fuzz_ledger.ml @@ -1,3 +1,4 @@ let name = "fuzz" -let balances = lazy [1_000_000; 2_000_000; 750_000; 1_500_000; 100_000; 500_000] +let balances = + lazy [ 1_000_000; 2_000_000; 750_000; 1_500_000; 100_000; 500_000 ] diff --git a/src/lib/genesis_ledger/genesis_ledger.ml b/src/lib/genesis_ledger/genesis_ledger.ml index 9bcf0161e76..1174024202e 100644 --- a/src/lib/genesis_ledger/genesis_ledger.ml +++ b/src/lib/genesis_ledger/genesis_ledger.ml @@ -26,10 +26,10 @@ module Private_accounts (Accounts : Intf.Private_accounts.S) = struct let accounts = 
let open Lazy.Let_syntax in let%map accounts = accounts in - List.map accounts ~f:(fun {pk; sk; balance; timing} -> + List.map accounts ~f:(fun { pk; sk; balance; timing } -> let account_id = Account_id.create pk Token_id.default in let balance = Balance.of_formatted_string (Int.to_string balance) in - (Some sk, account_with_timing account_id balance timing) ) + (Some sk, account_with_timing account_id balance timing)) end module Public_accounts (Accounts : Intf.Public_accounts.S) = struct @@ -38,11 +38,11 @@ module Public_accounts (Accounts : Intf.Public_accounts.S) = struct let accounts = let open Lazy.Let_syntax in let%map accounts = Accounts.accounts in - List.map accounts ~f:(fun {pk; balance; delegate; timing} -> + List.map accounts ~f:(fun { pk; balance; delegate; timing } -> let account_id = Account_id.create pk Token_id.default in let balance = Balance.of_int balance in let base_acct = account_with_timing account_id balance timing in - (None, {base_acct with delegate= Option.value ~default:pk delegate}) ) + (None, { base_acct with delegate = Option.value ~default:pk delegate })) end (** Generate a ledger using the sample keypairs from [Mina_base] with the given @@ -59,10 +59,11 @@ module Balances (Balances : Intf.Named_balances_intf) = struct let%map balances = Balances.balances and keypairs = Mina_base.Sample_keypairs.keypairs in List.mapi balances ~f:(fun i b -> - { balance= b - ; pk= fst keypairs.(i) - ; sk= snd keypairs.(i) - ; timing= Untimed } ) + { balance = b + ; pk = fst keypairs.(i) + ; sk = snd keypairs.(i) + ; timing = Untimed + }) end) end @@ -80,7 +81,7 @@ module Utils = struct (Public_key.decompress account.Poly.Stable.Latest.public_key) ~message:pk_error_msg in - {Keypair.public_key; private_key} + { Keypair.public_key; private_key } let id_of_account_record (_private_key, account) = Account.identifier account @@ -93,7 +94,7 @@ module Utils = struct find_account_record_exn accounts ~f:(fun new_account -> not (List.mem 
~equal:Public_key.Compressed.equal old_account_pks - (Account.public_key new_account)) ) + (Account.public_key new_account))) let find_new_account_record_exn accounts old_account_pks = find_new_account_record_exn_ accounts @@ -121,7 +122,7 @@ module Make (Inputs : Intf.Ledger_input_intf) : Intf.S = struct List.iter (Lazy.force accounts) ~f:(fun (_, account) -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; ledger include Utils @@ -142,8 +143,8 @@ module Make (Inputs : Intf.Ledger_input_intf) : Intf.S = struct in Memo.unit (fun () -> List.max_elt (Lazy.force accounts) ~compare:(fun (_, a) (_, b) -> - Balance.compare a.balance b.balance ) - |> Option.value_exn ?here:None ?error:None ~message:error_msg ) + Balance.compare a.balance b.balance) + |> Option.value_exn ?here:None ?error:None ~message:error_msg) let largest_account_id_exn = Memo.unit (fun () -> largest_account_exn () |> id_of_account_record) @@ -211,8 +212,8 @@ end) : Intf.S = struct in Memo.unit (fun () -> List.max_elt (Lazy.force accounts) ~compare:(fun (_, a) (_, b) -> - Balance.compare a.Account.Poly.balance b.Account.Poly.balance ) - |> Option.value_exn ?here:None ?error:None ~message:error_msg ) + Balance.compare a.Account.Poly.balance b.Account.Poly.balance) + |> Option.value_exn ?here:None ?error:None ~message:error_msg) let largest_account_id_exn = Memo.unit (fun () -> largest_account_exn () |> id_of_account_record) @@ -262,8 +263,7 @@ module Unit_test_ledger = Make (struct let directory = `Ephemeral - let depth = - Genesis_constants.Constraint_constants.for_unit_tests.ledger_depth + let depth = Genesis_constants.Constraint_constants.for_unit_tests.ledger_depth end) let for_unit_tests : Packed.t = (module Unit_test_ledger) @@ -273,14 +273,14 @@ module Integration_tests = struct let name = "test_delegation" let balances = - lazy [0 (* delegatee *); 0 (* placeholder *); 5_000_000 (* delegator *)] + lazy [ 0 (* delegatee *); 0 (* placeholder *); 5_000_000 
(* delegator *) ] end)) module Five_even_stakes = Register (Balances (struct let name = "test_five_even_stakes" let balances = - lazy [1_000_000; 1_000_000; 1_000_000; 1_000_000; 1_000_000; 1_000] + lazy [ 1_000_000; 1_000_000; 1_000_000; 1_000_000; 1_000_000; 1_000 ] end)) module Split_two_stakes = Register (Balances (struct @@ -296,6 +296,6 @@ module Integration_tests = struct module Three_even_stakes = Register (Balances (struct let name = "test_three_even_stakes" - let balances = lazy [1_000_000; 1_000_000; 1_000_000; 1000; 1000; 1000] + let balances = lazy [ 1_000_000; 1_000_000; 1_000_000; 1000; 1000; 1000 ] end)) end diff --git a/src/lib/genesis_ledger/intf.ml b/src/lib/genesis_ledger/intf.ml index f8009d90e8b..db5cde4d3ff 100644 --- a/src/lib/genesis_ledger/intf.ml +++ b/src/lib/genesis_ledger/intf.ml @@ -23,15 +23,17 @@ module Timing = struct ; cliff_time ; cliff_amount ; vesting_increment - ; vesting_period } + ; vesting_period + } end module Public_accounts = struct type account_data = - { pk: Public_key.Compressed.t - ; balance: int - ; delegate: Public_key.Compressed.t option - ; timing: Timing.t } + { pk : Public_key.Compressed.t + ; balance : int + ; delegate : Public_key.Compressed.t option + ; timing : Timing.t + } module type S = sig val name : string @@ -42,10 +44,11 @@ end module Private_accounts = struct type account_data = - { pk: Public_key.Compressed.t - ; sk: Private_key.t - ; balance: int - ; timing: Timing.t } + { pk : Public_key.Compressed.t + ; sk : Private_key.t + ; balance : int + ; timing : Timing.t + } module type S = sig val name : string @@ -73,7 +76,7 @@ end module type Ledger_input_intf = sig include Accounts_intf - val directory : [`Ephemeral | `New | `Path of string] + val directory : [ `Ephemeral | `New | `Path of string ] val depth : int end diff --git a/src/lib/genesis_ledger/test_ledger.ml b/src/lib/genesis_ledger/test_ledger.ml index 590ec982928..473d74fd654 100644 --- a/src/lib/genesis_ledger/test_ledger.ml +++ 
b/src/lib/genesis_ledger/test_ledger.ml @@ -19,4 +19,5 @@ let balances = ; 1_000 ; 1_000 ; 1_000 - ; 1_000 ] + ; 1_000 + ] diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml index 8333f7ac373..ead5087bf08 100644 --- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml +++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml @@ -21,7 +21,8 @@ module Tar = struct "-czf" ; file ; (* Add [directory] to tar file. *) - directory ] + directory + ] () with | Ok _ -> @@ -38,7 +39,8 @@ module Tar = struct ; root ; (* Extract gzipped tar file [file]. *) "-xzf" - ; file ] + ; file + ] () with | Ok _ -> @@ -80,13 +82,13 @@ module Ledger = struct [ Int.to_string constraint_constants.ledger_depth ; Int.to_string (Option.value ~default:0 num_accounts) ; List.to_string balances ~f:(fun (i, balance) -> - sprintf "%i %s" i (Currency.Balance.to_string balance) ) + sprintf "%i %s" i (Currency.Balance.to_string balance)) ; (* Distinguish ledgers when the hash function is different. *) Snark_params.Tick.Field.to_string Mina_base.Account.empty_digest ; (* Distinguish ledgers when the account record layout has changed. 
*) - Bin_prot.Writer.to_string - Mina_base.Account.Stable.Latest.bin_writer_t - Mina_base.Account.empty ] + Bin_prot.Writer.to_string Mina_base.Account.Stable.Latest.bin_writer_t + Mina_base.Account.empty + ] in let str = match other_data with None -> str | Some other_data -> str ^ other_data @@ -101,29 +103,33 @@ module Ledger = struct let find_tar ~logger ~genesis_dir ~constraint_constants ~ledger_name_prefix (config : Runtime_config.Ledger.t) = - let search_paths = Cache_dir.possible_paths "" @ [genesis_dir] in + let search_paths = Cache_dir.possible_paths "" @ [ genesis_dir ] in let file_exists filename path = let filename = path ^/ filename in if%map file_exists ~follow_symlinks:true filename then ( [%log trace] "Found $ledger file at $path" ~metadata: - [("ledger", `String ledger_name_prefix); ("path", `String filename)] ; + [ ("ledger", `String ledger_name_prefix) + ; ("path", `String filename) + ] ; Some filename ) else ( [%log trace] "Ledger file $path does not exist" - ~metadata:[("path", `String filename)] ; + ~metadata:[ ("path", `String filename) ] ; None ) in let load_from_s3 filename = let s3_path = s3_bucket_prefix ^/ filename in let local_path = Cache_dir.s3_install_path ^/ filename in - match%bind Cache_dir.load_from_s3 [s3_path] [local_path] ~logger with + match%bind Cache_dir.load_from_s3 [ s3_path ] [ local_path ] ~logger with | Ok () -> file_exists filename Cache_dir.s3_install_path | Error _ -> [%log trace] "Could not download $ledger from $uri" ~metadata: - [("ledger", `String ledger_name_prefix); ("uri", `String s3_path)] ; + [ ("ledger", `String ledger_name_prefix) + ; ("uri", `String s3_path) + ] ; return None in let%bind hash_filename = @@ -158,28 +164,28 @@ module Ledger = struct | Some filename -> return (Some filename) | None -> ( - match (config.base, config.name) with - | Named name, _ -> - let named_filename = - named_filename ~constraint_constants - ~num_accounts:config.num_accounts ~balances:config.balances - ~ledger_name_prefix 
name - in - Deferred.List.find_map ~f:(file_exists named_filename) search_paths - | Accounts accounts, _ -> - search_local_and_s3 ~other_data:(accounts_hash accounts) "accounts" - | Hash hash, None -> - assert ([%equal: string option] (Some hash) config.hash) ; - return None - | _, Some name -> - search_local_and_s3 name ) + match (config.base, config.name) with + | Named name, _ -> + let named_filename = + named_filename ~constraint_constants + ~num_accounts:config.num_accounts ~balances:config.balances + ~ledger_name_prefix name + in + Deferred.List.find_map ~f:(file_exists named_filename) search_paths + | Accounts accounts, _ -> + search_local_and_s3 ~other_data:(accounts_hash accounts) "accounts" + | Hash hash, None -> + assert ([%equal: string option] (Some hash) config.hash) ; + return None + | _, Some name -> + search_local_and_s3 name ) let load_from_tar ?(genesis_dir = Cache_dir.autogen_path) ~logger ~(constraint_constants : Genesis_constants.Constraint_constants.t) ?accounts ~ledger_name_prefix filename = [%log trace] "Loading $ledger from $path" ~metadata: - [("ledger", `String ledger_name_prefix); ("path", `String filename)] ; + [ ("ledger", `String ledger_name_prefix); ("path", `String filename) ] ; let dirname = Uuid.to_string (Uuid_unix.create ()) in (* Unpack the ledger in the autogen directory, since we know that we have write permissions there. 
@@ -215,16 +221,15 @@ module Ledger = struct let dirname = Option.value_exn (Ledger.get_directory ledger) in let root_hash = State_hash.to_string @@ Ledger.merkle_root ledger in let%bind () = Unix.mkdir ~p:() genesis_dir in - let tar_path = - genesis_dir ^/ hash_filename root_hash ~ledger_name_prefix - in + let tar_path = genesis_dir ^/ hash_filename root_hash ~ledger_name_prefix in [%log trace] "Creating $ledger tar file for $root_hash at $path from database at $dir" ~metadata: [ ("ledger", `String ledger_name_prefix) ; ("root_hash", `String root_hash) ; ("path", `String tar_path) - ; ("dir", `String dirname) ] ; + ; ("dir", `String dirname) + ] ; let open Deferred.Or_error.Let_syntax in let%map () = Tar.create ~root:dirname ~file:tar_path ~directory:"." () in tar_path @@ -268,19 +273,22 @@ module Ledger = struct | Accounts accounts -> Some (lazy (add_genesis_winner_account (Accounts.to_full accounts))) | Named name -> ( - match Genesis_ledger.fetch_ledger name with - | Some (module M) -> - [%log trace] "Found $ledger with name $ledger_name" - ~metadata: - [ ("ledger", `String ledger_name_prefix) - ; ("ledger_name", `String name) ] ; - Some (Lazy.map ~f:add_genesis_winner_account M.accounts) - | None -> - [%log trace] "Could not find a built-in $ledger named $ledger_name" - ~metadata: - [ ("ledger", `String ledger_name_prefix) - ; ("ledger_name", `String name) ] ; - None ) + match Genesis_ledger.fetch_ledger name with + | Some (module M) -> + [%log trace] "Found $ledger with name $ledger_name" + ~metadata: + [ ("ledger", `String ledger_name_prefix) + ; ("ledger_name", `String name) + ] ; + Some (Lazy.map ~f:add_genesis_winner_account M.accounts) + | None -> + [%log trace] + "Could not find a built-in $ledger named $ledger_name" + ~metadata: + [ ("ledger", `String ledger_name_prefix) + ; ("ledger_name", `String name) + ] ; + None ) in let padded_accounts_with_balances_opt = Option.map accounts_opt @@ -328,101 +336,110 @@ module Ledger = struct [%log error] "Could 
not load ledger from $path: $error" ~metadata: [ ("path", `String tar_path) - ; ("error", Error_json.error_to_yojson err) ] ; + ; ("error", Error_json.error_to_yojson err) + ] ; Error err ) | None -> ( - match padded_accounts_opt with - | None -> ( - match config.base with - | Accounts _ -> - assert false - | Hash hash -> - [%log error] - "Could not find or generate a $ledger for $root_hash" - ~metadata: - [ ("ledger", `String ledger_name_prefix) - ; ("root_hash", `String hash) ] ; - Deferred.Or_error.errorf - "Could not find a ledger tar file for hash '%s'" hash - | Named ledger_name -> - let ledger_filename = - named_filename ~constraint_constants - ~num_accounts:config.num_accounts ~balances:config.balances - ~ledger_name_prefix ledger_name + match padded_accounts_opt with + | None -> ( + match config.base with + | Accounts _ -> + assert false + | Hash hash -> + [%log error] + "Could not find or generate a $ledger for $root_hash" + ~metadata: + [ ("ledger", `String ledger_name_prefix) + ; ("root_hash", `String hash) + ] ; + Deferred.Or_error.errorf + "Could not find a ledger tar file for hash '%s'" hash + | Named ledger_name -> + let ledger_filename = + named_filename ~constraint_constants + ~num_accounts:config.num_accounts + ~balances:config.balances ~ledger_name_prefix + ledger_name + in + [%log error] + "Bad config $config: $ledger named $ledger_name is not \ + built in, and no ledger file was found at \ + $ledger_filename" + ~metadata: + [ ("ledger", `String ledger_name_prefix) + ; ("config", Runtime_config.Ledger.to_yojson config) + ; ("ledger_name", `String ledger_name) + ; ("ledger_filename", `String ledger_filename) + ] ; + Deferred.Or_error.errorf "ledger '%s' not found" ledger_name + ) + | Some accounts -> ( + let packed = + packed_genesis_ledger_of_accounts + ~depth:constraint_constants.ledger_depth accounts in - [%log error] - "Bad config $config: $ledger named $ledger_name is not \ - built in, and no ledger file was found at $ledger_filename" - 
~metadata: - [ ("ledger", `String ledger_name_prefix) - ; ("config", Runtime_config.Ledger.to_yojson config) - ; ("ledger_name", `String ledger_name) - ; ("ledger_filename", `String ledger_filename) ] ; - Deferred.Or_error.errorf "ledger '%s' not found" ledger_name ) - | Some accounts -> ( - let packed = - packed_genesis_ledger_of_accounts - ~depth:constraint_constants.ledger_depth accounts - in - let ledger = Lazy.force (Genesis_ledger.Packed.t packed) in - let%bind tar_path = - generate_tar ~genesis_dir ~logger ~ledger_name_prefix ledger - in - let config = - { config with - hash= Some (State_hash.to_string @@ Ledger.merkle_root ledger) - } - in - let name, other_data = - match (config.base, config.name) with - | Named name, _ -> - (Some name, None) - | Accounts accounts, _ -> - (Some "accounts", Some (accounts_hash accounts)) - | Hash _, None -> - (None, None) - | _, Some name -> - (Some name, None) - in - match (tar_path, name) with - | Ok tar_path, Some name -> - let link_name = - genesis_dir - ^/ named_filename ~constraint_constants - ~num_accounts:config.num_accounts - ~balances:config.balances ~ledger_name_prefix - ?other_data name - in - (* Delete the file if it already exists. *) - let%bind () = - Deferred.Or_error.try_with ~here:[%here] (fun () -> - Sys.remove link_name ) - |> Deferred.ignore_m - in - (* Add a symlink from the named path to the hash path. 
*) - let%map () = Unix.symlink ~target:tar_path ~link_name in - [%log trace] - "Linking ledger file $tar_path to $named_tar_path" - ~metadata: - [ ("tar_path", `String tar_path) - ; ("named_tar_path", `String link_name) ] ; - Ok (packed, config, link_name) - | Ok tar_path, None -> - return (Ok (packed, config, tar_path)) - | Error err, _ -> - let root_hash = - State_hash.to_string @@ Ledger.merkle_root ledger - in - let tar_path = - genesis_dir ^/ hash_filename root_hash ~ledger_name_prefix - in - [%log error] - "Could not generate a $ledger file at $path: $error" - ~metadata: - [ ("ledger", `String ledger_name_prefix) - ; ("path", `String tar_path) - ; ("error", Error_json.error_to_yojson err) ] ; - return (Error err) ) ) ) + let ledger = Lazy.force (Genesis_ledger.Packed.t packed) in + let%bind tar_path = + generate_tar ~genesis_dir ~logger ~ledger_name_prefix ledger + in + let config = + { config with + hash = + Some (State_hash.to_string @@ Ledger.merkle_root ledger) + } + in + let name, other_data = + match (config.base, config.name) with + | Named name, _ -> + (Some name, None) + | Accounts accounts, _ -> + (Some "accounts", Some (accounts_hash accounts)) + | Hash _, None -> + (None, None) + | _, Some name -> + (Some name, None) + in + match (tar_path, name) with + | Ok tar_path, Some name -> + let link_name = + genesis_dir + ^/ named_filename ~constraint_constants + ~num_accounts:config.num_accounts + ~balances:config.balances ~ledger_name_prefix + ?other_data name + in + (* Delete the file if it already exists. *) + let%bind () = + Deferred.Or_error.try_with ~here:[%here] (fun () -> + Sys.remove link_name) + |> Deferred.ignore_m + in + (* Add a symlink from the named path to the hash path. 
*) + let%map () = Unix.symlink ~target:tar_path ~link_name in + [%log trace] + "Linking ledger file $tar_path to $named_tar_path" + ~metadata: + [ ("tar_path", `String tar_path) + ; ("named_tar_path", `String link_name) + ] ; + Ok (packed, config, link_name) + | Ok tar_path, None -> + return (Ok (packed, config, tar_path)) + | Error err, _ -> + let root_hash = + State_hash.to_string @@ Ledger.merkle_root ledger + in + let tar_path = + genesis_dir ^/ hash_filename root_hash ~ledger_name_prefix + in + [%log error] + "Could not generate a $ledger file at $path: $error" + ~metadata: + [ ("ledger", `String ledger_name_prefix) + ; ("path", `String tar_path) + ; ("error", Error_json.error_to_yojson err) + ] ; + return (Error err) ) )) end module Epoch_data = struct @@ -443,11 +460,12 @@ module Epoch_data = struct load_ledger config.staking.ledger in [%log trace] "Loaded staking epoch ledger from $ledger_file" - ~metadata:[("ledger_file", `String ledger_file)] ; - ( { Consensus.Genesis_epoch_data.Data.ledger= + ~metadata:[ ("ledger_file", `String ledger_file) ] ; + ( { Consensus.Genesis_epoch_data.Data.ledger = Genesis_ledger.Packed.t staking_ledger - ; seed= Epoch_seed.of_string config.staking.seed } - , {config.staking with ledger= config'} ) + ; seed = Epoch_seed.of_string config.staking.seed + } + , { config.staking with ledger = config' } ) in let%map next, config'' = match config.next with @@ -456,20 +474,21 @@ module Epoch_data = struct "Configured next epoch ledger to be the same as the staking \ epoch ledger" ; Deferred.Or_error.return (None, None) - | Some {ledger; seed} -> - let%map next_ledger, config'', ledger_file = - load_ledger ledger - in + | Some { ledger; seed } -> + let%map next_ledger, config'', ledger_file = load_ledger ledger in [%log trace] "Loaded next epoch ledger from $ledger_file" - ~metadata:[("ledger_file", `String ledger_file)] ; + ~metadata:[ ("ledger_file", `String ledger_file) ] ; ( Some - { Consensus.Genesis_epoch_data.Data.ledger= + { 
Consensus.Genesis_epoch_data.Data.ledger = Genesis_ledger.Packed.t next_ledger - ; seed= Epoch_seed.of_string seed } - , Some {Runtime_config.Epoch_data.Data.ledger= config''; seed} ) + ; seed = Epoch_seed.of_string seed + } + , Some { Runtime_config.Epoch_data.Data.ledger = config''; seed } + ) in - ( Some {Consensus.Genesis_epoch_data.staking; next} - , Some {Runtime_config.Epoch_data.staking= config'; next= config''} ) + ( Some { Consensus.Genesis_epoch_data.staking; next } + , Some { Runtime_config.Epoch_data.staking = config'; next = config'' } + ) end (* This hash encodes the data that determines a genesis proof: @@ -505,11 +524,11 @@ module Genesis_proof = struct let filename = path ^/ filename in if%map file_exists ~follow_symlinks:true filename then ( [%log info] "Found genesis proof file at $path" - ~metadata:[("path", `String filename)] ; + ~metadata:[ ("path", `String filename) ] ; Some filename ) else ( [%log info] "Genesis proof file $path does not exist" - ~metadata:[("path", `String filename)] ; + ~metadata:[ ("path", `String filename) ] ; None ) in let filename = filename ~base_hash in @@ -521,14 +540,17 @@ module Genesis_proof = struct | None -> ( let s3_path = s3_bucket_prefix ^/ filename in let local_path = Cache_dir.s3_install_path ^/ filename in - match%bind Cache_dir.load_from_s3 [s3_path] [local_path] ~logger with + match%bind + Cache_dir.load_from_s3 [ s3_path ] [ local_path ] ~logger + with | Ok () -> file_exists filename Cache_dir.s3_install_path | Error e -> [%log info] "Could not download genesis proof file from $uri" ~metadata: [ ("uri", `String s3_path) - ; ("error", Error_json.error_to_yojson e) ] ; + ; ("error", Error_json.error_to_yojson e) + ] ; return None ) let generate_inputs ~runtime_config ~proof_level ~ledger ~genesis_epoch_data @@ -547,28 +569,30 @@ module Genesis_proof = struct ; constraint_constants ; proof_level ; blockchain_proof_system_id - ; genesis_ledger= ledger + ; genesis_ledger = ledger ; genesis_epoch_data ; 
consensus_constants ; protocol_state_with_hash - ; constraint_system_digests= None - ; genesis_constants } + ; constraint_system_digests = None + ; genesis_constants + } let generate (inputs : Genesis_proof.Inputs.t) = match inputs.proof_level with | Genesis_constants.Proof_level.Full -> Deferred.return @@ Genesis_proof.create_values_no_proof - { genesis_ledger= inputs.genesis_ledger - ; genesis_epoch_data= inputs.genesis_epoch_data - ; runtime_config= inputs.runtime_config - ; proof_level= inputs.proof_level - ; blockchain_proof_system_id= None - ; constraint_system_digests= None - ; protocol_state_with_hash= inputs.protocol_state_with_hash - ; genesis_constants= inputs.genesis_constants - ; consensus_constants= inputs.consensus_constants - ; constraint_constants= inputs.constraint_constants } + { genesis_ledger = inputs.genesis_ledger + ; genesis_epoch_data = inputs.genesis_epoch_data + ; runtime_config = inputs.runtime_config + ; proof_level = inputs.proof_level + ; blockchain_proof_system_id = None + ; constraint_system_digests = None + ; protocol_state_with_hash = inputs.protocol_state_with_hash + ; genesis_constants = inputs.genesis_constants + ; consensus_constants = inputs.consensus_constants + ; constraint_constants = inputs.constraint_constants + } | _ -> Deferred.return (Genesis_proof.create_values_no_proof inputs) @@ -577,13 +601,13 @@ module Genesis_proof = struct Monitor.try_with_or_error ~here:[%here] ~extract_exn:true (fun () -> let%bind wr = Writer.open_file filename in Writer.write wr (Proof.Stable.V1.sexp_of_t proof |> Sexp.to_string) ; - Writer.close wr ) + Writer.close wr) let load filename = (* TODO: Use [Reader.load_bin_prot]. 
*) Monitor.try_with_or_error ~here:[%here] ~extract_exn:true (fun () -> Reader.file_contents filename - >>| Sexp.of_string >>| Proof.Stable.V1.t_of_sexp ) + >>| Sexp.of_string >>| Proof.Stable.V1.t_of_sexp) let id_to_json x = `String (Sexp.to_string (Pickles.Verification_key.Id.sexp_of_t x)) @@ -656,23 +680,25 @@ module Genesis_proof = struct Lazy.force B.Proof.id in Some - ( { Genesis_proof.runtime_config= inputs.runtime_config - ; constraint_constants= inputs.constraint_constants - ; proof_level= inputs.proof_level - ; genesis_constants= inputs.genesis_constants - ; genesis_ledger= inputs.genesis_ledger - ; genesis_epoch_data= inputs.genesis_epoch_data - ; consensus_constants= inputs.consensus_constants - ; protocol_state_with_hash= inputs.protocol_state_with_hash + ( { Genesis_proof.runtime_config = inputs.runtime_config + ; constraint_constants = inputs.constraint_constants + ; proof_level = inputs.proof_level + ; genesis_constants = inputs.genesis_constants + ; genesis_ledger = inputs.genesis_ledger + ; genesis_epoch_data = inputs.genesis_epoch_data + ; consensus_constants = inputs.consensus_constants + ; protocol_state_with_hash = inputs.protocol_state_with_hash ; constraint_system_digests - ; proof_data= Some {blockchain_proof_system_id; genesis_proof} + ; proof_data = + Some { blockchain_proof_system_id; genesis_proof } } , file ) | Error err -> [%log error] "Could not load genesis proof from $path: $error" ~metadata: [ ("path", `String file) - ; ("error", Error_json.error_to_yojson err) ] ; + ; ("error", Error_json.error_to_yojson err) + ] ; None ) | None -> return None @@ -690,43 +716,46 @@ module Genesis_proof = struct ~state_hash:compiled.protocol_state_with_hash.hash in [%log info] - "Base hash $computed_hash matches compile-time $compiled_hash, \ - using precomputed genesis proof" + "Base hash $computed_hash matches compile-time $compiled_hash, using \ + precomputed genesis proof" ~metadata: [ ("computed_hash", Base_hash.to_yojson base_hash) - ; 
("compiled_hash", Base_hash.to_yojson compiled_base_hash) ] ; + ; ("compiled_hash", Base_hash.to_yojson compiled_base_hash) + ] ; let filename = genesis_dir ^/ filename ~base_hash in let values = - { Genesis_proof.runtime_config= inputs.runtime_config - ; constraint_constants= inputs.constraint_constants - ; proof_level= inputs.proof_level - ; genesis_constants= inputs.genesis_constants - ; genesis_ledger= inputs.genesis_ledger - ; genesis_epoch_data= inputs.genesis_epoch_data - ; consensus_constants= inputs.consensus_constants - ; protocol_state_with_hash= inputs.protocol_state_with_hash - ; constraint_system_digests= compiled.constraint_system_digests - ; proof_data= Some proof_data } + { Genesis_proof.runtime_config = inputs.runtime_config + ; constraint_constants = inputs.constraint_constants + ; proof_level = inputs.proof_level + ; genesis_constants = inputs.genesis_constants + ; genesis_ledger = inputs.genesis_ledger + ; genesis_epoch_data = inputs.genesis_epoch_data + ; consensus_constants = inputs.consensus_constants + ; protocol_state_with_hash = inputs.protocol_state_with_hash + ; constraint_system_digests = compiled.constraint_system_digests + ; proof_data = Some proof_data + } in let%map () = match%map store ~filename proof_data.genesis_proof with | Ok () -> [%log info] "Compile-time genesis proof written to $path" - ~metadata:[("path", `String filename)] + ~metadata:[ ("path", `String filename) ] | Error err -> [%log warn] "Compile-time genesis proof could not be written to $path: \ $error" ~metadata: [ ("path", `String filename) - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] in Ok (values, filename) | None -> [%log info] "No genesis proof file was found for $base_hash, generating a new \ genesis proof" - ~metadata:[("base_hash", Base_hash.to_yojson base_hash)] ; + ~metadata:[ ("base_hash", Base_hash.to_yojson base_hash) ] ; let%bind values = generate inputs in let filename = genesis_dir ^/ filename 
~base_hash in let%map () = @@ -737,13 +766,14 @@ module Genesis_proof = struct match%map store ~filename proof_data.genesis_proof with | Ok () -> [%log info] "New genesis proof written to $path" - ~metadata:[("path", `String filename)] + ~metadata:[ ("path", `String filename) ] | Error err -> [%log warn] "Genesis proof could not be written to $path: $error" ~metadata: [ ("path", `String filename) - ; ("error", Error_json.error_to_yojson err) ] ) + ; ("error", Error_json.error_to_yojson err) + ] ) in Ok (values, filename) @@ -753,7 +783,7 @@ end let load_config_json filename = Monitor.try_with_or_error ~here:[%here] (fun () -> let%map json = Reader.file_contents filename in - Yojson.Safe.from_string json ) + Yojson.Safe.from_string json) let load_config_file filename = let open Deferred.Or_error.Let_syntax in @@ -763,7 +793,7 @@ let load_config_file filename = | Ok config -> Ok config | Error err -> - Or_error.error_string err ) + Or_error.error_string err) let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger ~proof_level (config : Runtime_config.t) = @@ -780,7 +810,9 @@ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger in [%log info] "Initializing with runtime configuration. 
Ledger name: $name" ~metadata: - [("name", ledger_name_json); ("config", Runtime_config.to_yojson config)] ; + [ ("name", ledger_name_json) + ; ("config", Runtime_config.to_yojson config) + ] ; let open Deferred.Or_error.Let_syntax in let genesis_constants = Genesis_constants.compiled in let proof_level = @@ -795,7 +827,8 @@ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger Check | None -> None) - ; Some Genesis_constants.Proof_level.compiled ] + ; Some Genesis_constants.Proof_level.compiled + ] in let constraint_constants, blockchain_proof_system_id = match config.proof with @@ -804,8 +837,7 @@ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger ( Genesis_constants.Constraint_constants.compiled , Some (Pickles.Verification_key.Id.dummy ()) ) | Some config -> - [%log info] - "Using the constraint constants from the configuration file" ; + [%log info] "Using the constraint constants from the configuration file" ; let blockchain_proof_system_id = (* We pass [None] here, which will force the constraint systems to be set up and their hashes evaluated before we can calculate the @@ -831,7 +863,8 @@ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger level $compiled_proof_level" ~metadata: [ ("proof_level", `String (str proof_level)) - ; ("compiled_proof_level", `String (str compiled)) ] ; + ; ("compiled_proof_level", `String (str compiled)) + ] ; Deferred.Or_error.errorf "Proof level %s is not compatible with compile-time proof level %s" (str proof_level) (str compiled) @@ -840,23 +873,25 @@ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger Ledger.load ~proof_level ~genesis_dir ~logger ~constraint_constants (Option.value config.ledger ~default: - { base= Named Mina_compile_config.genesis_ledger - ; num_accounts= None - ; balances= [] - ; hash= None - ; name= None - ; add_genesis_winner= None }) + { base = Named Mina_compile_config.genesis_ledger + ; num_accounts = None + 
; balances = [] + ; hash = None + ; name = None + ; add_genesis_winner = None + }) in [%log info] "Loaded genesis ledger from $ledger_file" - ~metadata:[("ledger_file", `String ledger_file)] ; + ~metadata:[ ("ledger_file", `String ledger_file) ] ; let%bind genesis_epoch_data, genesis_epoch_data_config = Epoch_data.load ~proof_level ~genesis_dir ~logger ~constraint_constants config.epoch_data in let config = { config with - ledger= Option.map config.ledger ~f:(fun _ -> ledger_config) - ; epoch_data= genesis_epoch_data_config } + ledger = Option.map config.ledger ~f:(fun _ -> ledger_config) + ; epoch_data = genesis_epoch_data_config + } in let%map genesis_constants = Deferred.return @@ -877,7 +912,7 @@ let init_from_inputs ?(genesis_dir = Cache_dir.autogen_path) ~logger in if Option.is_some values.proof_data then [%log info] "Loaded genesis proof from $proof_file" - ~metadata:[("proof_file", `String proof_file)] ; + ~metadata:[ ("proof_file", `String proof_file) ] ; values let init_from_config_file ?genesis_dir ~logger ~proof_level @@ -909,7 +944,8 @@ let upgrade_old_config ~logger filename json = ; "log-received-blocks" ; "log-txn-pool-gossip" ; "log-snark-work-gossip" - ; "log-block-creation" ] + ; "log-block-creation" + ] in let found_daemon = ref false in let old_fields, remaining_fields = @@ -917,7 +953,7 @@ let upgrade_old_config ~logger filename json = if String.equal key "daemon" then ( found_daemon := true ; false ) - else List.mem ~equal:String.equal old_fields key ) + else List.mem ~equal:String.equal old_fields key) in if List.is_empty old_fields then return json else if !found_daemon then ( @@ -929,7 +965,7 @@ let upgrade_old_config ~logger filename json = These flags are now fields in the 'daemon' object of the config \ file." 
~metadata: - [("values", `Assoc old_fields); ("filename", `String filename)] ; + [ ("values", `Assoc old_fields); ("filename", `String filename) ] ; return (`Assoc remaining_fields) ) else ( (* This file was written for the old format. Upgrade it. *) @@ -937,7 +973,7 @@ let upgrade_old_config ~logger filename json = "Automatically upgrading the config file $filename. The values \ $values have been moved to the 'daemon' object." ~metadata: - [("filename", `String filename); ("values", `Assoc old_fields)] ; + [ ("filename", `String filename); ("values", `Assoc old_fields) ] ; let upgraded_json = `Assoc (("daemon", `Assoc old_fields) :: remaining_fields) in @@ -945,8 +981,7 @@ let upgrade_old_config ~logger filename json = Deferred.Or_error.try_with ~here:[%here] (fun () -> Writer.with_file filename ~f:(fun w -> Deferred.return - @@ Writer.write w - (Yojson.Safe.pretty_to_string upgraded_json) ) ) + @@ Writer.write w (Yojson.Safe.pretty_to_string upgraded_json))) |> Deferred.ignore_m in upgraded_json ) @@ -964,5 +999,5 @@ let%test_module "Account config test" = let acc' = Accounts.Single.to_account_with_pk acc_config |> Or_error.ok_exn in - [%test_eq: Account.t] acc acc' ) + [%test_eq: Account.t] acc acc') end ) diff --git a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml index 77bcbea30af..1321c754b9b 100644 --- a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml +++ b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml @@ -34,10 +34,11 @@ module Accounts = struct ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } -> + ; vesting_increment + } -> Mina_base.Account.create_timed account_id t.balance - ~initial_minimum_balance ~cliff_time ~cliff_amount - ~vesting_period ~vesting_increment + ~initial_minimum_balance ~cliff_time ~cliff_amount ~vesting_period + ~vesting_increment |> Or_error.ok_exn | None -> Mina_base.Account.create account_id t.balance 
@@ -53,11 +54,11 @@ module Accounts = struct ; receive ; set_delegate ; set_permissions - ; set_verification_key } -> + ; set_verification_key + } -> let auth_required a = match a with - | Runtime_config.Accounts.Single.Permissions.Auth_required.None - -> + | Runtime_config.Accounts.Single.Permissions.Auth_required.None -> Mina_base.Permissions.Auth_required.None | Either -> Either @@ -71,25 +72,26 @@ module Accounts = struct Impossible in { Mina_base.Permissions.Poly.stake - ; edit_state= auth_required edit_state - ; send= auth_required send - ; receive= auth_required receive - ; set_delegate= auth_required set_delegate - ; set_permissions= auth_required set_permissions - ; set_verification_key= auth_required set_verification_key } + ; edit_state = auth_required edit_state + ; send = auth_required send + ; receive = auth_required receive + ; set_delegate = auth_required set_delegate + ; set_permissions = auth_required set_permissions + ; set_verification_key = auth_required set_verification_key + } in let token_permissions = Option.value_map t.token_permissions ~default:account.token_permissions - ~f:(fun {token_owned; disable_new_accounts; account_disabled} -> + ~f:(fun { token_owned; disable_new_accounts; account_disabled } -> if token_owned then - Mina_base.Token_permissions.Token_owned {disable_new_accounts} - else Not_owned {account_disabled} ) + Mina_base.Token_permissions.Token_owned { disable_new_accounts } + else Not_owned { account_disabled }) in let%map snapp = match t.snapp with | None -> Ok None - | Some {state; verification_key} -> + | Some { state; verification_key } -> let%bind app_state = if Pickles_types.Vector.Nat.to_int Snapp_state.Max_state_size.n @@ -105,7 +107,7 @@ module Accounts = struct (* Use a URI-safe alphabet to make life easier for maintaining json We prefer this to base58-check here because users should not be manually entering verification keys. 
- *) + *) Option.value_map ~default:(Ok None) verification_key ~f:(fun verification_key -> let%map vk = @@ -115,7 +117,7 @@ module Accounts = struct Error.createf !"Could not parse verification key account \ %{sexp:Runtime_config.Accounts.Single.t}: %s" - t s ) + t s) |> Result.map ~f: (Binable.of_string @@ -123,27 +125,26 @@ module Accounts = struct .Stable .Latest )) in - Some - (With_hash.of_data ~hash_data:Snapp_account.digest_vk vk) - ) + Some (With_hash.of_data ~hash_data:Snapp_account.digest_vk vk)) in - Some {Snapp_account.verification_key; app_state} + Some { Snapp_account.verification_key; app_state } in { account with - delegate= + delegate = (if Option.is_some delegate then delegate else account.delegate) ; token_id ; token_permissions - ; nonce= Account.Nonce.of_uint32 t.nonce - ; receipt_chain_hash= + ; nonce = Account.Nonce.of_uint32 t.nonce + ; receipt_chain_hash = Option.value_map t.receipt_chain_hash ~default:account.receipt_chain_hash ~f:Mina_base.Receipt.Chain_hash.of_base58_check_exn - ; voting_for= + ; voting_for = Option.value_map ~default:account.voting_for ~f:Mina_base.State_hash.of_base58_check_exn t.voting_for ; snapp - ; permissions } + ; permissions + } let of_account : Mina_base.Account.t @@ -156,26 +157,29 @@ module Accounts = struct None | Timed t -> Some - { Runtime_config.Accounts.Single.Timed.initial_minimum_balance= + { Runtime_config.Accounts.Single.Timed.initial_minimum_balance = t.initial_minimum_balance - ; cliff_time= t.cliff_time - ; cliff_amount= t.cliff_amount - ; vesting_period= t.vesting_period - ; vesting_increment= t.vesting_increment } + ; cliff_time = t.cliff_time + ; cliff_amount = t.cliff_amount + ; vesting_period = t.vesting_period + ; vesting_increment = t.vesting_increment + } in let token_permissions = match account.token_permissions with - | Mina_base.Token_permissions.Token_owned {disable_new_accounts} -> + | Mina_base.Token_permissions.Token_owned { disable_new_accounts } -> Some - { 
Runtime_config.Accounts.Single.Token_permissions.token_owned= + { Runtime_config.Accounts.Single.Token_permissions.token_owned = true ; disable_new_accounts - ; account_disabled= false } - | Not_owned {account_disabled} -> + ; account_disabled = false + } + | Not_owned { account_disabled } -> Some - { token_owned= false - ; disable_new_accounts= false - ; account_disabled } + { token_owned = false + ; disable_new_accounts = false + ; account_disabled + } in let permissions = let auth_required a = @@ -199,20 +203,22 @@ module Accounts = struct ; receive ; set_delegate ; set_permissions - ; set_verification_key } = + ; set_verification_key + } = account.permissions in Some { Runtime_config.Accounts.Single.Permissions.stake - ; edit_state= auth_required edit_state - ; send= auth_required send - ; receive= auth_required receive - ; set_delegate= auth_required set_delegate - ; set_permissions= auth_required set_permissions - ; set_verification_key= auth_required set_verification_key } + ; edit_state = auth_required edit_state + ; send = auth_required send + ; receive = auth_required receive + ; set_delegate = auth_required set_delegate + ; set_permissions = auth_required set_permissions + ; set_verification_key = auth_required set_verification_key + } in let snapp = - Option.map account.snapp ~f:(fun {app_state; verification_key} -> + Option.map account.snapp ~f:(fun { app_state; verification_key } -> let state = Snapp_state.V.to_list app_state in let verification_key = Option.map verification_key ~f:(fun vk -> @@ -220,46 +226,48 @@ module Accounts = struct |> Binable.to_string ( module Pickles.Side_loaded.Verification_key.Stable .Latest ) - |> Base64.encode_exn ~alphabet:Base64.uri_safe_alphabet ) + |> Base64.encode_exn ~alphabet:Base64.uri_safe_alphabet) in { Runtime_config.Accounts.Single.Snapp_account.state - ; verification_key } ) + ; verification_key + }) in - { pk= + { pk = Some (Signature_lib.Public_key.Compressed.to_base58_check account.public_key) - ; sk= 
Option.map ~f:Signature_lib.Private_key.to_base58_check sk - ; balance= account.balance - ; delegate= + ; sk = Option.map ~f:Signature_lib.Private_key.to_base58_check sk + ; balance = account.balance + ; delegate = Option.map ~f:Signature_lib.Public_key.Compressed.to_base58_check account.delegate ; timing - ; token= Some (Mina_base.Token_id.to_uint64 account.token_id) + ; token = Some (Mina_base.Token_id.to_uint64 account.token_id) ; token_permissions - ; nonce= account.nonce - ; receipt_chain_hash= + ; nonce = account.nonce + ; receipt_chain_hash = Some (Mina_base.Receipt.Chain_hash.to_base58_check account.receipt_chain_hash) - ; voting_for= + ; voting_for = Some (Mina_base.State_hash.to_base58_check account.voting_for) ; snapp - ; permissions } + ; permissions + } end let to_full : Runtime_config.Accounts.t -> (Private_key.t option * Account.t) list = List.mapi - ~f:(fun i ({Runtime_config.Accounts.pk; sk; _} as account_config) -> + ~f:(fun i ({ Runtime_config.Accounts.pk; sk; _ } as account_config) -> let sk = match sk with | Some sk -> ( - match Private_key.of_yojson (`String sk) with - | Ok sk -> - Some sk - | Error err -> - Error.(raise (of_string err)) ) + match Private_key.of_yojson (`String sk) with + | Ok sk -> + Some sk + | Error err -> + Error.(raise (of_string err)) ) | None -> None in @@ -276,10 +284,10 @@ module Accounts = struct Public_key.Compressed.gen) in let account = - Single.to_account_with_pk {account_config with pk= Some pk} + Single.to_account_with_pk { account_config with pk = Some pk } |> Or_error.ok_exn in - (sk, account) ) + (sk, account)) let gen_with_balance balance : (Private_key.t option * Account.t) Quickcheck.Generator.t = @@ -324,7 +332,7 @@ module Accounts = struct | (n, balance) :: balances_tl -> gen_balances_rev n balance balances_tl accounts in - gen_balances_rev n balance balances_tl [] ) + gen_balances_rev n balance balances_tl []) let pad_with_rev_balances balances accounts = let balances_accounts = @@ -352,7 +360,7 @@ 
module Accounts = struct List.fold ~init:([], 0) accounts ~f:(fun (acc, count) account -> let count = count + 1 in if count >= n then raise Stop ; - (account :: acc, count + 1) ) + (account :: acc, count + 1)) in (* [rev_append] is tail-recursive, and we've already reversed the list, so we can avoid calling [append] which may internally reverse the @@ -366,9 +374,7 @@ let make_constraint_constants ~(default : Genesis_constants.Constraint_constants.t) (config : Runtime_config.Proof_keys.t) : Genesis_constants.Constraint_constants.t = - let work_delay = - Option.value ~default:default.work_delay config.work_delay - in + let work_delay = Option.value ~default:default.work_delay config.work_delay in let block_window_duration_ms = Option.value ~default:default.block_window_duration_ms config.block_window_duration_ms @@ -393,8 +399,7 @@ let make_constraint_constants unit tests pass. *) 1 - + Core_kernel.Int.ceil_log2 - (max_user_commands_per_block + max_coinbases) + + Core_kernel.Int.ceil_log2 (max_user_commands_per_block + max_coinbases) | None -> default.transaction_capacity_log_2 in @@ -402,40 +407,42 @@ let make_constraint_constants Core_kernel.Int.ceil_log2 (((transaction_capacity_log_2 + 1) * (work_delay + 1)) + 1) in - { sub_windows_per_window= + { sub_windows_per_window = Option.value ~default:default.sub_windows_per_window config.sub_windows_per_window - ; ledger_depth= + ; ledger_depth = Option.value ~default:default.ledger_depth config.ledger_depth ; work_delay ; block_window_duration_ms ; transaction_capacity_log_2 ; pending_coinbase_depth - ; coinbase_amount= + ; coinbase_amount = Option.value ~default:default.coinbase_amount config.coinbase_amount - ; supercharged_coinbase_factor= + ; supercharged_coinbase_factor = Option.value ~default:default.supercharged_coinbase_factor config.supercharged_coinbase_factor - ; account_creation_fee= + ; account_creation_fee = Option.value ~default:default.account_creation_fee config.account_creation_fee - ; fork= + ; fork 
= ( match config.fork with | None -> default.fork - | Some {previous_state_hash; previous_length; previous_global_slot} -> + | Some { previous_state_hash; previous_length; previous_global_slot } -> Some - { previous_state_hash= + { previous_state_hash = State_hash.of_base58_check_exn previous_state_hash - ; previous_length= Mina_numbers.Length.of_int previous_length - ; previous_global_slot= - Mina_numbers.Global_slot.of_int previous_global_slot } ) } + ; previous_length = Mina_numbers.Length.of_int previous_length + ; previous_global_slot = + Mina_numbers.Global_slot.of_int previous_global_slot + } ) + } let runtime_config_of_constraint_constants ~(proof_level : Genesis_constants.Proof_level.t) (constraint_constants : Genesis_constants.Constraint_constants.t) : Runtime_config.Proof_keys.t = - { level= + { level = ( match proof_level with | Full -> Some Full @@ -443,25 +450,28 @@ let runtime_config_of_constraint_constants Some Check | None -> Some None ) - ; sub_windows_per_window= Some constraint_constants.sub_windows_per_window - ; ledger_depth= Some constraint_constants.ledger_depth - ; work_delay= Some constraint_constants.work_delay - ; block_window_duration_ms= + ; sub_windows_per_window = Some constraint_constants.sub_windows_per_window + ; ledger_depth = Some constraint_constants.ledger_depth + ; work_delay = Some constraint_constants.work_delay + ; block_window_duration_ms = Some constraint_constants.block_window_duration_ms - ; transaction_capacity= + ; transaction_capacity = Some (Log_2 constraint_constants.transaction_capacity_log_2) - ; coinbase_amount= Some constraint_constants.coinbase_amount - ; supercharged_coinbase_factor= + ; coinbase_amount = Some constraint_constants.coinbase_amount + ; supercharged_coinbase_factor = Some constraint_constants.supercharged_coinbase_factor - ; account_creation_fee= Some constraint_constants.account_creation_fee - ; fork= + ; account_creation_fee = Some constraint_constants.account_creation_fee + ; fork = 
Option.map constraint_constants.fork - ~f:(fun {previous_state_hash; previous_length; previous_global_slot} -> - { Runtime_config.Fork_config.previous_state_hash= + ~f:(fun { previous_state_hash; previous_length; previous_global_slot } + -> + { Runtime_config.Fork_config.previous_state_hash = State_hash.to_base58_check previous_state_hash - ; previous_length= Mina_numbers.Length.to_int previous_length - ; previous_global_slot= - Mina_numbers.Global_slot.to_int previous_global_slot } ) } + ; previous_length = Mina_numbers.Length.to_int previous_length + ; previous_global_slot = + Mina_numbers.Global_slot.to_int previous_global_slot + }) + } let make_genesis_constants ~logger ~(default : Genesis_constants.t) (config : Runtime_config.t) = @@ -479,7 +489,7 @@ let make_genesis_constants ~logger ~(default : Genesis_constants.t) [%log error] "Could not build genesis constants from the configuration file: \ $error" - ~metadata:[("error", `String msg)] ; + ~metadata:[ ("error", `String msg) ] ; Or_error.errorf "Could not build genesis constants from the configuration file: %s" msg @@ -487,57 +497,62 @@ let make_genesis_constants ~logger ~(default : Genesis_constants.t) Ok None in let open Option.Let_syntax in - { Genesis_constants.protocol= - { k= + { Genesis_constants.protocol = + { k = Option.value ~default:default.protocol.k (config.genesis >>= fun cfg -> cfg.k) - ; delta= + ; delta = Option.value ~default:default.protocol.delta (config.genesis >>= fun cfg -> cfg.delta) - ; slots_per_epoch= + ; slots_per_epoch = Option.value ~default:default.protocol.slots_per_epoch (config.genesis >>= fun cfg -> cfg.slots_per_epoch) - ; slots_per_sub_window= + ; slots_per_sub_window = Option.value ~default:default.protocol.slots_per_sub_window (config.genesis >>= fun cfg -> cfg.slots_per_sub_window) - ; genesis_state_timestamp= + ; genesis_state_timestamp = Option.value ~default:default.protocol.genesis_state_timestamp - genesis_state_timestamp } - ; txpool_max_size= + 
genesis_state_timestamp + } + ; txpool_max_size = Option.value ~default:default.txpool_max_size (config.daemon >>= fun cfg -> cfg.txpool_max_size) - ; num_accounts= + ; num_accounts = Option.value_map ~default:default.num_accounts (config.ledger >>= fun cfg -> cfg.num_accounts) - ~f:(fun num_accounts -> Some num_accounts) } + ~f:(fun num_accounts -> Some num_accounts) + } let runtime_config_of_genesis_constants (genesis_constants : Genesis_constants.t) : Runtime_config.Genesis.t = - { k= Some genesis_constants.protocol.k - ; delta= Some genesis_constants.protocol.delta - ; slots_per_epoch= Some genesis_constants.protocol.slots_per_epoch - ; slots_per_sub_window= Some genesis_constants.protocol.slots_per_sub_window - ; genesis_state_timestamp= + { k = Some genesis_constants.protocol.k + ; delta = Some genesis_constants.protocol.delta + ; slots_per_epoch = Some genesis_constants.protocol.slots_per_epoch + ; slots_per_sub_window = Some genesis_constants.protocol.slots_per_sub_window + ; genesis_state_timestamp = Some (Genesis_constants.genesis_timestamp_to_string - genesis_constants.protocol.genesis_state_timestamp) } + genesis_constants.protocol.genesis_state_timestamp) + } let runtime_config_of_precomputed_values (precomputed_values : Genesis_proof.t) : Runtime_config.t = Runtime_config.combine precomputed_values.runtime_config - { daemon= + { daemon = Some - { txpool_max_size= + { txpool_max_size = Some precomputed_values.genesis_constants.txpool_max_size - ; peer_list_url= None } - ; genesis= + ; peer_list_url = None + } + ; genesis = Some (runtime_config_of_genesis_constants precomputed_values.genesis_constants) - ; proof= + ; proof = Some (runtime_config_of_constraint_constants ~proof_level:precomputed_values.proof_level precomputed_values.constraint_constants) - ; ledger= None - ; epoch_data= None } + ; ledger = None + ; epoch_data = None + } diff --git a/src/lib/genesis_proof/genesis_proof.ml b/src/lib/genesis_proof/genesis_proof.ml index 
f7aa112e840..5d9946a5f3c 100644 --- a/src/lib/genesis_proof/genesis_proof.ml +++ b/src/lib/genesis_proof/genesis_proof.ml @@ -4,65 +4,66 @@ open Mina_state module Inputs = struct type t = - { runtime_config: Runtime_config.t - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; proof_level: Genesis_constants.Proof_level.t - ; genesis_constants: Genesis_constants.t - ; genesis_ledger: Genesis_ledger.Packed.t - ; genesis_epoch_data: Consensus.Genesis_epoch_data.t - ; consensus_constants: Consensus.Constants.t - ; protocol_state_with_hash: + { runtime_config : Runtime_config.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; proof_level : Genesis_constants.Proof_level.t + ; genesis_constants : Genesis_constants.t + ; genesis_ledger : Genesis_ledger.Packed.t + ; genesis_epoch_data : Consensus.Genesis_epoch_data.t + ; consensus_constants : Consensus.Constants.t + ; protocol_state_with_hash : (Protocol_state.value, State_hash.t) With_hash.t - ; constraint_system_digests: (string * Md5_lib.t) list option - ; blockchain_proof_system_id: + ; constraint_system_digests : (string * Md5_lib.t) list option + ; blockchain_proof_system_id : (* This is only used for calculating the hash to lookup the genesis proof with. It is re-calculated when building the blockchain prover, so it is always okay -- if less efficient at startup -- to pass [None] here. 
*) - Pickles.Verification_key.Id.t option } + Pickles.Verification_key.Id.t option + } - let runtime_config {runtime_config; _} = runtime_config + let runtime_config { runtime_config; _ } = runtime_config - let constraint_constants {constraint_constants; _} = constraint_constants + let constraint_constants { constraint_constants; _ } = constraint_constants - let genesis_constants {genesis_constants; _} = genesis_constants + let genesis_constants { genesis_constants; _ } = genesis_constants - let proof_level {proof_level; _} = proof_level + let proof_level { proof_level; _ } = proof_level let protocol_constants t = (genesis_constants t).protocol - let ledger_depth {genesis_ledger; _} = + let ledger_depth { genesis_ledger; _ } = Genesis_ledger.Packed.depth genesis_ledger include Genesis_ledger.Utils - let genesis_ledger {genesis_ledger; _} = + let genesis_ledger { genesis_ledger; _ } = Genesis_ledger.Packed.t genesis_ledger - let genesis_epoch_data {genesis_epoch_data; _} = genesis_epoch_data + let genesis_epoch_data { genesis_epoch_data; _ } = genesis_epoch_data - let accounts {genesis_ledger; _} = + let accounts { genesis_ledger; _ } = Genesis_ledger.Packed.accounts genesis_ledger - let find_new_account_record_exn {genesis_ledger; _} = + let find_new_account_record_exn { genesis_ledger; _ } = Genesis_ledger.Packed.find_new_account_record_exn genesis_ledger - let find_new_account_record_exn_ {genesis_ledger; _} = + let find_new_account_record_exn_ { genesis_ledger; _ } = Genesis_ledger.Packed.find_new_account_record_exn_ genesis_ledger - let largest_account_exn {genesis_ledger; _} = + let largest_account_exn { genesis_ledger; _ } = Genesis_ledger.Packed.largest_account_exn genesis_ledger - let largest_account_keypair_exn {genesis_ledger; _} = + let largest_account_keypair_exn { genesis_ledger; _ } = Genesis_ledger.Packed.largest_account_keypair_exn genesis_ledger - let largest_account_pk_exn {genesis_ledger; _} = + let largest_account_pk_exn { genesis_ledger; _ } = 
Genesis_ledger.Packed.largest_account_pk_exn genesis_ledger - let consensus_constants {consensus_constants; _} = consensus_constants + let consensus_constants { consensus_constants; _ } = consensus_constants - let genesis_state_with_hash {protocol_state_with_hash; _} = + let genesis_state_with_hash { protocol_state_with_hash; _ } = protocol_state_with_hash let genesis_state t = (genesis_state_with_hash t).data @@ -72,73 +73,75 @@ end module Proof_data = struct type t = - { blockchain_proof_system_id: Pickles.Verification_key.Id.t - ; genesis_proof: Proof.t } + { blockchain_proof_system_id : Pickles.Verification_key.Id.t + ; genesis_proof : Proof.t + } end module T = struct type t = - { runtime_config: Runtime_config.t - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; genesis_constants: Genesis_constants.t - ; proof_level: Genesis_constants.Proof_level.t - ; genesis_ledger: Genesis_ledger.Packed.t - ; genesis_epoch_data: Consensus.Genesis_epoch_data.t - ; consensus_constants: Consensus.Constants.t - ; protocol_state_with_hash: + { runtime_config : Runtime_config.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; genesis_constants : Genesis_constants.t + ; proof_level : Genesis_constants.Proof_level.t + ; genesis_ledger : Genesis_ledger.Packed.t + ; genesis_epoch_data : Consensus.Genesis_epoch_data.t + ; consensus_constants : Consensus.Constants.t + ; protocol_state_with_hash : (Protocol_state.value, State_hash.t) With_hash.t - ; constraint_system_digests: (string * Md5_lib.t) list Lazy.t - ; proof_data: Proof_data.t option } + ; constraint_system_digests : (string * Md5_lib.t) list Lazy.t + ; proof_data : Proof_data.t option + } - let runtime_config {runtime_config; _} = runtime_config + let runtime_config { runtime_config; _ } = runtime_config - let constraint_constants {constraint_constants; _} = constraint_constants + let constraint_constants { constraint_constants; _ } = constraint_constants - let genesis_constants 
{genesis_constants; _} = genesis_constants + let genesis_constants { genesis_constants; _ } = genesis_constants - let proof_level {proof_level; _} = proof_level + let proof_level { proof_level; _ } = proof_level let protocol_constants t = (genesis_constants t).protocol - let ledger_depth {genesis_ledger; _} = + let ledger_depth { genesis_ledger; _ } = Genesis_ledger.Packed.depth genesis_ledger include Genesis_ledger.Utils - let genesis_ledger {genesis_ledger; _} = + let genesis_ledger { genesis_ledger; _ } = Genesis_ledger.Packed.t genesis_ledger - let genesis_epoch_data {genesis_epoch_data; _} = genesis_epoch_data + let genesis_epoch_data { genesis_epoch_data; _ } = genesis_epoch_data - let accounts {genesis_ledger; _} = + let accounts { genesis_ledger; _ } = Genesis_ledger.Packed.accounts genesis_ledger - let find_new_account_record_exn {genesis_ledger; _} = + let find_new_account_record_exn { genesis_ledger; _ } = Genesis_ledger.Packed.find_new_account_record_exn genesis_ledger - let find_new_account_record_exn_ {genesis_ledger; _} = + let find_new_account_record_exn_ { genesis_ledger; _ } = Genesis_ledger.Packed.find_new_account_record_exn_ genesis_ledger - let largest_account_exn {genesis_ledger; _} = + let largest_account_exn { genesis_ledger; _ } = Genesis_ledger.Packed.largest_account_exn genesis_ledger - let largest_account_keypair_exn {genesis_ledger; _} = + let largest_account_keypair_exn { genesis_ledger; _ } = Genesis_ledger.Packed.largest_account_keypair_exn genesis_ledger - let largest_account_pk_exn {genesis_ledger; _} = + let largest_account_pk_exn { genesis_ledger; _ } = Genesis_ledger.Packed.largest_account_pk_exn genesis_ledger - let consensus_constants {consensus_constants; _} = consensus_constants + let consensus_constants { consensus_constants; _ } = consensus_constants - let genesis_state_with_hash {protocol_state_with_hash; _} = + let genesis_state_with_hash { protocol_state_with_hash; _ } = protocol_state_with_hash let genesis_state t = 
(genesis_state_with_hash t).data let genesis_state_hash t = (genesis_state_with_hash t).hash - let genesis_proof {proof_data; _} = - Option.map proof_data ~f:(fun {Proof_data.genesis_proof= p; _} -> p) + let genesis_proof { proof_data; _ } = + Option.map proof_data ~f:(fun { Proof_data.genesis_proof = p; _ } -> p) end include T @@ -155,20 +158,22 @@ let base_proof (module B : Blockchain_snark.Blockchain_snark_state.S) in let curr = t.protocol_state_with_hash.data in let dummy_txn_stmt : Transaction_snark.Statement.With_sok.t = - { sok_digest= Mina_base.Sok_message.Digest.default - ; source= + { sok_digest = Mina_base.Sok_message.Digest.default + ; source = Blockchain_state.snarked_ledger_hash (Protocol_state.blockchain_state prev_state) - ; target= + ; target = Blockchain_state.snarked_ledger_hash (Protocol_state.blockchain_state curr) - ; supply_increase= Currency.Amount.zero - ; fee_excess= Fee_excess.zero - ; next_available_token_before= Token_id.(next default) - ; next_available_token_after= Token_id.(next default) - ; pending_coinbase_stack_state= - { source= Mina_base.Pending_coinbase.Stack.empty - ; target= Mina_base.Pending_coinbase.Stack.empty } } + ; supply_increase = Currency.Amount.zero + ; fee_excess = Fee_excess.zero + ; next_available_token_before = Token_id.(next default) + ; next_available_token_after = Token_id.(next default) + ; pending_coinbase_stack_state = + { source = Mina_base.Pending_coinbase.Stack.empty + ; target = Mina_base.Pending_coinbase.Stack.empty + } + } in let genesis_epoch_ledger = match t.genesis_epoch_data with @@ -184,11 +189,12 @@ let base_proof (module B : Blockchain_snark.Blockchain_snark_state.S) ~handler: (Consensus.Data.Prover_state.precomputed_handler ~constraint_constants ~genesis_epoch_ledger) - { transition= + { transition = Snark_transition.genesis ~constraint_constants ~consensus_constants ~genesis_ledger - ; prev_state } - [(prev_state, blockchain_dummy); (dummy_txn_stmt, txn_dummy)] + ; prev_state + } + [ 
(prev_state, blockchain_dummy); (dummy_txn_stmt, txn_dummy) ] t.protocol_state_with_hash.data let digests (module T : Transaction_snark.S) @@ -217,53 +223,57 @@ let blockchain_snark_state (inputs : Inputs.t) : let create_values txn b (t : Inputs.t) = let%map.Async.Deferred genesis_proof = base_proof b t in - { runtime_config= t.runtime_config - ; constraint_constants= t.constraint_constants - ; proof_level= t.proof_level - ; genesis_constants= t.genesis_constants - ; genesis_ledger= t.genesis_ledger - ; genesis_epoch_data= t.genesis_epoch_data - ; consensus_constants= t.consensus_constants - ; protocol_state_with_hash= t.protocol_state_with_hash - ; constraint_system_digests= digests txn b - ; proof_data= + { runtime_config = t.runtime_config + ; constraint_constants = t.constraint_constants + ; proof_level = t.proof_level + ; genesis_constants = t.genesis_constants + ; genesis_ledger = t.genesis_ledger + ; genesis_epoch_data = t.genesis_epoch_data + ; consensus_constants = t.consensus_constants + ; protocol_state_with_hash = t.protocol_state_with_hash + ; constraint_system_digests = digests txn b + ; proof_data = Some - { blockchain_proof_system_id= + { blockchain_proof_system_id = (let (module B) = b in Lazy.force B.Proof.id) - ; genesis_proof } } + ; genesis_proof + } + } let create_values_no_proof (t : Inputs.t) = - { runtime_config= t.runtime_config - ; constraint_constants= t.constraint_constants - ; proof_level= t.proof_level - ; genesis_constants= t.genesis_constants - ; genesis_ledger= t.genesis_ledger - ; genesis_epoch_data= t.genesis_epoch_data - ; consensus_constants= t.consensus_constants - ; protocol_state_with_hash= t.protocol_state_with_hash - ; constraint_system_digests= + { runtime_config = t.runtime_config + ; constraint_constants = t.constraint_constants + ; proof_level = t.proof_level + ; genesis_constants = t.genesis_constants + ; genesis_ledger = t.genesis_ledger + ; genesis_epoch_data = t.genesis_epoch_data + ; consensus_constants = 
t.consensus_constants + ; protocol_state_with_hash = t.protocol_state_with_hash + ; constraint_system_digests = lazy (let txn, b = blockchain_snark_state t in Lazy.force (digests txn b)) - ; proof_data= None } + ; proof_data = None + } let to_inputs (t : t) : Inputs.t = - { runtime_config= t.runtime_config - ; constraint_constants= t.constraint_constants - ; proof_level= t.proof_level - ; genesis_constants= t.genesis_constants - ; genesis_ledger= t.genesis_ledger - ; genesis_epoch_data= t.genesis_epoch_data - ; consensus_constants= t.consensus_constants - ; protocol_state_with_hash= t.protocol_state_with_hash - ; constraint_system_digests= + { runtime_config = t.runtime_config + ; constraint_constants = t.constraint_constants + ; proof_level = t.proof_level + ; genesis_constants = t.genesis_constants + ; genesis_ledger = t.genesis_ledger + ; genesis_epoch_data = t.genesis_epoch_data + ; consensus_constants = t.consensus_constants + ; protocol_state_with_hash = t.protocol_state_with_hash + ; constraint_system_digests = ( if Lazy.is_val t.constraint_system_digests then Some (Lazy.force t.constraint_system_digests) else None ) - ; blockchain_proof_system_id= + ; blockchain_proof_system_id = ( match t.proof_data with - | Some {blockchain_proof_system_id; _} -> + | Some { blockchain_proof_system_id; _ } -> Some blockchain_proof_system_id | None -> - None ) } + None ) + } diff --git a/src/lib/global_signer_private_key/global_signer_private_key.ml b/src/lib/global_signer_private_key/global_signer_private_key.ml index 660b8647a85..fba933467df 100644 --- a/src/lib/global_signer_private_key/global_signer_private_key.ml +++ b/src/lib/global_signer_private_key/global_signer_private_key.ml @@ -3,4 +3,4 @@ let t = Snark_params.Tock.Field.one -let mode : [`Dev | `Prod] = `Prod +let mode : [ `Dev | `Prod ] = `Prod diff --git a/src/lib/gossip_net/fake.ml b/src/lib/gossip_net/fake.ml index db2d5fc622e..8edfc231e83 100644 --- a/src/lib/gossip_net/fake.ml +++ 
b/src/lib/gossip_net/fake.ml @@ -24,34 +24,36 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : module Network = struct type rpc_hook = - { hook: + { hook : 'q 'r. Peer.Id.t -> ('q, 'r) rpc -> 'q - -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t } + -> 'r Mina_base.Rpc_intf.rpc_response Deferred.t + } type network_interface = - { broadcast_message_writer: + { broadcast_message_writer : ( Message.msg Envelope.Incoming.t * Mina_net2.Validation_callback.t , Strict_pipe.crash Strict_pipe.buffered , unit ) Strict_pipe.Writer.t - ; rpc_hook: rpc_hook } + ; rpc_hook : rpc_hook + } - type node = {peer: Peer.t; mutable interface: network_interface option} + type node = { peer : Peer.t; mutable interface : network_interface option } - type t = {nodes: (Peer.Id.t, node list) Hashtbl.t} + type t = { nodes : (Peer.Id.t, node list) Hashtbl.t } let create peers = let nodes = Hashtbl.create (module Peer.Id) in List.iter peers ~f:(fun peer -> Hashtbl.add_multi nodes ~key:peer.Peer.peer_id - ~data:{peer; interface= None} ) ; - {nodes} + ~data:{ peer; interface = None }) ; + { nodes } - let get_initial_peers {nodes} local_ip = + let get_initial_peers { nodes } local_ip = Hashtbl.data nodes |> List.concat |> List.filter_map ~f:(fun node -> if Unix.Inet_addr.equal node.peer.host local_ip then None - else Some node.peer ) + else Some node.peer) let lookup_node t peer = let error = Error.of_string "peer does not exist" in @@ -85,9 +87,10 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Strict_pipe.Writer.write intf.broadcast_message_writer ( msg , Mina_net2.Validation_callback.create_without_expiration - () ) ) ) ) + () )))) - let call_rpc : type q r. + let call_rpc : + type q r. 
t -> _ -> sender_id:Peer.Id.t @@ -111,25 +114,27 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : module Instance = struct type t = - { network: Network.t - ; me: Peer.t - ; rpc_handlers: rpc_handler list - ; peer_table: (Peer.Id.t, Peer.t) Hashtbl.t - ; initial_peers: Peer.t list - ; connection_gating: Mina_net2.connection_gating ref - ; received_message_reader: + { network : Network.t + ; me : Peer.t + ; rpc_handlers : rpc_handler list + ; peer_table : (Peer.Id.t, Peer.t) Hashtbl.t + ; initial_peers : Peer.t list + ; connection_gating : Mina_net2.connection_gating ref + ; received_message_reader : (Message.msg Envelope.Incoming.t * Mina_net2.Validation_callback.t) Strict_pipe.Reader.t - ; received_message_writer: + ; received_message_writer : ( Message.msg Envelope.Incoming.t * Mina_net2.Validation_callback.t , Strict_pipe.crash Strict_pipe.buffered , unit ) Strict_pipe.Writer.t - ; ban_notification_reader: ban_notification Linear_pipe.Reader.t - ; ban_notification_writer: ban_notification Linear_pipe.Writer.t } + ; ban_notification_reader : ban_notification Linear_pipe.Reader.t + ; ban_notification_writer : ban_notification Linear_pipe.Writer.t + } let rpc_hook t rpc_handlers = - let hook : type q r. + let hook : + type q r. 
Peer.Id.t -> (q, r) rpc -> q @@ -148,7 +153,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : match List.find_map rpc_handlers ~f:(fun handler -> match_handler handler rpc ~do_:(fun f -> - f sender ~version:latest_version query ) ) + f sender ~version:latest_version query)) with | None -> failwith "fake gossip net error: rpc not implemented" @@ -157,13 +162,13 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Mina_base.Rpc_intf.Connected (Envelope.Incoming.wrap_peer ~data:(Ok response) ~sender) in - Network.{hook} + Network.{ hook } let create network me rpc_handlers = let initial_peers = Network.get_initial_peers network me.Peer.host in let peer_table = Hashtbl.create (module Peer.Id) in List.iter initial_peers ~f:(fun peer -> - Hashtbl.add_exn peer_table ~key:peer.peer_id ~data:peer ) ; + Hashtbl.add_exn peer_table ~key:peer.peer_id ~data:peer) ; let received_message_reader, received_message_writer = Strict_pipe.(create (Buffered (`Capacity 5, `Overflow Crash))) in @@ -176,20 +181,24 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ; rpc_handlers ; peer_table ; initial_peers - ; connection_gating= - ref Mina_net2.{banned_peers= []; trusted_peers= []; isolate= false} + ; connection_gating = + ref + Mina_net2. 
+ { banned_peers = []; trusted_peers = []; isolate = false } ; received_message_reader ; received_message_writer ; ban_notification_reader - ; ban_notification_writer } + ; ban_notification_writer + } in Network.( attach_interface network me - { broadcast_message_writer= received_message_writer - ; rpc_hook= rpc_hook t rpc_handlers }) ; + { broadcast_message_writer = received_message_writer + ; rpc_hook = rpc_hook t rpc_handlers + }) ; t - let peers {peer_table; _} = Hashtbl.data peer_table |> Deferred.return + let peers { peer_table; _ } = Hashtbl.data peer_table |> Deferred.return let set_node_status _ _ = Deferred.Or_error.ok_unit @@ -219,10 +228,10 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : let on_first_high_connectivity _ ~f:_ = Deferred.never () - let received_message_reader {received_message_reader; _} = + let received_message_reader { received_message_reader; _ } = received_message_reader - let ban_notification_reader {ban_notification_reader; _} = + let ban_notification_reader { ban_notification_reader; _ } = ban_notification_reader let query_peer ?heartbeat_timeout:_ ?timeout:_ t peer rpc query = @@ -234,22 +243,21 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Deferred.List.map ?how qs ~f:(query_peer ?timeout ?heartbeat_timeout t peer rpc) in - with_return (fun {return} -> + with_return (fun { return } -> let data = List.map rs ~f:(function | Connected x -> x.data | Failed_to_connect e -> - return (Mina_base.Rpc_intf.Failed_to_connect e) ) + return (Mina_base.Rpc_intf.Failed_to_connect e)) |> Or_error.all in let sender = Option.value_exn (Hashtbl.find t.peer_table peer) - ~error: - (Error.createf "failed to find peer %s in peer_table" peer) + ~error:(Error.createf "failed to find peer %s in peer_table" peer) in - Connected (Envelope.Incoming.wrap_peer ~data ~sender) ) + Connected (Envelope.Incoming.wrap_peer ~data ~sender)) let query_random_peers _ = failwith "TODO stub" diff --git 
a/src/lib/gossip_net/intf.ml b/src/lib/gossip_net/intf.ml index 716e32f16d3..e52653868b5 100644 --- a/src/lib/gossip_net/intf.ml +++ b/src/lib/gossip_net/intf.ml @@ -4,10 +4,10 @@ open Network_peer open Pipe_lib open Mina_base.Rpc_intf -type ban_creator = {banned_peer: Peer.t; banned_until: Time.t} +type ban_creator = { banned_peer : Peer.t; banned_until : Time.t } [@@deriving fields] -type ban_notification = {banned_peer: Peer.t; banned_until: Time.t} +type ban_notification = { banned_peer : Peer.t; banned_until : Time.t } module type Gossip_net_intf = sig type t diff --git a/src/lib/gossip_net/libp2p.ml b/src/lib/gossip_net/libp2p.ml index 214b5ab7bc0..1c5d09d186f 100644 --- a/src/lib/gossip_net/libp2p.ml +++ b/src/lib/gossip_net/libp2p.ml @@ -1,5 +1,4 @@ -[%%import -"../../config.mlh"] +[%%import "../../config.mlh"] open Core open Async @@ -20,25 +19,26 @@ end module Config = struct type t = - { timeout: Time.Span.t - ; initial_peers: Mina_net2.Multiaddr.t list - ; addrs_and_ports: Node_addrs_and_ports.t - ; metrics_port: string option - ; conf_dir: string - ; chain_id: string - ; logger: Logger.t - ; unsafe_no_trust_ip: bool - ; isolate: bool - ; trust_system: Trust_system.t - ; flooding: bool - ; direct_peers: Mina_net2.Multiaddr.t list - ; peer_exchange: bool - ; mina_peer_exchange: bool - ; seed_peer_list_url: Uri.t option - ; max_connections: int - ; validation_queue_size: int - ; mutable keypair: Mina_net2.Keypair.t option - ; all_peers_seen_metric: bool } + { timeout : Time.Span.t + ; initial_peers : Mina_net2.Multiaddr.t list + ; addrs_and_ports : Node_addrs_and_ports.t + ; metrics_port : string option + ; conf_dir : string + ; chain_id : string + ; logger : Logger.t + ; unsafe_no_trust_ip : bool + ; isolate : bool + ; trust_system : Trust_system.t + ; flooding : bool + ; direct_peers : Mina_net2.Multiaddr.t list + ; peer_exchange : bool + ; mina_peer_exchange : bool + ; seed_peer_list_url : Uri.t option + ; max_connections : int + ; 
validation_queue_size : int + ; mutable keypair : Mina_net2.Keypair.t option + ; all_peers_seen_metric : bool + } [@@deriving make] end @@ -65,29 +65,31 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : module T = struct type t = - { config: Config.t - ; mutable added_seeds: Peer.Hash_set.t - ; net2: Mina_net2.net Deferred.t ref - ; first_peer_ivar: unit Ivar.t - ; high_connectivity_ivar: unit Ivar.t - ; ban_reader: Intf.ban_notification Linear_pipe.Reader.t - ; message_reader: + { config : Config.t + ; mutable added_seeds : Peer.Hash_set.t + ; net2 : Mina_net2.net Deferred.t ref + ; first_peer_ivar : unit Ivar.t + ; high_connectivity_ivar : unit Ivar.t + ; ban_reader : Intf.ban_notification Linear_pipe.Reader.t + ; message_reader : (Message.msg Envelope.Incoming.t * Mina_net2.Validation_callback.t) Strict_pipe.Reader.t - ; subscription: + ; subscription : Message.msg Mina_net2.Pubsub.Subscription.t Deferred.t ref - ; restart_helper: unit -> unit } + ; restart_helper : unit -> unit + } let create_rpc_implementations - (Rpc_handler {rpc; f= handler; cost; budget}) = + (Rpc_handler { rpc; f = handler; cost; budget }) = let (module Impl) = implementation_of_rpc rpc in let logger = Logger.create () in let log_rate_limiter_occasionally rl = let t = Time.Span.of_min 1. 
in every t (fun () -> [%log' debug logger] - ~metadata:[("rate_limiter", Network_pool.Rate_limiter.summary rl)] - !"%s $rate_limiter" Impl.name ) + ~metadata: + [ ("rate_limiter", Network_pool.Rate_limiter.summary rl) ] + !"%s $rate_limiter" Impl.name) in let rl = Network_pool.Rate_limiter.create ~capacity:budget in log_rate_limiter_occasionally rl ; @@ -121,10 +123,9 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : don't_wait_for (Pipe.iter underlying_r ~f:(fun msg -> Pipe.write_without_pushback_if_open read_w msg ; - Deferred.unit )) ; + Deferred.unit)) ; let transport = - Async_rpc_kernel.Pipe_transport.( - create Kind.string read_r underlying_w) + Async_rpc_kernel.Pipe_transport.(create Kind.string read_r underlying_w) in transport @@ -157,7 +158,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Mina_net2.create ~all_peers_seen_metric:config.all_peers_seen_metric ~logger:config.logger ~conf_dir ~pids - ~on_unexpected_termination ) ) + ~on_unexpected_termination)) with | Ok (Ok net2) -> ( let open Mina_net2 in @@ -176,18 +177,19 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : , `String (Unix.Inet_addr.to_string config.addrs_and_ports.external_ip) ) - ; ("port", `Int config.addrs_and_ports.libp2p_port) ] ; + ; ("port", `Int config.addrs_and_ports.libp2p_port) + ] ; ( match config.addrs_and_ports.peer with | Some _ -> () | None -> - config.addrs_and_ports.peer - <- Some - (Peer.create config.addrs_and_ports.bind_ip - ~libp2p_port:config.addrs_and_ports.libp2p_port - ~peer_id:my_peer_id) ) ; + config.addrs_and_ports.peer <- + Some + (Peer.create config.addrs_and_ports.bind_ip + ~libp2p_port:config.addrs_and_ports.libp2p_port + ~peer_id:my_peer_id) ) ; [%log' info config.logger] "libp2p peer ID this session is $peer_id" - ~metadata:[("peer_id", `String my_peer_id)] ; + ~metadata:[ ("peer_id", `String my_peer_id) ] ; let ctr = ref 0 in let initializing_libp2p_result : _ Deferred.Or_error.t = [%log' debug 
config.logger] "(Re)initializing libp2p result" ; @@ -207,7 +209,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ~f: (Fn.compose Mina_net2.Multiaddr.of_string Peer.to_multiaddr_string) - (Hash_set.to_list added_seeds) ]) + (Hash_set.to_list added_seeds) + ]) in let%bind () = configure net2 ~me ~logger:config.logger @@ -216,7 +219,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : [ Multiaddr.of_string (sprintf "/ip4/0.0.0.0/tcp/%d" (Option.value_exn config.addrs_and_ports.peer) - .libp2p_port) ] + .libp2p_port) + ] ~external_maddr: (Multiaddr.of_string (sprintf "/ip4/%s/tcp/%d" @@ -234,18 +238,19 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ~validation_queue_size:config.validation_queue_size ~initial_gating_config: Mina_net2. - { banned_peers= + { banned_peers = Trust_system.peer_statuses config.trust_system |> List.filter_map ~f:(fun (peer, status) -> match status.banned with | Banned_until _ -> Some peer | _ -> - None ) - ; trusted_peers= + None) + ; trusted_peers = List.filter_map ~f:Mina_net2.Multiaddr.to_peer config.initial_peers - ; isolate= config.isolate } + ; isolate = config.isolate + } ~on_peer_connected:(fun _ -> record_peer_connection ()) ~on_peer_disconnected:ignore in @@ -263,7 +268,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ( "Attempt to make unknown (fixed-version) RPC \ call \"$rpc\" with version $version" , [ ("rpc", `String rpc_tag) - ; ("version", `Int version) ] ) )) ; + ; ("version", `Int version) + ] ) )) ; `Close_connection in Rpc.Implementations.create_exn @@ -282,7 +288,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ~connection_state:(Fn.const peer) ~description: (Info.of_thunk (fun () -> - sprintf "stream from %s" peer.peer_id )) + sprintf "stream from %s" peer.peer_id)) transport with | Error handshake_error -> @@ -297,8 +303,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : , Some ( "Handshake error: 
$exn" , [ ( "exn" - , `String (Exn.to_string handshake_error) - ) ] ) )) + , `String (Exn.to_string handshake_error) ) + ] ) )) | Ok rpc_connection -> ( let%bind () = Async_rpc_kernel.Rpc.Connection.close_finished @@ -314,9 +320,10 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : [%log' warn config.logger] "failed to reset stream (this means it was \ probably closed successfully): $error" - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata: + [ ("error", Error_json.error_to_yojson e) ] | Ok () -> - () ) ) + () )) in let message_reader, message_writer = Strict_pipe.( @@ -335,8 +342,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : (* Messages from ourselves are valid. Don't try and reingest them. *) match Envelope.Incoming.sender envelope with | Local -> - Mina_net2.Validation_callback.fire_exn - validation_callback `Accept ; + Mina_net2.Validation_callback.fire_exn validation_callback + `Accept ; Deferred.unit | Remote sender -> if not (Peer.Id.equal sender.peer_id my_peer_id) then @@ -345,7 +352,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : else ( Mina_net2.Validation_callback.fire_exn validation_callback `Accept ; - Deferred.unit ) ) + Deferred.unit )) ~bin_prot:Message.Latest.T.bin_msg ~on_decode_failure: (`Call @@ -356,7 +363,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : in let metadata = [ ("sender_peer_id", `String peer.peer_id) - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] in Trust_system.( record config.trust_system config.logger peer @@ -365,7 +373,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : , Some ("failed to decode gossip message", metadata) )) |> don't_wait_for ; - () )) + ())) in (* #4097 fix: drain the published message pipe, which we don't care about. 
*) don't_wait_for @@ -380,14 +388,13 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : (sprintf "/ip4/%s/tcp/%d" ( config.addrs_and_ports.bind_ip |> Unix.Inet_addr.to_string ) - (Option.value_exn config.addrs_and_ports.peer) - .libp2p_port)) + (Option.value_exn config.addrs_and_ports.peer).libp2p_port)) in let add_many xs ~seed = Deferred.map (Deferred.List.iter ~how:`Parallel xs ~f:(fun x -> let open Deferred.Let_syntax in - Mina_net2.add_peer ~seed net2 x >>| ignore )) + Mina_net2.add_peer ~seed net2 x >>| ignore)) ~f:(fun () -> Ok ()) in don't_wait_for @@ -400,7 +407,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : in add_many ~seed:false (List.filter !peers_snapshot ~f:(fun p -> - not (Hash_set.mem seeds (Multiaddr.to_string p)) )) + not (Hash_set.mem seeds (Multiaddr.to_string p)))) in let%bind () = Mina_net2.begin_advertising net2 in return ()) @@ -410,7 +417,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : | Error e -> [%log' warn config.logger] "starting libp2p up failed: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] )) ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ])) ; (subscription, message_reader) in match%map initializing_libp2p_result with @@ -463,18 +470,17 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : upon (after restart_after) (fun () -> don't_wait_for (let%bind () = Mina_net2.shutdown n in - on_unexpected_termination ()) ) + on_unexpected_termination ())) | None -> () ) ; - n ) ; + n) ; subscription_ref := Deferred.map res ~f:(fun (_, s, _, _) -> s) ; upon res (fun (_, _, m, me) -> (* This is a hack so that we keep the same keypair across restarts. 
*) config.keypair <- Some me ; let logger = config.logger in [%log trace] ~metadata:[] "Successfully restarted libp2p" ; - don't_wait_for (Strict_pipe.transfer m message_writer ~f:Fn.id) - ) + don't_wait_for (Strict_pipe.transfer m message_writer ~f:Fn.id)) and on_unexpected_termination () = on_libp2p_create (create_libp2p config rpc_handlers first_peer_ivar @@ -484,8 +490,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : in let res = create_libp2p config rpc_handlers first_peer_ivar - high_connectivity_ivar ~added_seeds ~pids - ~on_unexpected_termination + high_connectivity_ivar ~added_seeds ~pids ~on_unexpected_termination in on_libp2p_create res ; don't_wait_for @@ -493,12 +498,12 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : let%bind n = !net2_ref in let%bind () = Mina_net2.shutdown n in let%bind () = on_unexpected_termination () in - !net2_ref >>| ignore )) ; + !net2_ref >>| ignore)) ; let%map _ = res in () in let ban_configuration = - ref {Mina_net2.banned_peers= []; trusted_peers= []; isolate= false} + ref { Mina_net2.banned_peers = []; trusted_peers = []; isolate = false } in let do_ban (banned_peer, expiration) = don't_wait_for @@ -507,15 +512,17 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : let%bind net2 = !net2_ref in ban_configuration := { !ban_configuration with - banned_peers= + banned_peers = List.filter !ban_configuration.banned_peers ~f:(fun p -> - not (Peer.equal p banned_peer) ) } ; + not (Peer.equal p banned_peer)) + } ; Mina_net2.set_connection_gating_config net2 !ban_configuration |> Deferred.ignore_m ) ; (let%bind net2 = !net2_ref in ban_configuration := { !ban_configuration with - banned_peers= banned_peer :: !ban_configuration.banned_peers } ; + banned_peers = banned_peer :: !ban_configuration.banned_peers + } ; Mina_net2.set_connection_gating_config net2 !ban_configuration) |> Deferred.ignore_m in @@ -523,11 +530,11 @@ module Make (Rpc_intf : 
Mina_base.Rpc_intf.Rpc_interface_intf) : Deferred.List.iter (Trust_system.peer_statuses config.trust_system) ~f:(function | ( addr - , {banned= Trust_system.Banned_status.Banned_until expiration; _} ) - -> + , { banned = Trust_system.Banned_status.Banned_until expiration; _ } + ) -> do_ban (addr, expiration) | _ -> - Deferred.unit ) + Deferred.unit) in let ban_reader, ban_writer = Linear_pipe.create () in don't_wait_for @@ -540,13 +547,14 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : let t = { config ; added_seeds - ; net2= net2_ref + ; net2 = net2_ref ; first_peer_ivar ; high_connectivity_ivar - ; subscription= subscription_ref + ; subscription = subscription_ref ; message_reader ; ban_reader - ; restart_helper= (fun () -> Strict_pipe.Writer.write restarts_w ()) } + ; restart_helper = (fun () -> Strict_pipe.Writer.write restarts_w ()) + } in Clock.every' peers_snapshot_max_staleness (fun () -> let%map peers = peers t in @@ -556,7 +564,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : List.map peers ~f: (Fn.compose Mina_net2.Multiaddr.of_string - Peer.to_multiaddr_string) ) ; + Peer.to_multiaddr_string)) ; t let set_node_status t data = @@ -587,7 +595,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Hash_set.(diff (Peer.Hash_set.of_list peers) except |> to_list) n - let try_call_rpc_with_dispatch : type r q. + let try_call_rpc_with_dispatch : + type r q. ?heartbeat_timeout:Time_ns.Span.t -> ?timeout:Time.Span.t -> rpc_name:string @@ -607,7 +616,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ~send_every:(Time_ns.Span.of_sec 10.) ~timeout: (Option.value ~default:(Time_ns.Span.of_sec 120.) 
- heartbeat_timeout) ()) + heartbeat_timeout) + ()) ~connection_state:(Fn.const ()) ~dispatch_queries:(fun conn -> Versioned_rpc.Connection_with_menu.create conn @@ -620,7 +630,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : Deferred.choose [ Deferred.choice d Fn.id ; choice (after timeout) (fun () -> - Or_error.error_string "rpc timed out" ) ] ) + Or_error.error_string "rpc timed out") + ]) transport ~on_handshake_error: (`Call @@ -632,9 +643,9 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ( Outgoing_connection_error , Some ( "Handshake error: $exn" - , [("exn", `String (Exn.to_string exn))] ) )) + , [ ("exn", `String (Exn.to_string exn)) ] ) )) in - Or_error.error_string "handshake error" )) ) + Or_error.error_string "handshake error"))) >>= function | Ok (Ok result) -> (* call succeeded, result is valid *) @@ -644,16 +655,19 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : [%log' warn t.config.logger] "RPC call error for $rpc" ~metadata: [ ("rpc", `String rpc_name) - ; ("error", Error_json.error_to_yojson err) ] ; + ; ("error", Error_json.error_to_yojson err) + ] ; match (Error.to_exn err, Error.sexp_of_t err) with | ( _ , Sexp.List [ Sexp.List [ Sexp.Atom "rpc_error" - ; Sexp.List [Sexp.Atom "Connection_closed"; _] ] + ; Sexp.List [ Sexp.Atom "Connection_closed"; _ ] + ] ; _connection_description ; _rpc_tag - ; _rpc_version ] ) -> + ; _rpc_version + ] ) -> let%map () = Trust_system.( record t.config.trust_system t.config.logger peer @@ -670,7 +684,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ( Outgoing_connection_error , Some ( "RPC call failed, reason: $exn" - , [("exn", Error_json.error_to_yojson err)] ) )) + , [ ("exn", Error_json.error_to_yojson err) ] ) )) in Error err ) | Error monitor_exn -> @@ -684,19 +698,22 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : "RPC call for $rpc raised an exception" ~metadata: [ ("rpc", `String rpc_name) - ; ("exn", 
`String (Exn.to_string exn)) ] + ; ("exn", `String (Exn.to_string exn)) + ] | _ -> [%log' warn t.config.logger] "RPC call for $rpc raised an exception" ~metadata: [ ("rpc", `String rpc_name) - ; ("exn", `String (Exn.to_string exn)) ] + ; ("exn", `String (Exn.to_string exn)) + ] in Deferred.return (Or_error.of_exn exn) in call () - let try_call_rpc : type q r. + let try_call_rpc : + type q r. ?heartbeat_timeout:Time_ns.Span.t -> ?timeout:Time.Span.t -> t @@ -707,8 +724,8 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : -> r Deferred.Or_error.t = fun ?heartbeat_timeout ?timeout t peer transport rpc query -> let (module Impl) = implementation_of_rpc rpc in - try_call_rpc_with_dispatch ?heartbeat_timeout ?timeout - ~rpc_name:Impl.name t peer transport Impl.dispatch_multi query + try_call_rpc_with_dispatch ?heartbeat_timeout ?timeout ~rpc_name:Impl.name + t peer transport Impl.dispatch_multi query let query_peer ?heartbeat_timeout ?timeout t (peer_id : Peer.Id.t) rpc rpc_input = @@ -740,7 +757,7 @@ module Make (Rpc_intf : Mina_base.Rpc_intf.Rpc_interface_intf) : ~rpc_name:Impl.name t peer transport (fun conn qs -> Deferred.Or_error.List.map ?how qs ~f:(fun q -> - Impl.dispatch_multi conn q ) ) + Impl.dispatch_multi conn q)) qs >>| fun data -> Connected (Envelope.Incoming.wrap_peer ~data ~sender:peer) diff --git a/src/lib/gossip_net/message.ml b/src/lib/gossip_net/message.ml index 0f94c0420b9..5634c9ff262 100644 --- a/src/lib/gossip_net/message.ml +++ b/src/lib/gossip_net/message.ml @@ -27,7 +27,7 @@ module V1 = struct | New_state of External_transition.Stable.V1.t | Snark_pool_diff of Snark_pool.Diff_versioned.Stable.V1.t | Transaction_pool_diff of Transaction_pool.Diff_versioned.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] let callee_model_of_msg = Fn.id @@ -47,5 +47,4 @@ end module Latest = V1 -[%%define_locally -Latest.(summary)] +[%%define_locally Latest.(summary)] diff --git 
a/src/lib/graphql_lib/base_types.ml b/src/lib/graphql_lib/base_types.ml index 3e5260fa3b4..b07db29c31d 100644 --- a/src/lib/graphql_lib/base_types.ml +++ b/src/lib/graphql_lib/base_types.ml @@ -30,4 +30,4 @@ let token_id () = let epoch_seed () = scalar "EpochSeed" ~doc:"Base58Check-encoded epoch seed" ~coerce:(fun seed -> - `String (Mina_base.Epoch_seed.to_base58_check seed) ) + `String (Mina_base.Epoch_seed.to_base58_check seed)) diff --git a/src/lib/graphql_lib/client.ml b/src/lib/graphql_lib/client.ml index 503f52ee10c..05c197020d4 100644 --- a/src/lib/graphql_lib/client.ml +++ b/src/lib/graphql_lib/client.ml @@ -12,16 +12,16 @@ let make_local_uri port address = module type S = sig val query_or_error : - < parse: Yojson.Basic.t -> 'response - ; query: string - ; variables: Yojson.Basic.t > + < parse : Yojson.Basic.t -> 'response + ; query : string + ; variables : Yojson.Basic.t > -> int -> 'response Deferred.Or_error.t val query : - < parse: Yojson.Basic.t -> 'response - ; query: string - ; variables: Yojson.Basic.t > + < parse : Yojson.Basic.t -> 'response + ; query : string + ; variables : Yojson.Basic.t > -> int -> 'response Deferred.t end @@ -48,7 +48,7 @@ let graphql_error_to_string e = error_obj_to_string e module Connection_error = struct - type t = [`Failed_request of Error.t | `Graphql_error of Error.t] + type t = [ `Failed_request of Error.t | `Graphql_error of Error.t ] let ok_exn = function | `Failed_request e -> @@ -82,15 +82,15 @@ module Make (Config : Config_intf) = struct ( ("Accept", "application/json") :: ("Content-Type", "application/json") :: Map.to_alist Config.headers ) ~f:(fun header (key, value) -> - Cohttp.Header.add header key value ) + Cohttp.Header.add header key value) in let%bind response, body = Deferred.Or_error.try_with ~here:[%here] ~extract_exn:true (fun () -> Cohttp_async.Client.post ~headers ~body:(Cohttp_async.Body.of_string body_string) - uri ) + uri) |> Deferred.Result.map_error ~f:(fun e -> - `Failed_request 
(Error.to_string_hum e) ) + `Failed_request (Error.to_string_hum e)) in let%bind body_str = Cohttp_async.Body.to_string body |> Deferred.map ~f:Result.return @@ -119,7 +119,7 @@ module Make (Config : Config_intf) = struct `Graphql_error (Printf.sprintf "Problem parsing graphql response\nError message: %s" - (Exn.to_string e)) ) ) + (Exn.to_string e))) ) |> Deferred.return let query_exn query_obj port = diff --git a/src/lib/hash_prefix_states/hash_prefix_states.ml b/src/lib/hash_prefix_states/hash_prefix_states.ml index d1f80305736..a46ef8b5966 100644 --- a/src/lib/hash_prefix_states/hash_prefix_states.ml +++ b/src/lib/hash_prefix_states/hash_prefix_states.ml @@ -1,8 +1,6 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] [%%else] diff --git a/src/lib/hex/hex.ml b/src/lib/hex/hex.ml index 5e5e7195fc4..56652b3b89b 100644 --- a/src/lib/hex/hex.ml +++ b/src/lib/hex/hex.ml @@ -141,7 +141,7 @@ module Sequence_be = struct assert (n = k + k) ; init k ~f:(fun i -> Char.of_int_exn - ((16 * Digit.to_int t.(2 * i)) + Digit.to_int t.((2 * i) + 1)) ) + ((16 * Digit.to_int t.(2 * i)) + Digit.to_int t.((2 * i) + 1))) let to_string = to_bytes_like ~init:String.init @@ -165,7 +165,7 @@ let encode t = let c = if i mod 2 = 0 then (* hi *) c lsr 4 else (* lo *) c in - hex_char_of_int_exn (c land 15) ) + hex_char_of_int_exn (c land 15)) let%test_unit "decode" = let t = String.init 100 ~f:(fun _ -> Char.of_int_exn (Random.int 256)) in @@ -190,7 +190,7 @@ module Safe = struct in let high = charify @@ ((Char.to_int c land 0xF0) lsr 4) in let lo = charify (Char.to_int c land 0x0F) in - String.of_char_list [high; lo] ) + String.of_char_list [ high; lo ]) |> String.concat let%test_unit "to_hex sane" = @@ -218,16 +218,16 @@ module Safe = struct String.to_list hex |> List.chunks_of ~length:2 |> List.fold_result ~init:[] ~f:(fun acc chunk -> match chunk with - | [a; b] when Char.is_alphanum a && Char.is_alphanum b -> + | 
[ a; b ] when Char.is_alphanum a && Char.is_alphanum b -> Or_error.return @@ (Char.((to_u4 a lsl 4) lor to_u4 b |> of_int_exn) :: acc) | _ -> - Or_error.error_string "invalid hex" ) + Or_error.error_string "invalid hex") |> Or_error.ok |> Option.map ~f:(Fn.compose String.of_char_list List.rev) let%test_unit "partial isomorphism" = - Quickcheck.test ~sexp_of:[%sexp_of: string] ~examples:["\243"; "abc"] + Quickcheck.test ~sexp_of:[%sexp_of: string] ~examples:[ "\243"; "abc" ] Quickcheck.Generator.(map (list char) ~f:String.of_char_list) ~f:(fun s -> let hexified = to_hex s in @@ -237,5 +237,5 @@ module Safe = struct else failwithf !"expected: %s ; hexified: %s ; actual: %s" - expected hexified actual () ) + expected hexified actual ()) end diff --git a/src/lib/inline_test_quiet_logs/inline_test_quiet_logs.ml b/src/lib/inline_test_quiet_logs/inline_test_quiet_logs.ml index f769334e938..3eac2a7239c 100644 --- a/src/lib/inline_test_quiet_logs/inline_test_quiet_logs.ml +++ b/src/lib/inline_test_quiet_logs/inline_test_quiet_logs.ml @@ -27,13 +27,12 @@ module Ppx_inline_test_lib = struct let buf = Stdlib.Bytes.create buf_len in let rec go () = let len = input tempfile_channel buf 0 buf_len in - if len > 0 then ( output stdout buf 0 len ; go () ) + if len > 0 then (output stdout buf 0 len ; go ()) in go () ) ; Unix.unlink tempfile - let test ~config ~descr ~tags ~filename ~line_number ~start_pos ~end_pos f - = + let test ~config ~descr ~tags ~filename ~line_number ~start_pos ~end_pos f = let f () = let redirect_data = redirect_to_newfile () in try diff --git a/src/lib/integers_stubs_js/test/test.ml b/src/lib/integers_stubs_js/test/test.ml index ebcc50271be..9e8e411238f 100644 --- a/src/lib/integers_stubs_js/test/test.ml +++ b/src/lib/integers_stubs_js/test/test.ml @@ -56,7 +56,8 @@ let test_module (module M : Unsigned.S) = ; (__LOC__, 0xFFL, 0xFFL) ; (__LOC__, 0xFFFFL, 0xFFFFL) ; (__LOC__, 0xFFFFFFL, 0xFFFFFFL) - ; (__LOC__, 0xFFFFFFFFL, 0xFFFFFFFFL) ] ; + ; (__LOC__, 
0xFFFFFFFFL, 0xFFFFFFFFL) + ] ; let check (loc, x, y) = let x, y = on_int ~f:M.add ~f_32:( + ) (M.of_int64 x) (M.of_int64 y) in print (x, y) ; @@ -67,7 +68,8 @@ let test_module (module M : Unsigned.S) = ; (__LOC__, 0xFFL, 0xFFL) ; (__LOC__, 0xFFFFL, 0xFFFFL) ; (__LOC__, 0xFFFFFFL, 0xFFFFFFL) - ; (__LOC__, 0xFFFFFFFFL, 0xFFFFFFFFL) ] + ; (__LOC__, 0xFFFFFFFFL, 0xFFFFFFFFL) + ] let () = Format.eprintf "UInt8@." ; diff --git a/src/lib/integration_test_cloud_engine/cli_inputs.ml b/src/lib/integration_test_cloud_engine/cli_inputs.ml index 460cf410b70..7596ec1b25f 100644 --- a/src/lib/integration_test_cloud_engine/cli_inputs.ml +++ b/src/lib/integration_test_cloud_engine/cli_inputs.ml @@ -1,6 +1,6 @@ open Cmdliner -type t = {coda_automation_location: string} +type t = { coda_automation_location : string } let term = let coda_automation_location = @@ -12,8 +12,8 @@ let term = Arg.( value & opt string "./automation" & info - ["coda-automation-location"] + [ "coda-automation-location" ] ~env ~docv:"CODA_AUTOMATION_LOCATION" ~doc) in - let cons_inputs coda_automation_location = {coda_automation_location} in + let cons_inputs coda_automation_location = { coda_automation_location } in Term.(const cons_inputs $ coda_automation_location) diff --git a/src/lib/integration_test_cloud_engine/coda_automation.ml b/src/lib/integration_test_cloud_engine/coda_automation.ml index 13c91c5c626..41b451f227f 100644 --- a/src/lib/integration_test_cloud_engine/coda_automation.ml +++ b/src/lib/integration_test_cloud_engine/coda_automation.ml @@ -23,45 +23,48 @@ module Network_config = struct module Cli_inputs = Cli_inputs type block_producer_config = - { name: string - ; id: string - ; keypair: Network_keypair.t - ; public_key: string - ; private_key: string - ; keypair_secret: string - ; libp2p_secret: string } + { name : string + ; id : string + ; keypair : Network_keypair.t + ; public_key : string + ; private_key : string + ; keypair_secret : string + ; libp2p_secret : string + } [@@deriving 
to_yojson] type terraform_config = - { k8s_context: string - ; cluster_name: string - ; cluster_region: string - ; aws_route53_zone_id: string - ; testnet_name: string - ; deploy_graphql_ingress: bool - ; coda_image: string - ; coda_agent_image: string - ; coda_bots_image: string - ; coda_points_image: string - ; coda_archive_image: string + { k8s_context : string + ; cluster_name : string + ; cluster_region : string + ; aws_route53_zone_id : string + ; testnet_name : string + ; deploy_graphql_ingress : bool + ; coda_image : string + ; coda_agent_image : string + ; coda_bots_image : string + ; coda_points_image : string + ; coda_archive_image : string (* this field needs to be sent as a string to terraform, even though it's a json encoded value *) - ; runtime_config: Yojson.Safe.t + ; runtime_config : Yojson.Safe.t [@to_yojson fun j -> `String (Yojson.Safe.to_string j)] - ; block_producer_configs: block_producer_config list - ; log_precomputed_blocks: bool - ; archive_node_count: int - ; mina_archive_schema: string - ; snark_worker_replicas: int - ; snark_worker_fee: string - ; snark_worker_public_key: string } + ; block_producer_configs : block_producer_config list + ; log_precomputed_blocks : bool + ; archive_node_count : int + ; mina_archive_schema : string + ; snark_worker_replicas : int + ; snark_worker_fee : string + ; snark_worker_public_key : string + } [@@deriving to_yojson] type t = - { coda_automation_location: string - ; debug_arg: bool - ; keypairs: Network_keypair.t list - ; constants: Test_config.constants - ; terraform: terraform_config } + { coda_automation_location : string + ; debug_arg : bool + ; keypairs : Network_keypair.t list + ; constants : Test_config.constants + ; terraform : terraform_config + } [@@deriving to_yojson] let terraform_config_to_assoc t = @@ -85,7 +88,8 @@ module Network_config = struct ; num_archive_nodes ; log_precomputed_blocks ; snark_worker_fee - ; snark_worker_public_key } = + ; snark_worker_public_key + } = 
test_config in let user_from_env = Option.value (Unix.getenv "USER") ~default:"auto" in @@ -107,7 +111,7 @@ module Network_config = struct if num_block_producers > List.length keypairs then failwith "not enough sample keypairs for specified number of block producers" ; - let f index ({Test_config.Block_producer.balance; timing}, (pk, sk)) = + let f index ({ Test_config.Block_producer.balance; timing }, (pk, sk)) = let runtime_account = let timing = match timing with @@ -115,26 +119,30 @@ module Network_config = struct None | Timed t -> Some - { Runtime_config.Accounts.Single.Timed.initial_minimum_balance= + { Runtime_config.Accounts.Single.Timed.initial_minimum_balance = t.initial_minimum_balance - ; cliff_time= t.cliff_time - ; cliff_amount= t.cliff_amount - ; vesting_period= t.vesting_period - ; vesting_increment= t.vesting_increment } + ; cliff_time = t.cliff_time + ; cliff_amount = t.cliff_amount + ; vesting_period = t.vesting_period + ; vesting_increment = t.vesting_increment + } in let default = Runtime_config.Accounts.Single.default in { default with - pk= Some (Public_key.Compressed.to_string pk) - ; sk= None - ; balance= + pk = Some (Public_key.Compressed.to_string pk) + ; sk = None + ; balance = Balance.of_formatted_string balance (* delegation currently unsupported *) - ; delegate= None - ; timing } + ; delegate = None + ; timing + } in let secret_name = "test-keypair-" ^ Int.to_string index in let keypair = - {Keypair.public_key= Public_key.decompress_exn pk; private_key= sk} + { Keypair.public_key = Public_key.decompress_exn pk + ; private_key = sk + } in ( Network_keypair.create_network_keypair ~keypair ~secret_name , runtime_account ) @@ -147,44 +155,48 @@ module Network_config = struct (* DAEMON CONFIG *) let proof_config = (* TODO: lift configuration of these up Test_config.t *) - { Runtime_config.Proof_keys.level= Some proof_level - ; sub_windows_per_window= None - ; ledger_depth= None - ; work_delay= None - ; block_window_duration_ms= None - ; 
transaction_capacity= None - ; coinbase_amount= None - ; supercharged_coinbase_factor= None - ; account_creation_fee= None - ; fork= None } + { Runtime_config.Proof_keys.level = Some proof_level + ; sub_windows_per_window = None + ; ledger_depth = None + ; work_delay = None + ; block_window_duration_ms = None + ; transaction_capacity = None + ; coinbase_amount = None + ; supercharged_coinbase_factor = None + ; account_creation_fee = None + ; fork = None + } in let constraint_constants = Genesis_ledger_helper.make_constraint_constants ~default:Genesis_constants.Constraint_constants.compiled proof_config in let runtime_config = - { Runtime_config.daemon= - Some {txpool_max_size= Some txpool_max_size; peer_list_url= None} - ; genesis= + { Runtime_config.daemon = + Some { txpool_max_size = Some txpool_max_size; peer_list_url = None } + ; genesis = Some - { k= Some k - ; delta= Some delta - ; slots_per_epoch= Some slots_per_epoch - ; slots_per_sub_window= Some slots_per_sub_window - ; genesis_state_timestamp= - Some Core.Time.(to_string_abs ~zone:Zone.utc (now ())) } - ; proof= + { k = Some k + ; delta = Some delta + ; slots_per_epoch = Some slots_per_epoch + ; slots_per_sub_window = Some slots_per_sub_window + ; genesis_state_timestamp = + Some Core.Time.(to_string_abs ~zone:Zone.utc (now ())) + } + ; proof = None (* was: Some proof_config; TODO: prebake ledger and only set hash *) - ; ledger= + ; ledger = Some - { base= Accounts runtime_accounts - ; add_genesis_winner= None - ; num_accounts= None - ; balances= [] - ; hash= None - ; name= None } - ; epoch_data= None } + { base = Accounts runtime_accounts + ; add_genesis_winner = None + ; num_accounts = None + ; balances = [] + ; hash = None + ; name = None + } + ; epoch_data = None + } in let genesis_constants = Or_error.ok_exn @@ -192,78 +204,87 @@ module Network_config = struct ~default:Genesis_constants.compiled runtime_config) in let constants : Test_config.constants = - {constraints= constraint_constants; genesis= 
genesis_constants} + { constraints = constraint_constants; genesis = genesis_constants } in (* BLOCK PRODUCER CONFIG *) let block_producer_config index keypair = - { name= "test-block-producer-" ^ Int.to_string (index + 1) - ; id= Int.to_string index + { name = "test-block-producer-" ^ Int.to_string (index + 1) + ; id = Int.to_string index ; keypair - ; keypair_secret= keypair.secret_name - ; public_key= keypair.public_key_file - ; private_key= keypair.private_key_file - ; libp2p_secret= "" } + ; keypair_secret = keypair.secret_name + ; public_key = keypair.public_key_file + ; private_key = keypair.private_key_file + ; libp2p_secret = "" + } in let mina_archive_schema = "https://raw.githubusercontent.com/MinaProtocol/mina/develop/src/app/archive/create_schema.sql" in (* NETWORK CONFIG *) - { coda_automation_location= cli_inputs.coda_automation_location - ; debug_arg= debug - ; keypairs= block_producer_keypairs + { coda_automation_location = cli_inputs.coda_automation_location + ; debug_arg = debug + ; keypairs = block_producer_keypairs ; constants - ; terraform= + ; terraform = { cluster_name ; cluster_region - ; k8s_context= cluster_id + ; k8s_context = cluster_id ; testnet_name - ; deploy_graphql_ingress= requires_graphql - ; coda_image= images.coda - ; coda_agent_image= images.user_agent - ; coda_bots_image= images.bots - ; coda_points_image= images.points - ; coda_archive_image= images.archive_node - ; runtime_config= Runtime_config.to_yojson runtime_config - ; block_producer_configs= + ; deploy_graphql_ingress = requires_graphql + ; coda_image = images.coda + ; coda_agent_image = images.user_agent + ; coda_bots_image = images.bots + ; coda_points_image = images.points + ; coda_archive_image = images.archive_node + ; runtime_config = Runtime_config.to_yojson runtime_config + ; block_producer_configs = List.mapi block_producer_keypairs ~f:block_producer_config ; log_precomputed_blocks - ; archive_node_count= num_archive_nodes + ; archive_node_count = 
num_archive_nodes ; mina_archive_schema - ; snark_worker_replicas= num_snark_workers + ; snark_worker_replicas = num_snark_workers ; snark_worker_public_key ; snark_worker_fee - ; aws_route53_zone_id } } + ; aws_route53_zone_id + } + } let to_terraform network_config = let open Terraform in [ Block.Terraform - { Block.Terraform.required_version= ">= 0.12.0" - ; backend= + { Block.Terraform.required_version = ">= 0.12.0" + ; backend = Backend.S3 - { Backend.S3.key= + { Backend.S3.key = "terraform-" ^ network_config.terraform.testnet_name ^ ".tfstate" - ; encrypt= true - ; region= aws_region - ; bucket= "o1labs-terraform-state" - ; acl= "bucket-owner-full-control" } } + ; encrypt = true + ; region = aws_region + ; bucket = "o1labs-terraform-state" + ; acl = "bucket-owner-full-control" + } + } ; Block.Provider - { Block.Provider.provider= "aws" - ; region= aws_region - ; zone= None - ; project= None - ; alias= None } + { Block.Provider.provider = "aws" + ; region = aws_region + ; zone = None + ; project = None + ; alias = None + } ; Block.Provider - { Block.Provider.provider= "google" - ; region= cluster_region - ; zone= Some cluster_zone - ; project= Some project_id - ; alias= None } + { Block.Provider.provider = "google" + ; region = cluster_region + ; zone = Some cluster_zone + ; project = Some project_id + ; alias = None + } ; Block.Module - { Block.Module.local_name= "integration_testnet" - ; providers= [("google.gke", "google")] - ; source= "../../modules/o1-integration" - ; args= terraform_config_to_assoc network_config.terraform } ] + { Block.Module.local_name = "integration_testnet" + ; providers = [ ("google.gke", "google") ] + ; source = "../../modules/o1-integration" + ; args = terraform_config_to_assoc network_config.terraform + } + ] let testnet_log_filter network_config = Printf.sprintf @@ -279,19 +300,20 @@ end module Network_manager = struct type t = - { logger: Logger.t - ; cluster: string - ; namespace: string - ; testnet_dir: string - ; 
testnet_log_filter: string - ; constants: Test_config.constants - ; seed_nodes: Kubernetes_network.Node.t list - ; block_producer_nodes: Kubernetes_network.Node.t list - ; snark_coordinator_nodes: Kubernetes_network.Node.t list - ; archive_nodes: Kubernetes_network.Node.t list - ; nodes_by_app_id: Kubernetes_network.Node.t String.Map.t - ; mutable deployed: bool - ; keypairs: Keypair.t list } + { logger : Logger.t + ; cluster : string + ; namespace : string + ; testnet_dir : string + ; testnet_log_filter : string + ; constants : Test_config.constants + ; seed_nodes : Kubernetes_network.Node.t list + ; block_producer_nodes : Kubernetes_network.Node.t list + ; snark_coordinator_nodes : Kubernetes_network.Node.t list + ; archive_nodes : Kubernetes_network.Node.t list + ; nodes_by_app_id : Kubernetes_network.Node.t String.Map.t + ; mutable deployed : bool + ; keypairs : Keypair.t list + } let run_cmd t prog args = Util.run_cmd t.testnet_dir prog args @@ -300,7 +322,7 @@ module Network_manager = struct let create ~logger (network_config : Network_config.t) = let%bind all_namespaces_str = Util.run_cmd_exn "/" "kubectl" - ["get"; "namespaces"; "-ojsonpath={.items[*].metadata.name}"] + [ "get"; "namespaces"; "-ojsonpath={.items[*].metadata.name}" ] in let all_namespaces = String.split ~on:' ' all_namespaces_str in let testnet_dir = @@ -316,8 +338,8 @@ module Network_manager = struct if network_config.debug_arg then Util.prompt_continue "Existing namespace of same name detected, pausing startup. 
\ - Enter [y/Y] to continue on and remove existing namespace, \ - start clean, and run the test; press Cntrl-C to quit out: " + Enter [y/Y] to continue on and remove existing namespace, start \ + clean, and run the test; press Cntrl-C to quit out: " else Deferred.return ([%log info] @@ -325,7 +347,7 @@ module Network_manager = struct clean") in Util.run_cmd_exn "/" "kubectl" - ["delete"; "namespace"; network_config.terraform.testnet_name] + [ "delete"; "namespace"; network_config.terraform.testnet_name ] >>| Fn.const () else return () in @@ -352,22 +374,21 @@ module Network_manager = struct ~f:(fun ch -> Network_config.to_terraform network_config |> Terraform.to_string - |> Out_channel.output_string ch ) ; - let testnet_log_filter = - Network_config.testnet_log_filter network_config - in + |> Out_channel.output_string ch) ; + let testnet_log_filter = Network_config.testnet_log_filter network_config in let cons_node pod_id container_id network_keypair_opt = - { Kubernetes_network.Node.testnet_name= + { Kubernetes_network.Node.testnet_name = network_config.terraform.testnet_name - ; cluster= cluster_id - ; namespace= network_config.terraform.testnet_name + ; cluster = cluster_id + ; namespace = network_config.terraform.testnet_name ; pod_id ; container_id - ; graphql_enabled= network_config.terraform.deploy_graphql_ingress - ; network_keypair= network_keypair_opt } + ; graphql_enabled = network_config.terraform.deploy_graphql_ingress + ; network_keypair = network_keypair_opt + } in (* we currently only deploy 1 seed and coordinator per deploy (will be configurable later) *) - let seed_nodes = [cons_node "seed" "coda" None] in + let seed_nodes = [ cons_node "seed" "coda" None ] in let snark_coordinator_name = "snark-coordinator-" ^ String.lowercase @@ -379,17 +400,17 @@ module Network_manager = struct in let snark_coordinator_nodes = if network_config.terraform.snark_worker_replicas > 0 then - [cons_node snark_coordinator_name "coordinator" None] + [ cons_node 
snark_coordinator_name "coordinator" None ] else [] in let block_producer_nodes = List.map network_config.terraform.block_producer_configs ~f:(fun bp_config -> - cons_node bp_config.name "coda" (Some bp_config.keypair) ) + cons_node bp_config.name "coda" (Some bp_config.keypair)) in let archive_nodes = List.init network_config.terraform.archive_node_count ~f:(fun i -> - cons_node (sprintf "archive-%d" (i + 1)) "archive" None ) + cons_node (sprintf "archive-%d" (i + 1)) "archive" None) in let nodes_by_app_id = let all_nodes = @@ -402,40 +423,42 @@ module Network_manager = struct in let t = { logger - ; cluster= cluster_id - ; namespace= network_config.terraform.testnet_name + ; cluster = cluster_id + ; namespace = network_config.terraform.testnet_name ; testnet_dir ; testnet_log_filter - ; constants= network_config.constants + ; constants = network_config.constants ; seed_nodes ; block_producer_nodes ; snark_coordinator_nodes ; archive_nodes ; nodes_by_app_id - ; deployed= false - ; keypairs= - List.map network_config.keypairs ~f:(fun {keypair; _} -> keypair) } + ; deployed = false + ; keypairs = + List.map network_config.keypairs ~f:(fun { keypair; _ } -> keypair) + } in [%log info] "Initializing terraform" ; - let%bind _ = run_cmd_exn t "terraform" ["init"] in - let%map _ = run_cmd_exn t "terraform" ["validate"] in + let%bind _ = run_cmd_exn t "terraform" [ "init" ] in + let%map _ = run_cmd_exn t "terraform" [ "validate" ] in t let deploy t = if t.deployed then failwith "network already deployed" ; [%log' info t.logger] "Deploying network" ; - let%map _ = run_cmd_exn t "terraform" ["apply"; "-auto-approve"] in + let%map _ = run_cmd_exn t "terraform" [ "apply"; "-auto-approve" ] in t.deployed <- true ; let result = - { Kubernetes_network.namespace= t.namespace - ; constants= t.constants - ; seeds= t.seed_nodes - ; block_producers= t.block_producer_nodes - ; snark_coordinators= t.snark_coordinator_nodes - ; archive_nodes= t.archive_nodes - ; nodes_by_app_id= 
t.nodes_by_app_id - ; testnet_log_filter= t.testnet_log_filter - ; keypairs= t.keypairs } + { Kubernetes_network.namespace = t.namespace + ; constants = t.constants + ; seeds = t.seed_nodes + ; block_producers = t.block_producer_nodes + ; snark_coordinators = t.snark_coordinator_nodes + ; archive_nodes = t.archive_nodes + ; nodes_by_app_id = t.nodes_by_app_id + ; testnet_log_filter = t.testnet_log_filter + ; keypairs = t.keypairs + } in let nodes_to_string = Fn.compose (String.concat ~sep:", ") @@ -454,7 +477,7 @@ module Network_manager = struct let destroy t = [%log' info t.logger] "Destroying network" ; if not t.deployed then failwith "network not deployed" ; - let%bind _ = run_cmd_exn t "terraform" ["destroy"; "-auto-approve"] in + let%bind _ = run_cmd_exn t "terraform" [ "destroy"; "-auto-approve" ] in t.deployed <- false ; Deferred.unit diff --git a/src/lib/integration_test_cloud_engine/kubernetes_network.ml b/src/lib/integration_test_cloud_engine/kubernetes_network.ml index d6f110dc395..d3073e87161 100644 --- a/src/lib/integration_test_cloud_engine/kubernetes_network.ml +++ b/src/lib/integration_test_cloud_engine/kubernetes_network.ml @@ -7,19 +7,20 @@ open Integration_test_lib module Node = struct type t = - { testnet_name: string - ; cluster: string - ; namespace: string - ; pod_id: string - ; container_id: string (* name of the container inside the pod *) - ; graphql_enabled: bool - ; network_keypair: Network_keypair.t option } + { testnet_name : string + ; cluster : string + ; namespace : string + ; pod_id : string + ; container_id : string (* name of the container inside the pod *) + ; graphql_enabled : bool + ; network_keypair : Network_keypair.t option + } - let id {pod_id; _} = pod_id + let id { pod_id; _ } = pod_id - let network_keypair {network_keypair; _} = network_keypair + let network_keypair { network_keypair; _ } = network_keypair - let base_kube_args t = ["--cluster"; t.cluster; "--namespace"; t.namespace] + let base_kube_args t = [ 
"--cluster"; t.cluster; "--namespace"; t.namespace ] let run_in_postgresql_container node ~cmd = let base_args = base_kube_args node in @@ -29,7 +30,7 @@ module Node = struct node.container_id node.container_id cmd in let%bind cwd = Unix.getcwd () in - Util.run_cmd_exn cwd "sh" ["-c"; kubectl_cmd] + Util.run_cmd_exn cwd "sh" [ "-c"; kubectl_cmd ] let get_logs_in_container node = let base_args = base_kube_args node in @@ -38,12 +39,12 @@ module Node = struct sprintf "%s get pod -l \"app=%s\" -o name" base_kube_cmd node.pod_id in let%bind cwd = Unix.getcwd () in - let%bind pod = Util.run_cmd_exn cwd "sh" ["-c"; pod_cmd] in + let%bind pod = Util.run_cmd_exn cwd "sh" [ "-c"; pod_cmd ] in let kubectl_cmd = Printf.sprintf "%s logs -c %s -n %s %s" base_kube_cmd node.container_id node.namespace pod in - Util.run_cmd_exn cwd "sh" ["-c"; kubectl_cmd] + Util.run_cmd_exn cwd "sh" [ "-c"; kubectl_cmd ] let run_in_container node cmd = let base_args = base_kube_args node in @@ -54,7 +55,7 @@ module Node = struct base_kube_cmd node.container_id base_kube_cmd node.pod_id cmd in let%bind.Deferred cwd = Unix.getcwd () in - Malleable_error.return (Util.run_cmd_exn cwd "sh" ["-c"; kubectl_cmd]) + Malleable_error.return (Util.run_cmd_exn cwd "sh" [ "-c"; kubectl_cmd ]) let start ~fresh_state node : unit Malleable_error.t = let open Malleable_error.Let_syntax in @@ -86,7 +87,8 @@ module Node = struct ; "-l" ; sprintf "app=%s" t.pod_id ; "-o=custom-columns=NAME:.metadata.name" - ; "--no-headers" ] + ; "--no-headers" + ] in let%bind run_result = Deferred.bind ~f:Malleable_error.or_hard_error @@ -95,7 +97,7 @@ module Node = struct match run_result with | [] -> Malleable_error.hard_error_string "get_pod_name: no result" - | [pod_name] -> + | [ pod_name ] -> return pod_name | _ -> Malleable_error.hard_error_string "get_pod_name: too many results" @@ -195,7 +197,7 @@ module Node = struct else let uri = Graphql.ingress_uri node in let metadata = - [("query", `String query_name); ("uri", `String 
(Uri.to_string uri))] + [ ("query", `String query_name); ("uri", `String (Uri.to_string uri)) ] in [%log info] "Attempting to send GraphQL request \"$query\" to \"$uri\"" ~metadata ; @@ -219,15 +221,16 @@ module Node = struct ($num_tries attempts left)" ~metadata: ( metadata - @ [("error", `String err_string); ("num_tries", `Int (n - 1))] - ) ; + @ [ ("error", `String err_string) + ; ("num_tries", `Int (n - 1)) + ] ) ; let%bind () = after (Time.Span.of_sec retry_delay_sec) in retry (n - 1) | Error (`Graphql_error err_string) -> [%log error] "GraphQL request \"$query\" to \"$uri\" returned an error: \ \"$error\" (this is a graphql error so not retrying)" - ~metadata:(metadata @ [("error", `String err_string)]) ; + ~metadata:(metadata @ [ ("error", `String err_string) ]) ; Deferred.Or_error.error_string err_string in let%bind () = after (Time.Span.of_sec initial_delay_sec) in @@ -237,14 +240,13 @@ module Node = struct let open Deferred.Or_error.Let_syntax in [%log info] "Getting node's peer_id, and the peer_ids of node's peers" ~metadata: - [("namespace", `String t.namespace); ("pod_id", `String t.pod_id)] ; + [ ("namespace", `String t.namespace); ("pod_id", `String t.pod_id) ] ; let query_obj = Graphql.Query_peer_id.make () in let%bind query_result_obj = - exec_graphql_request ~logger ~node:t ~query_name:"query_peer_id" - query_obj + exec_graphql_request ~logger ~node:t ~query_name:"query_peer_id" query_obj in [%log info] "get_peer_id, finished exec_graphql_request" ; - let self_id_obj = ((query_result_obj#daemonStatus)#addrsAndPorts)#peer in + let self_id_obj = query_result_obj#daemonStatus#addrsAndPorts#peer in let%bind self_id = match self_id_obj with | None -> @@ -252,10 +254,10 @@ module Node = struct | Some peer -> return peer#peerId in - let peers = (query_result_obj#daemonStatus)#peers |> Array.to_list in + let peers = query_result_obj#daemonStatus#peers |> Array.to_list in let peer_ids = List.map peers ~f:(fun peer -> peer#peerId) in - [%log info] - 
"get_peer_id, result of graphql query (self_id,[peers]) (%s,%s)" self_id + [%log info] "get_peer_id, result of graphql query (self_id,[peers]) (%s,%s)" + self_id (String.concat ~sep:" " peer_ids) ; return (self_id, peer_ids) @@ -284,7 +286,8 @@ module Node = struct ~metadata: [ ("namespace", `String t.namespace) ; ("pod_id", `String t.pod_id) - ; ("account_id", Mina_base.Account_id.to_yojson account_id) ] ; + ; ("account_id", Mina_base.Account_id.to_yojson account_id) + ] ; let pk = Mina_base.Account_id.public_key account_id in let token = Mina_base.Account_id.token_id account_id in let get_balance_obj = @@ -303,7 +306,7 @@ module Node = struct !"Account with %{sexp:Mina_base.Account_id.t} not found" account_id | Some acc -> - return (acc#balance)#total + return acc#balance#total let must_get_balance ~logger t ~account_id = get_balance ~logger t ~account_id @@ -313,13 +316,13 @@ module Node = struct let send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee = [%log info] "Sending a payment" ~metadata: - [("namespace", `String t.namespace); ("pod_id", `String t.pod_id)] ; + [ ("namespace", `String t.namespace); ("pod_id", `String t.pod_id) ] ; let open Deferred.Or_error.Let_syntax in let sender_pk_str = Signature_lib.Public_key.Compressed.to_string sender_pub_key in [%log info] "send_payment: unlocking account" - ~metadata:[("sender_pk", `String sender_pk_str)] ; + ~metadata:[ ("sender_pk", `String sender_pk_str) ] ; let unlock_sender_account_graphql () = let unlock_account_obj = Graphql.Unlock_account.make ~password:"naughty blue worm" @@ -343,14 +346,14 @@ module Node = struct send_payment_obj in let%map sent_payment_obj = send_payment_graphql () in - let (`UserCommand id_obj) = (sent_payment_obj#sendPayment)#payment in + let (`UserCommand id_obj) = sent_payment_obj#sendPayment#payment in let user_cmd_id = id_obj#id in [%log info] "Sent payment" - ~metadata:[("user_command_id", `String user_cmd_id)] ; + ~metadata:[ ("user_command_id", `String 
user_cmd_id) ] ; () - let must_send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount - ~fee = + let must_send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee + = send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee |> Deferred.bind ~f:Malleable_error.or_hard_error @@ -367,18 +370,18 @@ module Node = struct in [%log info] "Dumping archive data to file %s" data_file ; Out_channel.with_file data_file ~f:(fun out_ch -> - Out_channel.output_string out_ch data ) + Out_channel.output_string out_ch data) let dump_container_logs ~logger (t : t) ~log_file = let open Malleable_error.Let_syntax in - [%log info] "Dumping container logs from (node: %s, container: %s)" - t.pod_id t.container_id ; + [%log info] "Dumping container logs from (node: %s, container: %s)" t.pod_id + t.container_id ; let%map logs = Deferred.bind ~f:Malleable_error.return (get_logs_in_container t) in [%log info] "Dumping container log to file %s" log_file ; Out_channel.with_file log_file ~f:(fun out_ch -> - Out_channel.output_string out_ch logs ) + Out_channel.output_string out_ch logs) let dump_precomputed_blocks ~logger (t : t) = let open Malleable_error.Let_syntax in @@ -398,39 +401,41 @@ module Node = struct List.map jsons ~f:(fun json -> match json with | `Assoc items -> ( - match List.Assoc.find items ~equal:String.equal "metadata" with - | Some md -> - md - | None -> - failwithf "Log line is missing metadata: %s" - (Yojson.Safe.to_string json) - () ) + match List.Assoc.find items ~equal:String.equal "metadata" with + | Some md -> + md + | None -> + failwithf "Log line is missing metadata: %s" + (Yojson.Safe.to_string json) + () ) | other -> failwithf "Expected log line to be a JSON record, got: %s" (Yojson.Safe.to_string other) - () ) + ()) in let state_hash_and_blocks = List.fold metadata_jsons ~init:[] ~f:(fun acc json -> match json with | `Assoc items -> ( - match - List.Assoc.find items ~equal:String.equal "precomputed_block" - with - | Some block -> 
( - match List.Assoc.find items ~equal:String.equal "state_hash" with - | Some state_hash -> - (state_hash, block) :: acc + match + List.Assoc.find items ~equal:String.equal "precomputed_block" + with + | Some block -> ( + match + List.Assoc.find items ~equal:String.equal "state_hash" + with + | Some state_hash -> + (state_hash, block) :: acc + | None -> + failwith + "Log metadata contains a precomputed block, but no \ + state hash" ) | None -> - failwith - "Log metadata contains a precomputed block, but no state \ - hash" ) - | None -> - acc ) + acc ) | other -> failwithf "Expected log line to be a JSON record, got: %s" (Yojson.Safe.to_string other) - () ) + ()) in let%bind.Deferred () = Deferred.List.iter state_hash_and_blocks @@ -454,41 +459,42 @@ module Node = struct "Dumping precomputed block with state hash %s to file %s" state_hash filename ; Out_channel.with_file filename ~f:(fun out_ch -> - Out_channel.output_string out_ch block ) ) + Out_channel.output_string out_ch block)) in Malleable_error.return () end type t = - { namespace: string - ; constants: Test_config.constants - ; seeds: Node.t list - ; block_producers: Node.t list - ; snark_coordinators: Node.t list - ; archive_nodes: Node.t list - ; testnet_log_filter: string - ; keypairs: Signature_lib.Keypair.t list - ; nodes_by_app_id: Node.t String.Map.t } + { namespace : string + ; constants : Test_config.constants + ; seeds : Node.t list + ; block_producers : Node.t list + ; snark_coordinators : Node.t list + ; archive_nodes : Node.t list + ; testnet_log_filter : string + ; keypairs : Signature_lib.Keypair.t list + ; nodes_by_app_id : Node.t String.Map.t + } -let constants {constants; _} = constants +let constants { constants; _ } = constants -let constraint_constants {constants; _} = constants.constraints +let constraint_constants { constants; _ } = constants.constraints -let genesis_constants {constants; _} = constants.genesis +let genesis_constants { constants; _ } = constants.genesis -let seeds 
{seeds; _} = seeds +let seeds { seeds; _ } = seeds -let block_producers {block_producers; _} = block_producers +let block_producers { block_producers; _ } = block_producers -let snark_coordinators {snark_coordinators; _} = snark_coordinators +let snark_coordinators { snark_coordinators; _ } = snark_coordinators -let archive_nodes {archive_nodes; _} = archive_nodes +let archive_nodes { archive_nodes; _ } = archive_nodes (* TODO: snark workers (until then, pretty sure snark work won't be done) *) -let all_nodes {seeds; block_producers; snark_coordinators; archive_nodes; _} = - List.concat [seeds; block_producers; snark_coordinators; archive_nodes] +let all_nodes { seeds; block_producers; snark_coordinators; archive_nodes; _ } = + List.concat [ seeds; block_producers; snark_coordinators; archive_nodes ] -let keypairs {keypairs; _} = keypairs +let keypairs { keypairs; _ } = keypairs let lookup_node_by_app_id t = Map.find t.nodes_by_app_id @@ -498,7 +504,7 @@ let initialize ~logger network = let max_polls = 60 (* 15 mins *) in let all_pods = all_nodes network - |> List.map ~f:(fun {pod_id; _} -> pod_id) + |> List.map ~f:(fun { pod_id; _ } -> pod_id) |> String.Set.of_list in let get_pod_statuses () = @@ -517,7 +523,7 @@ let initialize ~logger network = |> List.map ~f:(fun line -> let parts = String.split line ~on:':' in assert (List.length parts = 2) ; - (List.nth_exn parts 0, List.nth_exn parts 1) ) + (List.nth_exn parts 0, List.nth_exn parts 1)) |> List.filter ~f:(fun (pod_name, _) -> String.Set.mem all_pods pod_name) in let rec poll n = @@ -525,7 +531,7 @@ let initialize ~logger network = (* TODO: detect "bad statuses" (eg CrashLoopBackoff) and terminate early *) let bad_pod_statuses = List.filter pod_statuses ~f:(fun (_, status) -> - not (String.equal status "Running") ) + not (String.equal status "Running")) in if List.is_empty bad_pod_statuses then return () else if n < max_polls then @@ -538,13 +544,12 @@ let initialize ~logger network = `List (List.map 
bad_pod_statuses ~f:(fun (pod_name, status) -> `Assoc - [("pod_name", `String pod_name); ("status", `String status)] - )) + [ ("pod_name", `String pod_name); ("status", `String status) ])) in [%log fatal] "Not all pods were assigned to nodes and ready in time: \ $bad_pod_statuses" - ~metadata:[("bad_pod_statuses", bad_pod_statuses_json)] ; + ~metadata:[ ("bad_pod_statuses", bad_pod_statuses_json) ] ; Malleable_error.hard_error_format "Some pods either were not assigned to nodes or did deploy properly \ (errors: %s)" @@ -556,13 +561,13 @@ let initialize ~logger network = let seed_nodes = seeds network in let seed_pod_ids = seed_nodes - |> List.map ~f:(fun {Node.pod_id; _} -> pod_id) + |> List.map ~f:(fun { Node.pod_id; _ } -> pod_id) |> String.Set.of_list in let non_seed_nodes = network |> all_nodes - |> List.filter ~f:(fun {Node.pod_id; _} -> - not (String.Set.mem seed_pod_ids pod_id) ) + |> List.filter ~f:(fun { Node.pod_id; _ } -> + not (String.Set.mem seed_pod_ids pod_id)) in (* TODO: parallelize (requires accumlative hard errors) *) let%bind () = @@ -576,4 +581,4 @@ let initialize ~logger network = in Malleable_error.List.iter non_seed_nodes ~f:(Node.start ~fresh_state:false) - else Deferred.return res ) + else Deferred.return res) diff --git a/src/lib/integration_test_cloud_engine/stack_driver_log_engine.ml b/src/lib/integration_test_cloud_engine/stack_driver_log_engine.ml index c022d5193ef..97adb3a83b6 100644 --- a/src/lib/integration_test_cloud_engine/stack_driver_log_engine.ml +++ b/src/lib/integration_test_cloud_engine/stack_driver_log_engine.ml @@ -22,20 +22,21 @@ let or_error_list_fold ls ~init ~f = let open Or_error.Let_syntax in List.fold ls ~init:(return init) ~f:(fun acc_or_error el -> let%bind acc = acc_or_error in - f acc el ) + f acc el) let or_error_list_map ls ~f = let open Or_error.Let_syntax in or_error_list_fold ls ~init:[] ~f:(fun t el -> let%map h = f el in - h :: t ) + h :: t) let log_filter_of_event_type = let open Event_type in function 
| Event_type Log_error -> [ "jsonPayload.level=(\"Warn\" OR \"Error\" OR \"Faulty_peer\" OR \ - \"Fatal\")" ] + \"Fatal\")" + ] | Event_type t -> let event_id = to_structured_event_id (Event_type t) @@ -48,7 +49,7 @@ let log_filter_of_event_type = Printf.sprintf "jsonPayload.event_id=\"%s\"" (Structured_log_events.string_of_id event_id) in - [filter] + [ filter ] let all_event_types_log_filter = let event_filters = @@ -58,23 +59,23 @@ let all_event_types_log_filter = let disjunction = event_filters |> List.map ~f:(fun filter -> - nest (filter |> List.map ~f:nest |> String.concat ~sep:" AND ") ) + nest (filter |> List.map ~f:nest |> String.concat ~sep:" AND ")) |> String.concat ~sep:" OR " in - [disjunction] + [ disjunction ] module Subscription = struct - type t = {name: string; topic: string; sink: string} + type t = { name : string; topic : string; sink : string } (*Using the api endpoint to create a sink instead of the gcloud command - because the cli doesn't allow setting the writerIdentity account for the sink - and instead generates an account that doesn't have permissions to publish - logs to the topic. The account needs to be given permissions explicitly and - then there's this from the documentation: - There is a delay between creating the sink and using the sink's new service - account to authorize writing to the export destination. During the first 24 - hours after sink creation, you might see permission-related error messages - from the sink on your project's Activity page; you can ignore them. + because the cli doesn't allow setting the writerIdentity account for the sink + and instead generates an account that doesn't have permissions to publish + logs to the topic. The account needs to be given permissions explicitly and + then there's this from the documentation: + There is a delay between creating the sink and using the sink's new service + account to authorize writing to the export destination. 
During the first 24 + hours after sink creation, you might see permission-related error messages + from the sink on your project's Activity page; you can ignore them. *) let create_sink ~topic ~filter ~key ~logger name = let open Deferred.Or_error.Let_syntax in @@ -84,16 +85,16 @@ module Subscription = struct in let%bind authorization = let%map token = - run_cmd_or_error "." prog ["auth"; "print-access-token"] + run_cmd_or_error "." prog [ "auth"; "print-access-token" ] in let token = String.strip token in - String.concat ["Authorization: Bearer "; token] + String.concat [ "Authorization: Bearer "; token ] in let req_type = "Accept: application/json" in let content_type = "Content-Type: application/json" in let destination = String.concat ~sep:"/" - ["pubsub.googleapis.com"; "projects"; project_id; "topics"; topic] + [ "pubsub.googleapis.com"; "projects"; project_id; "topics"; topic ] in let header = "--header" in let data = @@ -101,7 +102,8 @@ module Subscription = struct [ ("name", `String name) ; ("description", `String "Sink for tests") ; ("destination", `String destination) - ; ("filter", `String filter) ] + ; ("filter", `String filter) + ] |> Yojson.Safe.to_string in let%bind response = @@ -119,14 +121,13 @@ module Subscription = struct ; content_type ; "--data" ; data - ; "--compressed" ] + ; "--compressed" + ] in [%log spam] "Create sink response: $response" - ~metadata:[("response", `String response)] ; + ~metadata:[ ("response", `String response) ] ; let%bind response_json = Deferred.return (yojson_from_string response) in - match - Yojson.Safe.Util.(to_option Fn.id (member "error" response_json)) - with + match Yojson.Safe.Util.(to_option Fn.id (member "error" response_json)) with | Some _ -> Deferred.Or_error.errorf "Error when creating sink: %s" response | None -> @@ -135,7 +136,7 @@ module Subscription = struct let resource_names name : t = let topic = name ^ "_topic" in let sink = name ^ "_sink" in - {name; topic; sink} + { name; topic; sink } let 
create ~name ~filter ~logger = let open Deferred.Or_error.Let_syntax in @@ -151,7 +152,7 @@ module Subscription = struct gcloud_key_file_env in let create_topic name = - run_cmd_or_error "." prog ["pubsub"; "topics"; "create"; name] + run_cmd_or_error "." prog [ "pubsub"; "topics"; "create"; name ] in let create_subscription name topic = run_cmd_or_error "." prog @@ -162,7 +163,8 @@ module Subscription = struct ; "--topic" ; topic ; "--topic-project" - ; project_id ] + ; project_id + ] in let t = resource_names name in let%bind _ = create_topic t.topic in @@ -170,25 +172,25 @@ module Subscription = struct let%map _ = create_subscription name t.topic in [%log debug] "Succesfully created subscription \"$name\" to topic \"$topic\"" - ~metadata:[("name", `String name); ("topic", `String t.topic)] ; + ~metadata:[ ("name", `String name); ("topic", `String t.topic) ] ; t let delete t = let open Deferred.Let_syntax in let%bind delete_subscription_res = run_cmd_or_error "." prog - ["pubsub"; "subscriptions"; "delete"; t.name; "--project"; project_id] + [ "pubsub"; "subscriptions"; "delete"; t.name; "--project"; project_id ] in let%bind delete_sink_res = run_cmd_or_error "." prog - ["logging"; "sinks"; "delete"; t.sink; "--project"; project_id] + [ "logging"; "sinks"; "delete"; t.sink; "--project"; project_id ] in let%map delete_topic_res = run_cmd_or_error "." prog - ["pubsub"; "topics"; "delete"; t.topic; "--project"; project_id] + [ "pubsub"; "topics"; "delete"; t.topic; "--project"; project_id ] in Or_error.combine_errors - [delete_subscription_res; delete_sink_res; delete_topic_res] + [ delete_subscription_res; delete_sink_res; delete_topic_res ] |> Or_error.map ~f:(Fn.const ()) let cleanup name = @@ -204,7 +206,7 @@ module Subscription = struct "Failed to created stackdriver subscription: $error. Cleaning up \ existing pubsub resources (topic, subscription, sink) and \ retrying.." 
- ~metadata:[("error", `String (Error.to_string_hum e))] ; + ~metadata:[ ("error", `String (Error.to_string_hum e)) ] ; let%bind _ = cleanup name in create () | Ok res -> @@ -213,7 +215,7 @@ module Subscription = struct let pull ~logger t = let open Deferred.Or_error.Let_syntax in let subscription_id = - String.concat ~sep:"/" ["projects"; project_id; "subscriptions"; t.name] + String.concat ~sep:"/" [ "projects"; project_id; "subscriptions"; t.name ] in (* The limit for messages we pull on each interval is currently not configurable. For now, it's set to 5 (which will hopefully be a sane for a while). *) let%bind result = @@ -226,12 +228,13 @@ module Subscription = struct ; "--limit" ; string_of_int 5 ; "--format" - ; "table(DATA)" ] + ; "table(DATA)" + ] in [%log spam] "Pull result from stackdriver: $result" - ~metadata:[("result", `String result)] ; + ~metadata:[ ("result", `String result) ] ; match String.split_lines result with - | [] | ["DATA"] -> + | [] | [ "DATA" ] -> return [] | "DATA" :: data -> Deferred.return (or_error_list_map data ~f:yojson_from_string) @@ -240,18 +243,19 @@ module Subscription = struct end type t = - { logger: Logger.t - ; subscription: Subscription.t - ; event_writer: (Node.t * Event_type.event) Pipe.Writer.t - ; event_reader: (Node.t * Event_type.event) Pipe.Reader.t - ; background_job: unit Deferred.t } + { logger : Logger.t + ; subscription : Subscription.t + ; event_writer : (Node.t * Event_type.event) Pipe.Writer.t + ; event_reader : (Node.t * Event_type.event) Pipe.Reader.t + ; background_job : unit Deferred.t + } -let event_reader {event_reader; _} = event_reader +let event_reader { event_reader; _ } = event_reader let parse_event_from_log_entry ~network log_entry = let open Or_error.Let_syntax in let open Json_parsing in - let%bind app_id = find string log_entry ["labels"; "k8s-pod/app"] in + let%bind app_id = find string log_entry [ "labels"; "k8s-pod/app" ] in let%bind node = Kubernetes_network.lookup_node_by_app_id 
network app_id |> Option.value_map ~f:Or_error.return @@ -261,7 +265,7 @@ let parse_event_from_log_entry ~network log_entry = let%bind log = find (parser_from_of_yojson Logger.Message.of_yojson) - log_entry ["jsonPayload"] + log_entry [ "jsonPayload" ] in let%map event = Event_type.parse_event log in (node, event) @@ -275,7 +279,7 @@ let rec pull_subscription_in_background ~logger ~network ~event_writer in if List.length log_entries > 0 then [%log spam] "Parsing events from $n logs" - ~metadata:[("n", `Int (List.length log_entries))] + ~metadata:[ ("n", `Int (List.length log_entries)) ] else [%log spam] "No logs were pulled" ; let%bind () = Deferred.List.iter ~how:`Sequential log_entries ~f:(fun log_entry -> @@ -283,11 +287,11 @@ let rec pull_subscription_in_background ~logger ~network ~event_writer |> parse_event_from_log_entry ~network |> Or_error.ok_exn |> Pipe.write_without_pushback_if_open event_writer ; - Deferred.unit ) + Deferred.unit) in let%bind () = after (Time.Span.of_ms 10000.0) in - pull_subscription_in_background ~logger ~network ~event_writer - ~subscription ) + pull_subscription_in_background ~logger ~network ~event_writer ~subscription + ) else Deferred.unit let create ~logger ~(network : Kubernetes_network.t) = @@ -295,7 +299,7 @@ let create ~logger ~(network : Kubernetes_network.t) = let log_filter = let coda_container_filter = "resource.labels.container_name=\"coda\"" in let filters = - [network.testnet_log_filter; coda_container_filter] + [ network.testnet_log_filter; coda_container_filter ] @ all_event_types_log_filter in String.concat filters ~sep:"\n" @@ -307,14 +311,13 @@ let create ~logger ~(network : Kubernetes_network.t) = [%log info] "Event subscription created" ; let event_reader, event_writer = Pipe.create () in let background_job = - pull_subscription_in_background ~logger ~network ~event_writer - ~subscription + pull_subscription_in_background ~logger ~network ~event_writer ~subscription in - {logger; subscription; event_reader; 
event_writer; background_job} + { logger; subscription; event_reader; event_writer; background_job } let destroy t : unit Deferred.Or_error.t = let open Deferred.Or_error.Let_syntax in - let {logger; subscription; event_reader= _; event_writer; background_job} = + let { logger; subscription; event_reader = _; event_writer; background_job } = t in Pipe.close event_writer ; diff --git a/src/lib/integration_test_cloud_engine/stack_driver_log_engine.mli b/src/lib/integration_test_cloud_engine/stack_driver_log_engine.mli index 7472a60aaed..1dbef606c1a 100644 --- a/src/lib/integration_test_cloud_engine/stack_driver_log_engine.mli +++ b/src/lib/integration_test_cloud_engine/stack_driver_log_engine.mli @@ -1,3 +1,3 @@ include Integration_test_lib.Intf.Engine.Log_engine_intf - with module Network := Kubernetes_network + with module Network := Kubernetes_network diff --git a/src/lib/integration_test_cloud_engine/terraform.ml b/src/lib/integration_test_cloud_engine/terraform.ml index 385607bb45b..91fa49530ef 100644 --- a/src/lib/integration_test_cloud_engine/terraform.ml +++ b/src/lib/integration_test_cloud_engine/terraform.ml @@ -2,12 +2,17 @@ open Core (* this is only a partial (minimum required) implementation of the terraform json spec *) let cons (type a) (key : string) (body : a) (to_yojson : a -> Yojson.Safe.t) = - `Assoc [(key, to_yojson body)] + `Assoc [ (key, to_yojson body) ] module Backend = struct module S3 = struct type t = - {key: string; encrypt: bool; region: string; bucket: string; acl: string} + { key : string + ; encrypt : bool + ; region : string + ; bucket : string + ; acl : string + } [@@deriving to_yojson] end @@ -18,21 +23,22 @@ end module Block = struct module Terraform = struct - type t = {required_version: string; backend: Backend.t} + type t = { required_version : string; backend : Backend.t } [@@deriving to_yojson] end (* should probably leave these untyped, but this covers basic keys for both google and aws *) module Provider = struct type t = - 
{ provider: string - ; region: string - ; alias: string option - ; project: string option - ; zone: string option } + { provider : string + ; region : string + ; alias : string option + ; project : string option + ; zone : string option + } [@@deriving to_yojson] - let to_yojson {provider; region; alias; project; zone} = + let to_yojson { provider; region; alias; project; zone } = cons provider () (fun () -> let open Option.Let_syntax in let field k v = (k, `String v) in @@ -40,44 +46,50 @@ module Block = struct [ Some (field "region" region) ; alias >>| field "alias" ; project >>| field "project" - ; zone >>| field "zone" ] + ; zone >>| field "zone" + ] in - `Assoc (List.filter_map fields ~f:Fn.id) ) + `Assoc (List.filter_map fields ~f:Fn.id)) end module Module = struct type t = - { local_name: string - ; providers: (string * string) list - ; source: string - ; args: (string * Yojson.Safe.t) list } + { local_name : string + ; providers : (string * string) list + ; source : string + ; args : (string * Yojson.Safe.t) list + } - let to_yojson {local_name; providers; source; args} = + let to_yojson { local_name; providers; source; args } = cons local_name () (fun () -> let const_fields = [ ( "providers" - , `Assoc (List.map providers ~f:(fun (k, v) -> (k, `String v))) - ) - ; ("source", `String source) ] + , `Assoc (List.map providers ~f:(fun (k, v) -> (k, `String v))) ) + ; ("source", `String source) + ] in - `Assoc (const_fields @ args) ) + `Assoc (const_fields @ args)) end module Data = struct type t = - { data_source: string - ; local_name: string - ; args: (string * Yojson.Safe.t) list } + { data_source : string + ; local_name : string + ; args : (string * Yojson.Safe.t) list + } - let to_yojson {data_source; local_name; args} = + let to_yojson { data_source; local_name; args } = cons data_source () (fun () -> cons local_name () (fun () -> `Assoc args)) end module Resource = struct type t = - {type_: string; local_name: string; args: (string * Yojson.Safe.t) 
list} + { type_ : string + ; local_name : string + ; args : (string * Yojson.Safe.t) list + } - let to_yojson {type_; local_name; args} = + let to_yojson { type_; local_name; args } = cons type_ () (fun () -> cons local_name () (fun () -> `Assoc args)) end diff --git a/src/lib/integration_test_lib/dsl.ml b/src/lib/integration_test_lib/dsl.ml index 17b1ed69ad5..3f97d8979b4 100644 --- a/src/lib/integration_test_lib/dsl.ml +++ b/src/lib/integration_test_lib/dsl.ml @@ -19,7 +19,7 @@ let broadcast_pipe_fold_until_with_timeout reader ~timeout_duration true | `Continue x -> acc := x ; - false ) ) + false )) in match%map Timeout.await () ~timeout_duration read_deferred with | `Ok () -> @@ -40,15 +40,16 @@ module Make (Engine : Intf.Engine.S) () : (* TODO: monadify as Malleable_error w/ global value threading *) type t = - { logger: Logger.t - ; network: Engine.Network.t - ; event_router: Event_router.t - ; network_state_reader: Network_state.t Broadcast_pipe.Reader.t } + { logger : Logger.t + ; network : Engine.Network.t + ; event_router : Event_router.t + ; network_state_reader : Network_state.t Broadcast_pipe.Reader.t + } let network_state t = Broadcast_pipe.Reader.peek t.network_state_reader let create ~logger ~network ~event_router ~network_state_reader = - let t = {logger; network; event_router; network_state_reader} in + let t = { logger; network; event_router; network_state_reader } in `Don't_call_in_tests t let section_hard = Malleable_error.contextualize @@ -120,12 +121,11 @@ module Make (Engine : Intf.Engine.S) () : ~metadata: [ ( "soft_timeout" , `String - (Network_time_span.to_string ~constants condition.soft_timeout) - ) + (Network_time_span.to_string ~constants condition.soft_timeout) ) ; ( "hard_timeout" , `String - (Network_time_span.to_string ~constants condition.hard_timeout) - ) ] ; + (Network_time_span.to_string ~constants condition.hard_timeout) ) + ] ; let%bind result = match condition.predicate with | Network_state_predicate (init, check) -> @@ 
-145,7 +145,8 @@ module Make (Engine : Intf.Engine.S) () : (Error.of_list [ Error.createf "wait_for hit an error waiting for %s" condition.description - ; error ]) + ; error + ]) | `Success -> let soft_timeout_was_met = Time.(add start_time soft_timeout >= now ()) @@ -166,58 +167,61 @@ module Make (Engine : Intf.Engine.S) () : type log_error = Node.t * Event_type.Log_error.t type log_error_accumulator = - { warn: log_error DynArray.t - ; error: log_error DynArray.t - ; faulty_peer: log_error DynArray.t - ; fatal: log_error DynArray.t } + { warn : log_error DynArray.t + ; error : log_error DynArray.t + ; faulty_peer : log_error DynArray.t + ; fatal : log_error DynArray.t + } let empty_log_error_accumulator () = - { warn= DynArray.create () - ; error= DynArray.create () - ; faulty_peer= DynArray.create () - ; fatal= DynArray.create () } + { warn = DynArray.create () + ; error = DynArray.create () + ; faulty_peer = DynArray.create () + ; fatal = DynArray.create () + } let watch_log_errors ~logger ~event_router ~on_fatal_error = let log_error_accumulator = empty_log_error_accumulator () in ignore - (Event_router.on event_router Event_type.Log_error - ~f:(fun node message -> - let open Logger.Message in - let acc = - match message.level with - | Warn -> - log_error_accumulator.warn - | Error -> - log_error_accumulator.error - | Faulty_peer -> - log_error_accumulator.faulty_peer - | Fatal -> - log_error_accumulator.fatal - | _ -> - failwith "unexpected log level encountered" - in - DynArray.add acc (node, message) ; - if Logger.Level.equal message.level Fatal then ( - [%log fatal] "Error occured $error" - ~metadata:[("error", Logger.Message.to_yojson message)] ; - on_fatal_error message ) ; - Deferred.return `Continue ) : 'a Event_router.event_subscription) ; + ( Event_router.on event_router Event_type.Log_error + ~f:(fun node message -> + let open Logger.Message in + let acc = + match message.level with + | Warn -> + log_error_accumulator.warn + | Error -> + 
log_error_accumulator.error + | Faulty_peer -> + log_error_accumulator.faulty_peer + | Fatal -> + log_error_accumulator.fatal + | _ -> + failwith "unexpected log level encountered" + in + DynArray.add acc (node, message) ; + if Logger.Level.equal message.level Fatal then ( + [%log fatal] "Error occured $error" + ~metadata:[ ("error", Logger.Message.to_yojson message) ] ; + on_fatal_error message ) ; + Deferred.return `Continue) + : 'a Event_router.event_subscription ) ; log_error_accumulator - let lift_accumulated_log_errors {warn; faulty_peer; error; fatal} = + let lift_accumulated_log_errors { warn; faulty_peer; error; fatal } = let open Test_error in let lift error_array = DynArray.to_list error_array |> List.map ~f:(fun (node, message) -> - {node_id= Node.id node; error_message= message} ) + { node_id = Node.id node; error_message = message }) in - let time_of_error {error_message; _} = error_message.timestamp in + let time_of_error { error_message; _ } = error_message.timestamp in let accumulate_errors = List.fold ~init:Error_accumulator.empty ~f:(fun acc error -> Error_accumulator.add_to_context acc error.node_id error - ~time_of_error ) + ~time_of_error) in let soft_errors = accumulate_errors (lift warn @ lift faulty_peer) in let hard_errors = accumulate_errors (lift error @ lift fatal) in - {Set.soft_errors; hard_errors} + { Set.soft_errors; hard_errors } end diff --git a/src/lib/integration_test_lib/event_router.ml b/src/lib/integration_test_lib/event_router.ml index b801335d4c1..7d9ffb54432 100644 --- a/src/lib/integration_test_lib/event_router.ml +++ b/src/lib/integration_test_lib/event_router.ml @@ -9,14 +9,11 @@ module Make (Engine : Intf.Engine.S) () : module Event_handler_id = Unique_id.Int () type ('a, 'b) handler_func = - Node.t -> 'a -> [`Stop of 'b | `Continue] Deferred.t + Node.t -> 'a -> [ `Stop of 'b | `Continue ] Deferred.t type event_handler = | Event_handler : - Event_handler_id.t - * 'b Ivar.t - * 'a Event_type.t - * ('a, 'b) handler_func 
+ Event_handler_id.t * 'b Ivar.t * 'a Event_type.t * ('a, 'b) handler_func -> event_handler (* event subscriptions surface information from the handler (as type witnesses), but do not existentially hide the result parameter *) @@ -28,15 +25,14 @@ module Make (Engine : Intf.Engine.S) () : type handler_map = event_handler list Event_type.Map.t (* TODO: asynchronously unregistered event handlers *) - type t = {logger: Logger.t; handlers: handler_map ref} + type t = { logger : Logger.t; handlers : handler_map ref } let unregister_event_handlers_by_id handlers event_type ids = handlers := Event_type.Map.update !handlers event_type ~f:(fun registered_handlers -> registered_handlers |> Option.value ~default:[] |> List.filter ~f:(fun (Event_handler (registered_id, _, _, _)) -> - not (List.mem ids registered_id ~equal:Event_handler_id.equal) - ) ) + not (List.mem ids registered_id ~equal:Event_handler_id.equal))) let dispatch_event handlers node event = let open Event_type in @@ -64,7 +60,7 @@ module Make (Engine : Intf.Engine.S) () : None | `Stop result -> Ivar.fill handler_finished_ivar result ; - Some handler_id ) + Some handler_id) in unregister_event_handlers_by_id handlers (Event_type.type_of_event event) @@ -77,9 +73,10 @@ module Make (Engine : Intf.Engine.S) () : [%log debug] "Dispatching event $event for $node" ~metadata: [ ("event", Event_type.event_to_yojson event) - ; ("node", `String (Node.id node)) ] ; - dispatch_event handlers node event )) ; - {logger; handlers} + ; ("node", `String (Node.id node)) + ] ; + dispatch_event handlers node event)) ; + { logger; handlers } let on t event_type ~f = let event_type_ex = Event_type.Event_type event_type in @@ -94,7 +91,7 @@ module Make (Engine : Intf.Engine.S) () : let cancel t event_subscription cancellation = let (Event_subscription (id, ivar, event_type)) = event_subscription in unregister_event_handlers_by_id t.handlers - (Event_type.Event_type event_type) [id] ; + (Event_type.Event_type event_type) [ id ] ; 
Ivar.fill ivar cancellation let await event_subscription = @@ -104,9 +101,7 @@ module Make (Engine : Intf.Engine.S) () : let await_with_timeout t event_subscription ~timeout_duration ~timeout_cancellation = let open Deferred.Let_syntax in - match%map - Timeout.await () ~timeout_duration (await event_subscription) - with + match%map Timeout.await () ~timeout_duration (await event_subscription) with | `Ok x -> x | `Timeout -> diff --git a/src/lib/integration_test_lib/event_type.ml b/src/lib/integration_test_lib/event_type.ml index 8f150514861..a4164d240ea 100644 --- a/src/lib/integration_test_lib/event_type.ml +++ b/src/lib/integration_test_lib/event_type.ml @@ -8,7 +8,7 @@ let or_error_list_fold ls ~init ~f = let open Or_error.Let_syntax in List.fold ls ~init:(return init) ~f:(fun acc_or_error el -> let%bind acc = acc_or_error in - f acc el ) + f acc el) let get_metadata (message : Logger.Message.t) key = match String.Map.find message.metadata key with @@ -19,7 +19,7 @@ let get_metadata (message : Logger.Message.t) key = let parse id (m : Logger.Message.t) = Or_error.try_with (fun () -> - Structured_log_events.parse_exn id (Map.to_alist m.metadata) ) + Structured_log_events.parse_exn id (Map.to_alist m.metadata)) let bad_parse = Or_error.error_string "bad parse" @@ -62,16 +62,19 @@ module Transition_frontier_diff_application = struct let structured_event_id = Some Transition_frontier.applying_diffs_structured_events_id - type root_transitioned = {new_root: State_hash.t; garbage: State_hash.t list} + type root_transitioned = + { new_root : State_hash.t; garbage : State_hash.t list } [@@deriving to_yojson] type t = - { new_node: State_hash.t option - ; best_tip_changed: State_hash.t option - ; root_transitioned: root_transitioned option } + { new_node : State_hash.t option + ; best_tip_changed : State_hash.t option + ; root_transitioned : root_transitioned option + } [@@deriving lens, to_yojson] - let empty = {new_node= None; best_tip_changed= None; root_transitioned= 
None} + let empty = + { new_node = None; best_tip_changed = None; root_transitioned = None } let register (lens : (t, 'a option) Lens.t) (result : t) (x : 'a) : t Or_error.t = @@ -89,8 +92,8 @@ module Transition_frontier_diff_application = struct let%bind diffs = get_metadata message "diffs" >>= parse (list json) in or_error_list_fold diffs ~init:empty ~f:(fun res diff -> match Yojson.Safe.Util.keys diff with - | [name] -> ( - let%bind value = find json diff [name] in + | [ name ] -> ( + let%bind value = find json diff [ name ] in match name with | "New_node" -> let%bind state_hash = parse state_hash value in @@ -99,16 +102,15 @@ module Transition_frontier_diff_application = struct let%bind state_hash = parse state_hash value in register best_tip_changed res state_hash | "Root_transitioned" -> - let%bind new_root = find state_hash value ["new_root"] in - let%bind garbage = find (list state_hash) value ["garbage"] in - let data = {new_root; garbage} in + let%bind new_root = find state_hash value [ "new_root" ] in + let%bind garbage = find (list state_hash) value [ "garbage" ] in + let data = { new_root; garbage } in register root_transitioned res data | _ -> - Or_error.error_string - "unexpected transition frontier diff name" ) + Or_error.error_string "unexpected transition frontier diff name" + ) | _ -> - Or_error.error_string "unexpected transition frontier diff format" - ) + Or_error.error_string "unexpected transition frontier diff format") end module Block_produced = struct @@ -118,10 +120,11 @@ module Block_produced = struct Some Block_producer.block_produced_structured_events_id type t = - { block_height: int - ; epoch: int - ; global_slot: int - ; snarked_ledger_generated: bool } + { block_height : int + ; epoch : int + ; global_slot : int + ; snarked_ledger_generated : bool + } [@@deriving to_yojson] (* @@ -160,7 +163,7 @@ module Block_produced = struct let open Or_error.Let_syntax in let%bind breadcrumb = get_metadata message "breadcrumb" in let%bind 
snarked_ledger_generated = - find bool breadcrumb ["just_emitted_a_proof"] + find bool breadcrumb [ "just_emitted_a_proof" ] in let%bind breadcrumb_consensus_state = find json breadcrumb @@ -168,26 +171,26 @@ module Block_produced = struct ; "data" ; "protocol_state" ; "body" - ; "consensus_state" ] + ; "consensus_state" + ] in let%bind block_height = - find int breadcrumb_consensus_state ["blockchain_length"] + find int breadcrumb_consensus_state [ "blockchain_length" ] in let%bind global_slot = - find int breadcrumb_consensus_state ["curr_global_slot"; "slot_number"] + find int breadcrumb_consensus_state [ "curr_global_slot"; "slot_number" ] in - let%map epoch = find int breadcrumb_consensus_state ["epoch_count"] in - {block_height; global_slot; epoch; snarked_ledger_generated} + let%map epoch = find int breadcrumb_consensus_state [ "epoch_count" ] in + { block_height; global_slot; epoch; snarked_ledger_generated } end module Breadcrumb_added = struct let name = "Breadcrumb_added" let structured_event_id = - Some - Transition_frontier.added_breadcrumb_user_commands_structured_events_id + Some Transition_frontier.added_breadcrumb_user_commands_structured_events_id - type t = {user_commands: User_command.Valid.t With_status.t list} + type t = { user_commands : User_command.Valid.t With_status.t list } [@@deriving to_yojson] let parse message = @@ -197,7 +200,7 @@ module Breadcrumb_added = struct get_metadata message "user_commands" >>= parse valid_commands_with_statuses in - {user_commands} + { user_commands } end module Gossip = struct @@ -216,7 +219,7 @@ module Gossip = struct let structured_event_id = Some id - type r = {state_hash: State_hash.t} [@@deriving yojson, hash] + type r = { state_hash : State_hash.t } [@@deriving yojson, hash] type t = r With_direction.t [@@deriving yojson] @@ -224,10 +227,10 @@ module Gossip = struct let parse message : t Or_error.t = match%bind parse id message with - | Mina_networking.Block_received {state_hash; sender= _} -> - Ok 
({state_hash}, Direction.Received) - | Mina_networking.Gossip_new_state {state_hash} -> - Ok ({state_hash}, Sent) + | Mina_networking.Block_received { state_hash; sender = _ } -> + Ok ({ state_hash }, Direction.Received) + | Mina_networking.Gossip_new_state { state_hash } -> + Ok ({ state_hash }, Sent) | _ -> bad_parse end @@ -237,7 +240,7 @@ module Gossip = struct let structured_event_id = Some id - type r = {work: Network_pool.Snark_pool.Resource_pool.Diff.compact} + type r = { work : Network_pool.Snark_pool.Resource_pool.Diff.compact } [@@deriving yojson, hash] type t = r With_direction.t [@@deriving yojson] @@ -246,10 +249,10 @@ module Gossip = struct let parse message = match%bind parse id message with - | Mina_networking.Snark_work_received {work; sender= _} -> - Ok ({work}, Direction.Received) - | Mina_networking.Gossip_snark_pool_diff {work} -> - Ok ({work}, Direction.Received) + | Mina_networking.Snark_work_received { work; sender = _ } -> + Ok ({ work }, Direction.Received) + | Mina_networking.Gossip_snark_pool_diff { work } -> + Ok ({ work }, Direction.Received) | _ -> bad_parse end @@ -260,7 +263,7 @@ module Gossip = struct let structured_event_id = Some id type r = - {txns: Network_pool.Transaction_pool.Diff_versioned.Stable.Latest.t} + { txns : Network_pool.Transaction_pool.Diff_versioned.Stable.Latest.t } [@@deriving yojson, hash] type t = r With_direction.t [@@deriving yojson] @@ -269,10 +272,10 @@ module Gossip = struct let parse message = match%bind parse id message with - | Mina_networking.Transactions_received {txns; sender= _} -> - Ok ({txns}, Direction.Received) - | Mina_networking.Gossip_transaction_pool_diff {txns} -> - Ok ({txns}, Sent) + | Mina_networking.Transactions_received { txns; sender = _ } -> + Ok ({ txns }, Direction.Received) + | Mina_networking.Gossip_transaction_pool_diff { txns } -> + Ok ({ txns }, Sent) | _ -> bad_parse end @@ -366,7 +369,8 @@ let all_event_types = ; Event_type Breadcrumb_added ; Event_type Block_gossip ; 
Event_type Snark_work_gossip - ; Event_type Transactions_gossip ] + ; Event_type Transactions_gossip + ] let event_type_module : type a. a t -> (module Event_type_intf with type t = a) = function @@ -390,7 +394,7 @@ let event_type_module : type a. a t -> (module Event_type_intf with type t = a) let event_to_yojson event = let (Event (t, d)) = event in let (module Type) = event_type_module t in - `Assoc [(to_string t, Type.to_yojson d)] + `Assoc [ (to_string t, Type.to_yojson d) ] let to_structured_event_id event_type = let (Event_type t) = event_type in @@ -403,7 +407,7 @@ let of_structured_event_id = all_event_types |> List.filter_map ~f:(fun t -> let%map event_id = to_structured_event_id t in - (Structured_log_events.string_of_id event_id, t) ) + (Structured_log_events.string_of_id event_id, t)) |> String.Table.of_alist_exn in Fn.compose (String.Table.find table) Structured_log_events.string_of_id diff --git a/src/lib/integration_test_lib/event_type.mli b/src/lib/integration_test_lib/event_type.mli index fb915769e3f..d970d541e9c 100644 --- a/src/lib/integration_test_lib/event_type.mli +++ b/src/lib/integration_test_lib/event_type.mli @@ -24,22 +24,25 @@ module Node_initialization : sig end module Transition_frontier_diff_application : sig - type root_transitioned = {new_root: State_hash.t; garbage: State_hash.t list} + type root_transitioned = + { new_root : State_hash.t; garbage : State_hash.t list } type t = - { new_node: State_hash.t option - ; best_tip_changed: State_hash.t option - ; root_transitioned: root_transitioned option } + { new_node : State_hash.t option + ; best_tip_changed : State_hash.t option + ; root_transitioned : root_transitioned option + } include Event_type_intf with type t := t end module Block_produced : sig type t = - { block_height: int - ; epoch: int - ; global_slot: int - ; snarked_ledger_generated: bool } + { block_height : int + ; epoch : int + ; global_slot : int + ; snarked_ledger_generated : bool + } include Event_type_intf with 
type t := t @@ -57,7 +60,7 @@ module Block_produced : sig end module Breadcrumb_added : sig - type t = {user_commands: User_command.Valid.t With_status.t list} + type t = { user_commands : User_command.Valid.t With_status.t list } include Event_type_intf with type t := t end @@ -72,7 +75,7 @@ module Gossip : sig end module Block : sig - type r = {state_hash: State_hash.t} [@@deriving hash, yojson] + type r = { state_hash : State_hash.t } [@@deriving hash, yojson] type t = r With_direction.t @@ -80,7 +83,7 @@ module Gossip : sig end module Snark_work : sig - type r = {work: Network_pool.Snark_pool.Resource_pool.Diff.compact} + type r = { work : Network_pool.Snark_pool.Resource_pool.Diff.compact } [@@deriving hash, yojson] type t = r With_direction.t @@ -89,7 +92,7 @@ module Gossip : sig end module Transactions : sig - type r = {txns: Network_pool.Transaction_pool.Resource_pool.Diff.t} + type r = { txns : Network_pool.Transaction_pool.Resource_pool.Diff.t } [@@deriving hash, yojson] type t = r With_direction.t diff --git a/src/lib/integration_test_lib/gossip_state.ml b/src/lib/integration_test_lib/gossip_state.ml index b513403147e..e6e84926102 100644 --- a/src/lib/integration_test_lib/gossip_state.ml +++ b/src/lib/integration_test_lib/gossip_state.ml @@ -1,7 +1,7 @@ open Core_kernel module By_direction = struct - type 'a t = {sent: 'a; received: 'a} [@@deriving to_yojson, fields] + type 'a t = { sent : 'a; received : 'a } [@@deriving to_yojson, fields] end module type Set = sig @@ -53,20 +53,22 @@ end open Event_type.Gossip type t = - { node_id: string - ; blocks: Block.r Set.t By_direction.t - ; transactions: Transactions.r Set.t By_direction.t - ; snark_work: Snark_work.r Set.t By_direction.t } + { node_id : string + ; blocks : Block.r Set.t By_direction.t + ; transactions : Transactions.r Set.t By_direction.t + ; snark_work : Snark_work.r Set.t By_direction.t + } [@@deriving to_yojson, fields] let create node_id : t = let f create = - {By_direction.sent= create 
(); By_direction.received= create ()} + { By_direction.sent = create (); By_direction.received = create () } in { node_id - ; blocks= f Set.create - ; transactions= f Set.create - ; snark_work= f Set.create } + ; blocks = f Set.create + ; transactions = f Set.create + ; snark_work = f Set.create + } let add (gossip_state : t) (type a) (event_type : a Event_type.Gossip.With_direction.t Event_type.t) @@ -111,16 +113,17 @@ let stats (type a) List.filter_map gossip_states ~f:(fun gos_state -> if List.exists exclusion_list ~f:(fun id -> - String.equal id gos_state.node_id ) + String.equal id gos_state.node_id) then None - else Some gos_state ) + else Some gos_state) in let event_type_gossip_states = List.map gossip_states_filtered ~f:(fun gos_state -> let event_type_gossip_state_by_direction = getter_func gos_state in Set.union [ event_type_gossip_state_by_direction.sent - ; event_type_gossip_state_by_direction.received ] ) + ; event_type_gossip_state_by_direction.received + ]) in ( `Seen_by_all (Set.size (Set.inter event_type_gossip_states)) , `Seen_by_some (Set.size (Set.union event_type_gossip_states)) ) diff --git a/src/lib/integration_test_lib/intf.ml b/src/lib/integration_test_lib/intf.ml index 5e04665dc25..33c2ea2ac44 100644 --- a/src/lib/integration_test_lib/intf.ml +++ b/src/lib/integration_test_lib/intf.ml @@ -158,8 +158,8 @@ module Engine = struct module Network_manager : Network_manager_intf - with module Network_config := Network_config - and module Network := Network + with module Network_config := Network_config + and module Network := Network module Log_engine : Log_engine_intf with module Network := Network end @@ -172,7 +172,7 @@ module Dsl = struct type t type ('a, 'b) handler_func = - Engine.Network.Node.t -> 'a -> [`Stop of 'b | `Continue] Deferred.t + Engine.Network.Node.t -> 'a -> [ `Stop of 'b | `Continue ] Deferred.t type 'a event_subscription @@ -202,14 +202,15 @@ module Dsl = struct module Event_router : Event_router_intf with module Engine := 
Engine type t = - { block_height: int - ; epoch: int - ; global_slot: int - ; snarked_ledgers_generated: int - ; blocks_generated: int - ; node_initialization: bool String.Map.t - ; gossip_received: Gossip_state.t String.Map.t - ; best_tips_by_node: State_hash.t String.Map.t } + { block_height : int + ; epoch : int + ; global_slot : int + ; snarked_ledgers_generated : int + ; blocks_generated : int + ; node_initialization : bool String.Map.t + ; gossip_received : Gossip_state.t String.Map.t + ; best_tips_by_node : State_hash.t String.Map.t + } val listen : logger:Logger.t @@ -224,8 +225,8 @@ module Dsl = struct module Network_state : Network_state_intf - with module Engine := Engine - and module Event_router := Event_router + with module Engine := Engine + and module Event_router := Event_router type t @@ -263,14 +264,14 @@ module Dsl = struct module Network_state : Network_state_intf - with module Engine := Engine - and module Event_router := Event_router + with module Engine := Engine + and module Event_router := Event_router module Wait_condition : Wait_condition_intf - with module Engine := Engine - and module Event_router := Event_router - and module Network_state := Network_state + with module Engine := Engine + and module Event_router := Event_router + and module Network_state := Network_state module Util : Util_intf with module Engine := Engine @@ -290,7 +291,7 @@ module Dsl = struct -> network:Engine.Network.t -> event_router:Event_router.t -> network_state_reader:Network_state.t Broadcast_pipe.Reader.t - -> [`Don't_call_in_tests of t] + -> [ `Don't_call_in_tests of t ] type log_error_accumulator @@ -326,20 +327,9 @@ module Test = struct (* NB: until the DSL is actually implemented, a test just takes in the engine * implementation directly. 
*) - module type Functor_intf = functor (Inputs : Inputs_intf) -> S - with type network = - Inputs - .Engine - .Network - .t - and type node = - Inputs - .Engine - .Network - .Node - .t - and type dsl = - Inputs - .Dsl - .t + module type Functor_intf = functor (Inputs : Inputs_intf) -> + S + with type network = Inputs.Engine.Network.t + and type node = Inputs.Engine.Network.Node.t + and type dsl = Inputs.Dsl.t end diff --git a/src/lib/integration_test_lib/json_parsing.ml b/src/lib/integration_test_lib/json_parsing.ml index 6c8b1fc3283..1ecd714e729 100644 --- a/src/lib/integration_test_lib/json_parsing.ml +++ b/src/lib/integration_test_lib/json_parsing.ml @@ -33,7 +33,7 @@ let parser_from_of_yojson of_yojson js = | Error modl -> let logger = Logger.create () in [%log error] "Could not parse JSON using of_yojson" - ~metadata:[("module", `String modl); ("json", js)] ; + ~metadata:[ ("module", `String modl); ("json", js) ] ; failwithf "Could not parse JSON using %s.of_yojson" modl () let valid_commands_with_statuses : @@ -51,14 +51,14 @@ let valid_commands_with_statuses : | _, Error err -> let logger = Logger.create () in [%log error] - ~metadata:[("error", `String err)] + ~metadata:[ ("error", `String err) ] "Failed to parse JSON for user command status" ; (* fail on any error *) failwith "valid_commands_with_statuses: unable to parse JSON for user \ command" | cmds, Ok cmd -> - cmd :: cmds ) + cmd :: cmds) | _ -> failwith "valid_commands_with_statuses: expected `List" @@ -78,7 +78,7 @@ let rec find (parser : 'a parser) (json : Yojson.Safe.t) (path : string list) : "failed to find path using key '%s' in json object { %s }" key (String.concat ~sep:", " (List.map assoc ~f:(fun (s, json) -> - sprintf "\"%s\":%s" s (Yojson.Safe.to_string json) ))) + sprintf "\"%s\":%s" s (Yojson.Safe.to_string json)))) in find parser entry path' | _ -> diff --git a/src/lib/integration_test_lib/malleable_error.ml b/src/lib/integration_test_lib/malleable_error.ml index 
f7af0786517..ea0ea9889f3 100644 --- a/src/lib/integration_test_lib/malleable_error.ml +++ b/src/lib/integration_test_lib/malleable_error.ml @@ -15,47 +15,54 @@ module Error_accumulator = Test_error.Error_accumulator module Hard_fail = struct type t = { (* Most of the time, there is only one hard error, but we can have multiple when joining lists of monads (concurrency) *) - hard_errors: Test_error.internal_error Error_accumulator.t - ; soft_errors: Test_error.internal_error Error_accumulator.t } + hard_errors : Test_error.internal_error Error_accumulator.t + ; soft_errors : Test_error.internal_error Error_accumulator.t + } [@@deriving equal, sexp_of, compare] (* INVARIANT: hard_errors should always have at least 1 error *) - let check_invariants {hard_errors; _} = + let check_invariants { hard_errors; _ } = Error_accumulator.error_count hard_errors > 0 - let add_soft_errors {hard_errors; soft_errors} new_soft_errors = + let add_soft_errors { hard_errors; soft_errors } new_soft_errors = { hard_errors - ; soft_errors= Error_accumulator.merge soft_errors new_soft_errors } + ; soft_errors = Error_accumulator.merge soft_errors new_soft_errors + } let of_hard_errors hard_errors = - {hard_errors; soft_errors= Error_accumulator.empty} + { hard_errors; soft_errors = Error_accumulator.empty } - let contextualize context {hard_errors; soft_errors} = - { hard_errors= + let contextualize context { hard_errors; soft_errors } = + { hard_errors = Error_accumulator.contextualize' context hard_errors ~time_of_error:Test_error.occurrence_time - ; soft_errors= + ; soft_errors = Error_accumulator.contextualize' context soft_errors - ~time_of_error:Test_error.occurrence_time } + ~time_of_error:Test_error.occurrence_time + } end module Result_accumulator = struct type 'a t = - { computation_result: 'a - ; soft_errors: Test_error.internal_error Error_accumulator.t } + { computation_result : 'a + ; soft_errors : Test_error.internal_error Error_accumulator.t + } [@@deriving equal, sexp_of, 
compare] - let create computation_result soft_errors = {computation_result; soft_errors} + let create computation_result soft_errors = + { computation_result; soft_errors } - let return a = {computation_result= a; soft_errors= Error_accumulator.empty} + let return a = + { computation_result = a; soft_errors = Error_accumulator.empty } - let is_ok {soft_errors; _} = Error_accumulator.error_count soft_errors = 0 + let is_ok { soft_errors; _ } = Error_accumulator.error_count soft_errors = 0 let contextualize context acc = { acc with - soft_errors= + soft_errors = Error_accumulator.contextualize' context acc.soft_errors - ~time_of_error:Test_error.occurrence_time } + ~time_of_error:Test_error.occurrence_time + } end type 'a t = ('a Result_accumulator.t, Hard_fail.t) Deferred.Result.t @@ -69,13 +76,14 @@ module T = Monad.Make (struct let bind res ~f = let open Result_accumulator in match%bind res with - | Ok {computation_result= prev_result; soft_errors} -> ( + | Ok { computation_result = prev_result; soft_errors } -> ( match%map f prev_result with - | Ok {computation_result; soft_errors= new_soft_errors} -> + | Ok { computation_result; soft_errors = new_soft_errors } -> Ok { computation_result - ; soft_errors= - Error_accumulator.merge soft_errors new_soft_errors } + ; soft_errors = + Error_accumulator.merge soft_errors new_soft_errors + } | Error hard_fail -> Error (Hard_fail.add_soft_errors hard_fail soft_errors) ) | Error hard_fail -> @@ -113,7 +121,7 @@ let soften_error m = match%map m with | Ok acc -> Ok acc - | Error {Hard_fail.soft_errors; hard_errors} -> + | Error { Hard_fail.soft_errors; hard_errors } -> Ok (Result_accumulator.create () (Error_accumulator.merge soft_errors hard_errors)) @@ -156,7 +164,7 @@ let combine_errors (malleable_errors : 'a t list) : 'a list t = List.fold_left malleable_errors ~init:(return []) ~f:(fun acc el -> let%bind t = acc in let%map h = el in - h :: t ) + h :: t) in List.rev values @@ -166,12 +174,12 @@ let lift_error_set (type 
a) (m : a t) : Deferred.Result.t = let open Deferred.Let_syntax in let error_set hard_errors soft_errors = - {Test_error.Set.hard_errors; soft_errors} + { Test_error.Set.hard_errors; soft_errors } in match%map m with - | Ok {computation_result; soft_errors} -> + | Ok { computation_result; soft_errors } -> Ok (computation_result, error_set Error_accumulator.empty soft_errors) - | Error {hard_errors; soft_errors} -> + | Error { hard_errors; soft_errors } -> Error (error_set hard_errors soft_errors) let lift_error_set_unit (m : unit t) : @@ -229,7 +237,7 @@ module List = struct let%map _ = fold ls ~init:0 ~f:(fun i x -> let%map () = f i x in - i + 1 ) + i + 1) in () end @@ -237,8 +245,8 @@ end let%test_module "malleable error unit tests" = ( module struct (* we derive custom equality and comparisions for our result type, as the - * default behavior of ppx_assert is to use polymorphic equality and comparisons - * for results (as to why, I have no clue) *) + * default behavior of ppx_assert is to use polymorphic equality and comparisons + * for results (as to why, I have no clue) *) type 'a inner = ('a Result_accumulator.t, Hard_fail.t) Result.t [@@deriving sexp_of] @@ -275,8 +283,7 @@ let%test_module "malleable error unit tests" = f (f (f (f (f (return 0))))) in let%map expected = T.return 5 in - [%test_eq: int inner] ~equal:(equal_inner Int.equal) actual expected - ) + [%test_eq: int inner] ~equal:(equal_inner Int.equal) actual expected) let%test_unit "malleable error test 2: completes string computation when \ no errors" = @@ -289,7 +296,7 @@ let%test_module "malleable error unit tests" = in let%map expected = T.return "123" in [%test_eq: string inner] ~equal:(equal_inner String.equal) actual - expected ) + expected) let%test_unit "malleable error test 3: ok result that accumulates soft \ errors" = @@ -302,17 +309,17 @@ let%test_module "malleable error unit tests" = in let expected = let errors = - Base.List.map ["a"; "b"] + Base.List.map [ "a"; "b" ] ~f:(Fn.compose 
Test_error.internal_error Error.of_string) in Result.return - { Result_accumulator.computation_result= "123" - ; soft_errors= - {Error_accumulator.empty with from_current_context= errors} + { Result_accumulator.computation_result = "123" + ; soft_errors = + { Error_accumulator.empty with from_current_context = errors } } in [%test_eq: string inner] ~equal:(equal_inner String.equal) actual - expected ) + expected) let%test_unit "malleable error test 4: do a basic hard error" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -324,13 +331,14 @@ let%test_module "malleable error unit tests" = in let expected = Result.fail - { Hard_fail.hard_errors= + { Hard_fail.hard_errors = Error_accumulator.singleton (Test_error.internal_error (Error.of_string "xyz")) - ; soft_errors= Error_accumulator.empty } + ; soft_errors = Error_accumulator.empty + } in [%test_eq: string inner] ~equal:(equal_inner String.equal) actual - expected ) + expected) let%test_unit "malleable error test 5: hard error that accumulates a soft \ error" = @@ -344,15 +352,16 @@ let%test_module "malleable error unit tests" = in let expected = Result.fail - { Hard_fail.hard_errors= + { Hard_fail.hard_errors = Error_accumulator.singleton (Test_error.internal_error (Error.of_string "xyz")) - ; soft_errors= + ; soft_errors = Error_accumulator.singleton - (Test_error.internal_error (Error.of_string "a")) } + (Test_error.internal_error (Error.of_string "a")) + } in [%test_eq: string inner] ~equal:(equal_inner String.equal) actual - expected ) + expected) let%test_unit "malleable error test 6: hard error with multiple soft \ errors accumulating" = @@ -366,15 +375,18 @@ let%test_module "malleable error unit tests" = in let expected = Result.fail - { Hard_fail.hard_errors= + { Hard_fail.hard_errors = Error_accumulator.singleton (Test_error.internal_error (Error.of_string "xyz")) - ; soft_errors= + ; soft_errors = { Error_accumulator.empty with - from_current_context= + from_current_context = [ 
Test_error.internal_error (Error.of_string "b") - ; Test_error.internal_error (Error.of_string "a") ] } } + ; Test_error.internal_error (Error.of_string "a") + ] + } + } in [%test_eq: string inner] ~equal:(equal_inner String.equal) actual - expected ) + expected) end ) diff --git a/src/lib/integration_test_lib/network_keypair.ml b/src/lib/integration_test_lib/network_keypair.ml index b90176d015f..7ddc3724990 100644 --- a/src/lib/integration_test_lib/network_keypair.ml +++ b/src/lib/integration_test_lib/network_keypair.ml @@ -2,10 +2,11 @@ open Signature_lib open Core_kernel type t = - { keypair: Keypair.t - ; secret_name: string - ; public_key_file: string - ; private_key_file: string } + { keypair : Keypair.t + ; secret_name : string + ; public_key_file : string + ; private_key_file : string + } [@@deriving to_yojson] let create_network_keypair ~keypair ~secret_name = @@ -23,4 +24,4 @@ let create_network_keypair ~keypair ~secret_name = Secrets.Secret_box.encrypt ~plaintext ~password |> Secrets.Secret_box.to_yojson |> Yojson.Safe.to_string in - {keypair; secret_name; public_key_file; private_key_file} + { keypair; secret_name; public_key_file; private_key_file } diff --git a/src/lib/integration_test_lib/network_state.ml b/src/lib/integration_test_lib/network_state.ml index 522400191da..30129d559e4 100644 --- a/src/lib/integration_test_lib/network_state.ml +++ b/src/lib/integration_test_lib/network_state.ml @@ -7,36 +7,38 @@ module Make (Engine : Intf.Engine.S) (Event_router : Intf.Dsl.Event_router_intf with module Engine := Engine) : Intf.Dsl.Network_state_intf - with module Engine := Engine - and module Event_router := Event_router = struct + with module Engine := Engine + and module Event_router := Event_router = struct module Node = Engine.Network.Node let map_to_yojson m ~f = `Assoc String.Map.(m |> map ~f |> to_alist) (* TODO: Just replace the first 3 fields here with Protocol_state *) type t = - { block_height: int - ; epoch: int - ; global_slot: int - ; 
snarked_ledgers_generated: int - ; blocks_generated: int - ; node_initialization: bool String.Map.t + { block_height : int + ; epoch : int + ; global_slot : int + ; snarked_ledgers_generated : int + ; blocks_generated : int + ; node_initialization : bool String.Map.t [@to_yojson map_to_yojson ~f:(fun b -> `Bool b)] - ; gossip_received: Gossip_state.t String.Map.t + ; gossip_received : Gossip_state.t String.Map.t [@to_yojson map_to_yojson ~f:Gossip_state.to_yojson] - ; best_tips_by_node: State_hash.t String.Map.t - [@to_yojson map_to_yojson ~f:State_hash.to_yojson] } + ; best_tips_by_node : State_hash.t String.Map.t + [@to_yojson map_to_yojson ~f:State_hash.to_yojson] + } [@@deriving to_yojson] let empty = - { block_height= 0 - ; epoch= 0 - ; global_slot= 0 - ; snarked_ledgers_generated= 0 - ; blocks_generated= 0 - ; node_initialization= String.Map.empty - ; gossip_received= String.Map.empty - ; best_tips_by_node= String.Map.empty } + { block_height = 0 + ; epoch = 0 + ; global_slot = 0 + ; snarked_ledgers_generated = 0 + ; blocks_generated = 0 + ; node_initialization = String.Map.empty + ; gossip_received = String.Map.empty + ; best_tips_by_node = String.Map.empty + } let listen ~logger event_router = let r, w = Broadcast_pipe.create empty in @@ -44,7 +46,7 @@ module Make (* should be safe to ignore the write here, so long as `f` is synchronous *) let state = f (Broadcast_pipe.Reader.peek r) in [%log debug] "updated network state to: $state" - ~metadata:[("state", to_yojson state)] ; + ~metadata:[ ("state", to_yojson state) ] ; ignore (Broadcast_pipe.Writer.write w state : unit Deferred.t) ; Deferred.return `Continue in @@ -54,20 +56,21 @@ module Make [%log debug] "Updating network state with block produced event" ; update ~f:(fun state -> [%log debug] "handling block production from $node" - ~metadata:[("node", `String (Node.id node))] ; + ~metadata:[ ("node", `String (Node.id node)) ] ; if block_produced.block_height > state.block_height then let 
snarked_ledgers_generated = if block_produced.snarked_ledger_generated then 1 else 0 in { state with - epoch= block_produced.global_slot - ; global_slot= block_produced.global_slot - ; block_height= block_produced.block_height - ; blocks_generated= state.blocks_generated + 1 - ; snarked_ledgers_generated= + epoch = block_produced.global_slot + ; global_slot = block_produced.global_slot + ; block_height = block_produced.block_height + ; blocks_generated = state.blocks_generated + 1 + ; snarked_ledgers_generated = state.snarked_ledgers_generated - + snarked_ledgers_generated } - else state ) ) + + snarked_ledgers_generated + } + else state)) : _ Event_router.event_subscription ) ; ignore ( Event_router.on event_router @@ -78,14 +81,14 @@ module Make application event" ; update ~f:(fun state -> [%log debug] "handling frontier diff application of $node" - ~metadata:[("node", `String (Node.id node))] ; + ~metadata:[ ("node", `String (Node.id node)) ] ; Option.value_map diff_application.best_tip_changed ~default:state ~f:(fun new_best_tip -> let best_tips_by_node' = - String.Map.set state.best_tips_by_node - ~key:(Node.id node) ~data:new_best_tip + String.Map.set state.best_tips_by_node ~key:(Node.id node) + ~data:new_best_tip in - {state with best_tips_by_node= best_tips_by_node'} ) ) ) + { state with best_tips_by_node = best_tips_by_node' }))) : _ Event_router.event_subscription ) ; let handle_gossip_received event_type = ignore @@ -93,7 +96,7 @@ module Make ~f:(fun node gossip_with_direction -> update ~f:(fun state -> { state with - gossip_received= + gossip_received = Map.update state.gossip_received (Node.id node) ~f:(fun gossip_state_opt -> let gossip_state = @@ -104,7 +107,7 @@ module Make state in [%log debug] "GOSSIP RECEIVED by $node" - ~metadata:[("node", `String (Node.id node))] ; + ~metadata:[ ("node", `String (Node.id node)) ] ; [%log debug] "GOSSIP RECEIVED recevied event: $event" ~metadata: [ ( "event" @@ -114,7 +117,8 @@ module Make ] ; Gossip_state.add 
gossip_state event_type gossip_with_direction ; - gossip_state ) } ) ) + gossip_state) + })) : _ Event_router.event_subscription ) in handle_gossip_received Block_gossip ; @@ -126,12 +130,12 @@ module Make update ~f:(fun state -> [%log debug] "Updating network state with initialization event of $node" - ~metadata:[("node", `String (Node.id node))] ; + ~metadata:[ ("node", `String (Node.id node)) ] ; let node_initialization' = String.Map.set state.node_initialization ~key:(Node.id node) ~data:true in - {state with node_initialization= node_initialization'} ) ) + { state with node_initialization = node_initialization' })) : _ Event_router.event_subscription ) ; (r, w) end diff --git a/src/lib/integration_test_lib/test_config.ml b/src/lib/integration_test_lib/test_config.ml index 7225747dd17..5462c29d94c 100644 --- a/src/lib/integration_test_lib/test_config.ml +++ b/src/lib/integration_test_lib/test_config.ml @@ -1,50 +1,54 @@ module Container_images = struct type t = - { coda: string - ; archive_node: string - ; user_agent: string - ; bots: string - ; points: string } + { coda : string + ; archive_node : string + ; user_agent : string + ; bots : string + ; points : string + } end module Block_producer = struct - type t = {balance: string; timing: Mina_base.Account_timing.t} + type t = { balance : string; timing : Mina_base.Account_timing.t } end type constants = - { constraints: Genesis_constants.Constraint_constants.t - ; genesis: Genesis_constants.t } + { constraints : Genesis_constants.Constraint_constants.t + ; genesis : Genesis_constants.t + } [@@deriving to_yojson] type t = { (* temporary flag to enable/disable graphql ingress deployments *) - requires_graphql: bool - ; k: int - ; delta: int - ; slots_per_epoch: int - ; slots_per_sub_window: int - ; proof_level: Runtime_config.Proof_keys.Level.t - ; txpool_max_size: int - ; block_producers: Block_producer.t list - ; num_snark_workers: int - ; num_archive_nodes: int - ; log_precomputed_blocks: bool - ; 
snark_worker_fee: string - ; snark_worker_public_key: string } + requires_graphql : bool + ; k : int + ; delta : int + ; slots_per_epoch : int + ; slots_per_sub_window : int + ; proof_level : Runtime_config.Proof_keys.Level.t + ; txpool_max_size : int + ; block_producers : Block_producer.t list + ; num_snark_workers : int + ; num_archive_nodes : int + ; log_precomputed_blocks : bool + ; snark_worker_fee : string + ; snark_worker_public_key : string + } let default = - { requires_graphql= false - ; k= 20 - ; slots_per_epoch= 3 * 8 * 20 - ; slots_per_sub_window= 2 - ; delta= 0 - ; proof_level= Full - ; txpool_max_size= 3000 - ; block_producers= [] - ; num_snark_workers= 0 - ; num_archive_nodes= 0 - ; log_precomputed_blocks= false - ; snark_worker_fee= "0.025" - ; snark_worker_public_key= + { requires_graphql = false + ; k = 20 + ; slots_per_epoch = 3 * 8 * 20 + ; slots_per_sub_window = 2 + ; delta = 0 + ; proof_level = Full + ; txpool_max_size = 3000 + ; block_producers = [] + ; num_snark_workers = 0 + ; num_archive_nodes = 0 + ; log_precomputed_blocks = false + ; snark_worker_fee = "0.025" + ; snark_worker_public_key = (let pk, _ = (Lazy.force Mina_base.Sample_keypairs.keypairs).(0) in - Signature_lib.Public_key.Compressed.to_string pk) } + Signature_lib.Public_key.Compressed.to_string pk) + } diff --git a/src/lib/integration_test_lib/test_error.ml b/src/lib/integration_test_lib/test_error.ml index d60832b8c13..8e8fccc6c8b 100644 --- a/src/lib/integration_test_lib/test_error.ml +++ b/src/lib/integration_test_lib/test_error.ml @@ -1,22 +1,23 @@ open Core -type remote_error = {node_id: string; error_message: Logger.Message.t} +type remote_error = { node_id : string; error_message : Logger.Message.t } (* NB: equality on internal errors ignores timestamp *) -type internal_error = {occurrence_time: Time.t [@sexp.opaque]; error: Error.t} +type internal_error = + { occurrence_time : Time.t [@sexp.opaque]; error : Error.t } [@@deriving sexp] -let equal_internal_error 
{occurrence_time= _; error= err1} - {occurrence_time= _; error= err2} = +let equal_internal_error { occurrence_time = _; error = err1 } + { occurrence_time = _; error = err2 } = String.equal (Error.to_string_hum err1) (Error.to_string_hum err2) -let compare_internal_error {occurrence_time= _; error= err1} - {occurrence_time= _; error= err2} = +let compare_internal_error { occurrence_time = _; error = err1 } + { occurrence_time = _; error = err2 } = String.compare (Error.to_string_hum err1) (Error.to_string_hum err2) -let internal_error error = {occurrence_time= Time.now (); error} +let internal_error error = { occurrence_time = Time.now (); error } -let occurrence_time {occurrence_time; _} = occurrence_time +let occurrence_time { occurrence_time; _ } = occurrence_time let compare_time a b = Time.compare (occurrence_time a) (occurrence_time b) @@ -24,19 +25,20 @@ let compare_time a b = Time.compare (occurrence_time a) (occurrence_time b) (* TODO: consider switching to explicit context "enters/exits", recording introduction time upon entrance *) module Error_accumulator = struct type 'error contextualized_errors = - {introduction_time: Time.t; errors_by_time: 'error list Time.Map.t} + { introduction_time : Time.t; errors_by_time : 'error list Time.Map.t } [@@deriving equal, sexp_of, compare] type 'error t = - { from_current_context: 'error list - ; contextualized_errors: 'error contextualized_errors String.Map.t } + { from_current_context : 'error list + ; contextualized_errors : 'error contextualized_errors String.Map.t + } [@@deriving equal, sexp_of, compare] let empty_contextualized_errors () = - {introduction_time= Time.now (); errors_by_time= Time.Map.empty} + { introduction_time = Time.now (); errors_by_time = Time.Map.empty } let empty = - {from_current_context= []; contextualized_errors= String.Map.empty} + { from_current_context = []; contextualized_errors = String.Map.empty } let record_errors map context new_errors ~time_of_error = String.Map.update map 
context ~f:(fun errors_opt -> @@ -45,75 +47,81 @@ module Error_accumulator = struct in let errors_by_time = List.fold new_errors ~init:errors.errors_by_time ~f:(fun acc error -> - Time.Map.add_multi acc ~key:(time_of_error error) ~data:error ) + Time.Map.add_multi acc ~key:(time_of_error error) ~data:error) in - {errors with errors_by_time} ) + { errors with errors_by_time }) - let error_count {from_current_context; contextualized_errors} = + let error_count { from_current_context; contextualized_errors } = let num_current_context = List.length from_current_context in let num_contextualized = String.Map.fold contextualized_errors ~init:0 ~f:(fun ~key:_ ~data sum -> - Time.Map.length data.errors_by_time + sum ) + Time.Map.length data.errors_by_time + sum) in num_current_context + num_contextualized - let all_errors {from_current_context; contextualized_errors} = + let all_errors { from_current_context; contextualized_errors } = let context_errors = String.Map.data contextualized_errors - |> List.bind ~f:(fun {errors_by_time; _} -> Time.Map.data errors_by_time) + |> List.bind ~f:(fun { errors_by_time; _ } -> + Time.Map.data errors_by_time) |> List.concat in from_current_context @ context_errors - let contextualize' context {from_current_context; contextualized_errors} + let contextualize' context { from_current_context; contextualized_errors } ~time_of_error = { empty with - contextualized_errors= + contextualized_errors = record_errors contextualized_errors context from_current_context - ~time_of_error } + ~time_of_error + } let contextualize = contextualize' ~time_of_error:occurrence_time - let singleton x = {empty with from_current_context= [x]} + let singleton x = { empty with from_current_context = [ x ] } - let of_context_free_list ls = {empty with from_current_context= ls} + let of_context_free_list ls = { empty with from_current_context = ls } let of_contextualized_list' context ls ~time_of_error = { empty with - contextualized_errors= - record_errors 
String.Map.empty context ls ~time_of_error } + contextualized_errors = + record_errors String.Map.empty context ls ~time_of_error + } let of_contextualized_list = of_contextualized_list' ~time_of_error:occurrence_time let add t error = - {t with from_current_context= error :: t.from_current_context} + { t with from_current_context = error :: t.from_current_context } let add_to_context t context error ~time_of_error = { t with - contextualized_errors= - record_errors t.contextualized_errors context [error] ~time_of_error } + contextualized_errors = + record_errors t.contextualized_errors context [ error ] ~time_of_error + } - let map {from_current_context; contextualized_errors} ~f = - { from_current_context= List.map from_current_context ~f - ; contextualized_errors= + let map { from_current_context; contextualized_errors } ~f = + { from_current_context = List.map from_current_context ~f + ; contextualized_errors = String.Map.map contextualized_errors ~f:(fun errors -> { errors with - errors_by_time= - Time.Map.map errors.errors_by_time ~f:(List.map ~f) } ) } + errors_by_time = + Time.Map.map errors.errors_by_time ~f:(List.map ~f) + }) + } (* This only iterates over contextualized errors. 
You must check errors in the current context manually *) - let iter_contexts {from_current_context= _; contextualized_errors} ~f = + let iter_contexts { from_current_context = _; contextualized_errors } ~f = let contexts_by_time = contextualized_errors |> String.Map.to_alist |> List.map ~f:(fun (ctx, errors) -> (errors.introduction_time, ctx)) |> Time.Map.of_alist_exn in Time.Map.iter contexts_by_time ~f:(fun context -> - let {errors_by_time; _} = + let { errors_by_time; _ } = String.Map.find_exn contextualized_errors context in - errors_by_time |> Time.Map.data |> List.concat |> f context ) + errors_by_time |> Time.Map.data |> List.concat |> f context) let merge a b = let from_current_context = @@ -123,30 +131,31 @@ module Error_accumulator = struct let merge_maps (type a key comparator_witness) (map_a : (key, a, comparator_witness) Map.t) (map_b : (key, a, comparator_witness) Map.t) - ~(resolve_conflict : a -> a -> a) : - (key, a, comparator_witness) Map.t = + ~(resolve_conflict : a -> a -> a) : (key, a, comparator_witness) Map.t + = Map.fold map_b ~init:map_a ~f:(fun ~key ~data acc -> Map.update acc key ~f:(function | None -> data | Some data' -> - resolve_conflict data' data ) ) + resolve_conflict data' data)) in let merge_contextualized_errors a_errors b_errors = - { introduction_time= + { introduction_time = Time.min a_errors.introduction_time b_errors.introduction_time - ; errors_by_time= + ; errors_by_time = merge_maps a_errors.errors_by_time b_errors.errors_by_time - ~resolve_conflict:( @ ) } + ~resolve_conflict:( @ ) + } in merge_maps a.contextualized_errors b.contextualized_errors ~resolve_conflict:merge_contextualized_errors in - {from_current_context; contextualized_errors} + { from_current_context; contextualized_errors } let combine = List.fold ~init:empty ~f:merge - let partition {from_current_context; contextualized_errors} ~f = + let partition { from_current_context; contextualized_errors } ~f = let from_current_context_a, from_current_context_b = 
List.partition_tf from_current_context ~f in @@ -158,7 +167,7 @@ module Error_accumulator = struct ~init:(Map.empty cmp, Map.empty cmp) ~f:(fun ~key ~data (left, right) -> let l, r = f data in - (Map.add_exn left ~key ~data:l, Map.add_exn right ~key ~data:r) ) + (Map.add_exn left ~key ~data:l, Map.add_exn right ~key ~data:r)) in partition_map (module String) @@ -169,41 +178,46 @@ module Error_accumulator = struct (module Time) ctx_errors.errors_by_time ~f:(List.partition_tf ~f) in - ( {ctx_errors with errors_by_time= l} - , {ctx_errors with errors_by_time= r} ) ) + ( { ctx_errors with errors_by_time = l } + , { ctx_errors with errors_by_time = r } )) in let a = - { from_current_context= from_current_context_a - ; contextualized_errors= contextualized_errors_a } + { from_current_context = from_current_context_a + ; contextualized_errors = contextualized_errors_a + } in let b = - { from_current_context= from_current_context_b - ; contextualized_errors= contextualized_errors_b } + { from_current_context = from_current_context_b + ; contextualized_errors = contextualized_errors_b + } in (a, b) end module Set = struct type nonrec 'error t = - { soft_errors: 'error Error_accumulator.t - ; hard_errors: 'error Error_accumulator.t } + { soft_errors : 'error Error_accumulator.t + ; hard_errors : 'error Error_accumulator.t + } let empty = - {soft_errors= Error_accumulator.empty; hard_errors= Error_accumulator.empty} + { soft_errors = Error_accumulator.empty + ; hard_errors = Error_accumulator.empty + } - let max_severity {soft_errors; hard_errors} = + let max_severity { soft_errors; hard_errors } = let num_soft = Error_accumulator.error_count soft_errors in let num_hard = Error_accumulator.error_count hard_errors in if num_hard > 0 then `Hard else if num_soft > 0 then `Soft else `None - let all_errors {soft_errors; hard_errors} = + let all_errors { soft_errors; hard_errors } = Error_accumulator.merge soft_errors hard_errors let soft_singleton err = - {empty with soft_errors= 
Error_accumulator.singleton err} + { empty with soft_errors = Error_accumulator.singleton err } let hard_singleton err = - {empty with hard_errors= Error_accumulator.singleton err} + { empty with hard_errors = Error_accumulator.singleton err } let of_soft_or_error = function | Ok () -> @@ -218,25 +232,26 @@ module Set = struct hard_singleton (internal_error err) let add_soft err t = - {t with soft_errors= Error_accumulator.add t.soft_errors err} + { t with soft_errors = Error_accumulator.add t.soft_errors err } let add_hard err t = - {t with hard_errors= Error_accumulator.add t.soft_errors err} + { t with hard_errors = Error_accumulator.add t.soft_errors err } let merge a b = - { soft_errors= Error_accumulator.merge a.soft_errors b.soft_errors - ; hard_errors= Error_accumulator.merge a.hard_errors b.hard_errors } + { soft_errors = Error_accumulator.merge a.soft_errors b.soft_errors + ; hard_errors = Error_accumulator.merge a.hard_errors b.hard_errors + } let combine = List.fold_left ~init:empty ~f:merge - let partition {soft_errors; hard_errors} ~f = + let partition { soft_errors; hard_errors } ~f = let soft_errors_a, soft_errors_b = Error_accumulator.partition soft_errors ~f in let hard_errors_a, hard_errors_b = Error_accumulator.partition hard_errors ~f in - let a = {soft_errors= soft_errors_a; hard_errors= hard_errors_a} in - let b = {soft_errors= soft_errors_b; hard_errors= hard_errors_b} in + let a = { soft_errors = soft_errors_a; hard_errors = hard_errors_a } in + let b = { soft_errors = soft_errors_b; hard_errors = hard_errors_b } in (a, b) end diff --git a/src/lib/integration_test_lib/util.ml b/src/lib/integration_test_lib/util.ml index 9485d695f8a..f156b293374 100644 --- a/src/lib/integration_test_lib/util.ml +++ b/src/lib/integration_test_lib/util.ml @@ -6,7 +6,7 @@ open Async let run_cmd dir prog args = [%log' spam (Logger.create ())] "Running command (from %s): $command" dir - ~metadata:[("command", `String (String.concat (prog :: args) ~sep:" "))] ; + 
~metadata:[ ("command", `String (String.concat (prog :: args) ~sep:" ")) ] ; Process.create_exn ~working_dir:dir ~prog ~args () >>= Process.collect_output_and_wait diff --git a/src/lib/integration_test_lib/wait_condition.ml b/src/lib/integration_test_lib/wait_condition.ml index 5ef3f3ee11b..3a739ef29e1 100644 --- a/src/lib/integration_test_lib/wait_condition.ml +++ b/src/lib/integration_test_lib/wait_condition.ml @@ -7,8 +7,8 @@ module Make (Engine : Intf.Engine.S) (Event_router : Intf.Dsl.Event_router_intf with module Engine := Engine) (Network_state : Intf.Dsl.Network_state_intf - with module Engine := Engine - and module Event_router := Event_router) = + with module Engine := Engine + and module Event_router := Event_router) = struct open Network_state module Node = Engine.Network.Node @@ -29,15 +29,17 @@ struct -> predicate type t = - { description: string - ; predicate: predicate - ; soft_timeout: Network_time_span.t - ; hard_timeout: Network_time_span.t } + { description : string + ; predicate : predicate + ; soft_timeout : Network_time_span.t + ; hard_timeout : Network_time_span.t + } let with_timeouts ?soft_timeout ?hard_timeout t = { t with - soft_timeout= Option.value soft_timeout ~default:t.soft_timeout - ; hard_timeout= Option.value hard_timeout ~default:t.hard_timeout } + soft_timeout = Option.value soft_timeout ~default:t.soft_timeout + ; hard_timeout = Option.value hard_timeout ~default:t.hard_timeout + } (* TODO: does this actually work if it's run twice? 
I think not *) (* @@ -51,7 +53,7 @@ struct if List.for_all nodes ~f:(fun node -> String.Map.find state.node_initialization (Node.id node) - |> Option.value ~default:false ) + |> Option.value ~default:false) then Predicate_passed else Predicate_continuation () in @@ -60,11 +62,12 @@ struct |> Printf.sprintf "[%s] to initialize" in { description - ; predicate= Network_state_predicate (check (), check) - ; soft_timeout= Literal (Time.Span.of_min 10.0) - ; hard_timeout= Literal (Time.Span.of_min 15.0) } + ; predicate = Network_state_predicate (check (), check) + ; soft_timeout = Literal (Time.Span.of_min 10.0) + ; hard_timeout = Literal (Time.Span.of_min 15.0) + } - let node_to_initialize node = nodes_to_initialize [node] + let node_to_initialize node = nodes_to_initialize [ node ] (* let blocks_produced ?(active_stake_percentage = 1.0) n = *) let blocks_to_be_produced n = @@ -75,21 +78,22 @@ struct else Predicate_continuation init_blocks_generated in let soft_timeout_in_slots = 2 * n in - { description= Printf.sprintf "%d blocks to be produced" n - ; predicate= Network_state_predicate (init, check) - ; soft_timeout= Slots soft_timeout_in_slots - ; hard_timeout= Slots (soft_timeout_in_slots * 2) } + { description = Printf.sprintf "%d blocks to be produced" n + ; predicate = Network_state_predicate (init, check) + ; soft_timeout = Slots soft_timeout_in_slots + ; hard_timeout = Slots (soft_timeout_in_slots * 2) + } let nodes_to_synchronize (nodes : Node.t list) = let all_equal ls = Option.value_map (List.hd ls) ~default:true ~f:(fun h -> - [%equal: State_hash.t list] [h] - (List.find_all_dups ~compare:State_hash.compare ls) ) + [%equal: State_hash.t list] [ h ] + (List.find_all_dups ~compare:State_hash.compare ls)) in let check () state = let best_tips = List.map nodes ~f:(fun node -> - String.Map.find_exn state.best_tips_by_node (Node.id node) ) + String.Map.find_exn state.best_tips_by_node (Node.id node)) in if all_equal best_tips then Predicate_passed else 
Predicate_continuation () @@ -100,10 +104,11 @@ struct |> List.map ~f:(fun node -> "\"" ^ Node.id node ^ "\"") |> String.concat ~sep:", " in - { description= Printf.sprintf "%s to synchronize" formatted_nodes - ; predicate= Network_state_predicate (check (), check) - ; soft_timeout= Slots soft_timeout_in_slots - ; hard_timeout= Slots (soft_timeout_in_slots * 2) } + { description = Printf.sprintf "%s to synchronize" formatted_nodes + ; predicate = Network_state_predicate (check (), check) + ; soft_timeout = Slots soft_timeout_in_slots + ; hard_timeout = Slots (soft_timeout_in_slots * 2) + } let payment_to_be_included_in_frontier ~sender_pub_key ~receiver_pub_key ~amount = @@ -116,7 +121,7 @@ struct Signed_command.payload signed_cmd |> Signed_command_payload.body in match body with - | Payment {source_pk; receiver_pk; amount= paid_amt; token_id= _} + | Payment { source_pk; receiver_pk; amount = paid_amt; token_id = _ } when Public_key.Compressed.equal source_pk sender_pub_key && Public_key.Compressed.equal receiver_pk receiver_pub_key && Currency.Amount.equal paid_amt amount -> @@ -130,7 +135,7 @@ struct let payment_opt = List.find breadcrumb_added.user_commands ~f:(fun cmd_with_status -> cmd_with_status.With_status.data |> User_command.forget_check - |> command_matches_payment ) + |> command_matches_payment) in match payment_opt with | Some cmd_with_status -> @@ -152,12 +157,13 @@ struct Predicate_continuation () in let soft_timeout_in_slots = 8 in - { description= + { description = Printf.sprintf "payment from %s to %s of amount %s" (Public_key.Compressed.to_string sender_pub_key) (Public_key.Compressed.to_string receiver_pub_key) (Amount.to_string amount) - ; predicate= Event_predicate (Event_type.Breadcrumb_added, (), check) - ; soft_timeout= Slots soft_timeout_in_slots - ; hard_timeout= Slots (soft_timeout_in_slots * 2) } + ; predicate = Event_predicate (Event_type.Breadcrumb_added, (), check) + ; soft_timeout = Slots soft_timeout_in_slots + ; hard_timeout = 
Slots (soft_timeout_in_slots * 2) + } end diff --git a/src/lib/integration_test_lib/wait_condition.mli b/src/lib/integration_test_lib/wait_condition.mli index 6bc990ea0f8..253f292772d 100644 --- a/src/lib/integration_test_lib/wait_condition.mli +++ b/src/lib/integration_test_lib/wait_condition.mli @@ -4,8 +4,8 @@ module Make (Engine : Intf.Engine.S) (Event_router : Intf.Dsl.Event_router_intf with module Engine := Engine) (Network_state : Intf.Dsl.Network_state_intf - with module Engine := Engine - and module Event_router := Event_router) : sig + with module Engine := Engine + and module Event_router := Event_router) : sig type 'a predicate_result = | Predicate_passed | Predicate_continuation of 'a @@ -23,15 +23,16 @@ module Make -> predicate type t = - { description: string - ; predicate: predicate - ; soft_timeout: Network_time_span.t - ; hard_timeout: Network_time_span.t } + { description : string + ; predicate : predicate + ; soft_timeout : Network_time_span.t + ; hard_timeout : Network_time_span.t + } include Intf.Dsl.Wait_condition_intf - with type t := t - and module Engine := Engine - and module Event_router := Event_router - and module Network_state := Network_state + with type t := t + and module Engine := Engine + and module Event_router := Event_router + and module Network_state := Network_state end diff --git a/src/lib/interruptible/interruptible.ml b/src/lib/interruptible/interruptible.ml index aca68dc6503..7fa21056c50 100644 --- a/src/lib/interruptible/interruptible.ml +++ b/src/lib/interruptible/interruptible.ml @@ -3,7 +3,7 @@ open Async_kernel module T = struct type ('a, 's) t = - {interruption_signal: 's Ivar.t; d: ('a, 's) Deferred.Result.t} + { interruption_signal : 's Ivar.t; d : ('a, 's) Deferred.Result.t } let map_signal t ~f = let interruption_signal = @@ -13,10 +13,10 @@ module T = struct | None -> let interruption_signal = Ivar.create () in Deferred.upon (Ivar.read t.interruption_signal) (fun signal -> - Ivar.fill_if_empty 
interruption_signal (f signal) ) ; + Ivar.fill_if_empty interruption_signal (f signal)) ; interruption_signal in - {interruption_signal; d= Deferred.Result.map_error ~f t.d} + { interruption_signal; d = Deferred.Result.map_error ~f t.d } let map t ~f = match Ivar.peek t.interruption_signal with @@ -28,7 +28,8 @@ module T = struct let%map res = Deferred.choose [ Deferred.choice (Ivar.read t.interruption_signal) Result.fail - ; Deferred.choice t.d Fn.id ] + ; Deferred.choice t.d Fn.id + ] in (* If the interruption signal fires between [t.d] resolving and the scheduler running this code to call [f], we prefer the signal and @@ -40,12 +41,14 @@ module T = struct | Some e -> Error e in - {interruption_signal= t.interruption_signal; d} + { interruption_signal = t.interruption_signal; d } | Some e -> (* The interruption signal has already triggered, resolve to the signal's value. *) - {interruption_signal= t.interruption_signal; d= Deferred.Result.fail e} + { interruption_signal = t.interruption_signal + ; d = Deferred.Result.fail e + } let bind t ~f = let t : (('a, 's) t, 's) t = map ~f t in @@ -55,7 +58,7 @@ module T = struct | Ok t' -> Ivar.fill_if_empty t'.interruption_signal signal | Error _ -> - () ) ) ; + ())) ; let interruption_signal = match Ivar.peek t.interruption_signal with | Some interruption_signal -> @@ -69,7 +72,7 @@ module T = struct (Ivar.fill_if_empty interruption_signal) | Error signal -> (* [t] was interrupted by [signal], [f] was not run. *) - Ivar.fill_if_empty interruption_signal signal ) ; + Ivar.fill_if_empty interruption_signal signal) ; interruption_signal in Deferred.upon (Ivar.read interruption_signal) (fun signal -> @@ -85,26 +88,29 @@ module T = struct () | None -> (* The computation we bound hasn't resolved, interrupt it. 
*) - Ivar.fill_if_empty t.interruption_signal signal ) ; - {interruption_signal; d= Deferred.Result.bind t.d ~f:(fun t' -> t'.d)} + Ivar.fill_if_empty t.interruption_signal signal) ; + { interruption_signal; d = Deferred.Result.bind t.d ~f:(fun t' -> t'.d) } let return a = - {interruption_signal= Ivar.create (); d= Deferred.Result.return a} + { interruption_signal = Ivar.create (); d = Deferred.Result.return a } - let don't_wait_for {d; _} = + let don't_wait_for { d; _ } = don't_wait_for @@ Deferred.map d ~f:(function Ok () -> () | Error _ -> ()) let finally t ~f = - { interruption_signal= t.interruption_signal - ; d= Deferred.map t.d ~f:(fun r -> f () ; r) } + { interruption_signal = t.interruption_signal + ; d = Deferred.map t.d ~f:(fun r -> f () ; r) + } let uninterruptible d = - {interruption_signal= Ivar.create (); d= Deferred.map d ~f:(fun x -> Ok x)} + { interruption_signal = Ivar.create () + ; d = Deferred.map d ~f:(fun x -> Ok x) + } let lift d interrupt = let interruption_signal = Ivar.create () in Deferred.upon interrupt (Ivar.fill_if_empty interruption_signal) ; - {interruption_signal; d= Deferred.map d ~f:(fun x -> Ok x)} + { interruption_signal; d = Deferred.map d ~f:(fun x -> Ok x) } let force t = (* We use [map] here to prefer interrupt signals even where the underlying @@ -173,7 +179,7 @@ let%test_unit "monad gets interrupted" = let%bind () = wait 130. in Ivar.fill ivar () ; let%map () = wait 100. in - assert (!r = 1) ) + assert (!r = 1)) let%test_unit "monad gets interrupted within nested binds" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -193,7 +199,7 @@ let%test_unit "monad gets interrupted within nested binds" = let%bind () = wait 130. in Ivar.fill ivar () ; let%map () = wait 100. in - assert (!r = 1) ) + assert (!r = 1)) let%test_unit "interruptions still run finally blocks" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -213,7 +219,7 @@ let%test_unit "interruptions still run finally blocks" = let%bind () = wait 130. 
in Ivar.fill ivar () ; let%map () = wait 100. in - assert (!r = 2) ) + assert (!r = 2)) let%test_unit "interruptions branches do not cancel each other" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -246,4 +252,4 @@ let%test_unit "interruptions branches do not cancel each other" = Ivar.fill ivar_s () ; let%map () = wait 100. in assert (!r = 1) ; - assert (!s = 2) ) + assert (!s = 2)) diff --git a/src/lib/key_cache/key_cache.ml b/src/lib/key_cache/key_cache.ml index 8e678231cce..8d20725e0ae 100644 --- a/src/lib/key_cache/key_cache.ml +++ b/src/lib/key_cache/key_cache.ml @@ -1,16 +1,14 @@ open Core -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] module Spec = struct type t = - | On_disk of {directory: string; should_write: bool} - | S3 of {bucket_prefix: string; install_path: string} + | On_disk of { directory : string; should_write : bool } + | S3 of { bucket_prefix : string; install_path : string } end -[%%inject -"may_download", download_snark_keys] +[%%inject "may_download", download_snark_keys] let may_download = ref may_download @@ -20,7 +18,7 @@ module T (M : sig type _ t end) = struct - type ('a, 'b) t = {write: 'a -> 'b -> unit M.t; read: 'a -> 'b M.t} + type ('a, 'b) t = { write : 'a -> 'b -> unit M.t; read : 'a -> 'b M.t } end module Disk_storable (M : sig @@ -28,9 +26,10 @@ module Disk_storable (M : sig end) = struct type ('k, 'v) t = - { to_string: 'k -> string - ; read: 'k -> path:string -> 'v M.t - ; write: 'k -> 'v -> string -> unit M.t } + { to_string : 'k -> string + ; read : 'k -> path:string -> 'v M.t + ; write : 'k -> 'v -> string -> unit M.t + } end module type S = sig @@ -39,13 +38,14 @@ module type S = sig end type ('a, 'b) t = ('a, 'b) T(M).t = - {write: 'a -> 'b -> unit M.t; read: 'a -> 'b M.t} + { write : 'a -> 'b -> unit M.t; read : 'a -> 'b M.t } module Disk_storable : sig type ('k, 'v) t = ('k, 'v) Disk_storable(M).t = - { to_string: 'k -> string - ; read: 'k -> path:string -> 'v M.t - ; write: 'k -> 'v -> string -> 
unit M.t } + { to_string : 'k -> string + ; read : 'k -> path:string -> 'v M.t + ; write : 'k -> 'v -> string -> unit M.t + } val of_binable : ('k -> string) -> (module Binable.S with type t = 'v) -> ('k, 'v) t @@ -61,7 +61,7 @@ module type S = sig Spec.t list -> ('k, 'v) Disk_storable.t -> 'k - -> ('v * [> `Cache_hit | `Locally_generated]) M.t + -> ('v * [> `Cache_hit | `Locally_generated ]) M.t val write : Spec.t list -> ('k, 'v) Disk_storable.t -> 'k -> 'v -> unit M.t end @@ -82,12 +82,11 @@ module Sync : S with module M := Or_error = struct let write key v = match Sys.is_directory prefix with | `No | `Unknown -> - Or_error.errorf "directory %s does not exist or cannot be read" - prefix + Or_error.errorf "directory %s does not exist or cannot be read" prefix | `Yes -> write key v (path key) in - {read; write} + { read; write } let s3 to_string read ~bucket_prefix ~install_path = let read k = @@ -98,7 +97,9 @@ module Sync : S with module M := Or_error = struct let open Or_error.Let_syntax in [%log trace] "Downloading key to key cache" ~metadata: - [("url", `String uri_string); ("local_file_path", `String file_path)] ; + [ ("url", `String uri_string) + ; ("local_file_path", `String file_path) + ] ; let%bind () = Result.map_error (ksprintf Unix.system @@ -108,21 +109,24 @@ module Sync : S with module M := Or_error = struct Error.of_string (Unix.Exit.to_string_hum (Error e)) | `Signal s -> Error.createf "died after receiving %s (signal number %d)" - (Signal.to_string s) (Signal.to_system_int s) ) + (Signal.to_string s) (Signal.to_system_int s)) |> Result.map_error ~f:(fun err -> [%log trace] "Could not download key to key cache" ~metadata: [ ("url", `String uri_string) - ; ("local_file_path", `String file_path) ] ; - err ) + ; ("local_file_path", `String file_path) + ] ; + err) in [%log trace] "Downloaded key to key cache" ~metadata: - [("url", `String uri_string); ("local_file_path", `String file_path)] ; + [ ("url", `String uri_string) + ; ("local_file_path", 
`String file_path) + ] ; read k ~path:file_path in let write _ _ = Or_error.return () in - {read; write} + { read; write } module Disk_storable = struct include Disk_storable (Or_error) @@ -131,50 +135,51 @@ module Sync : S with module M := Or_error = struct (* TODO: Make more efficient *) let read _ ~path = Or_error.try_with (fun () -> - Binable.of_string m (In_channel.read_all path) ) + Binable.of_string m (In_channel.read_all path)) in let write _k t path = Or_error.try_with (fun () -> - Out_channel.write_all path ~data:(Binable.to_string m t) ) + Out_channel.write_all path ~data:(Binable.to_string m t)) in - {to_string; read; write} + { to_string; read; write } let simple to_string read write = { to_string - ; read= (fun k ~path -> read k ~path) - ; write= (fun k v s -> write k v s) } + ; read = (fun k ~path -> read k ~path) + ; write = (fun k v s -> write k v s) + } end - let read spec {Disk_storable.to_string; read= r; write= w} k = + let read spec { Disk_storable.to_string; read = r; write = w } k = Or_error.find_map_ok spec ~f:(fun s -> let res, cache_hit = match s with - | Spec.On_disk {directory; should_write} -> + | Spec.On_disk { directory; should_write } -> ( (on_disk to_string r w directory).read k , if should_write then `Locally_generated else `Cache_hit ) | S3 _ when not !may_download -> (Or_error.errorf "Downloading from S3 is disabled", `Cache_hit) - | S3 {bucket_prefix; install_path} -> + | S3 { bucket_prefix; install_path } -> Unix.mkdir_p install_path ; ((s3 to_string r ~bucket_prefix ~install_path).read k, `Cache_hit) in let%map.Or_error res = res in - (res, cache_hit) ) + (res, cache_hit)) - let write spec {Disk_storable.to_string; read= r; write= w} k v = + let write spec { Disk_storable.to_string; read = r; write = w } k v = let errs = List.filter_map spec ~f:(fun s -> let res = match s with - | Spec.On_disk {directory; should_write} -> + | Spec.On_disk { directory; should_write } -> if should_write then ( Unix.mkdir_p directory ; (on_disk 
to_string r w directory).write k v ) else Or_error.return () - | S3 {bucket_prefix= _; install_path= _} -> + | S3 { bucket_prefix = _; install_path = _ } -> Or_error.return () in - match res with Error e -> Some e | Ok () -> None ) + match res with Error e -> Some e | Ok () -> None) in match errs with [] -> Ok () | errs -> Error (Error.of_list errs) end @@ -202,7 +207,7 @@ module Async : S with module M := Async.Deferred.Or_error = struct | `Yes -> write key v (path key) in - {read; write} + { read; write } let s3 to_string read ~bucket_prefix ~install_path = let read k = @@ -213,28 +218,38 @@ module Async : S with module M := Async.Deferred.Or_error = struct let logger = Logger.create () in [%log trace] "Downloading key to key cache" ~metadata: - [("url", `String uri_string); ("local_file_path", `String file_path)] ; + [ ("url", `String uri_string) + ; ("local_file_path", `String file_path) + ] ; let%bind result = Process.run ~prog:"curl" ~args: - ["--fail"; "--silent"; "--show-error"; "-o"; file_path; uri_string] + [ "--fail" + ; "--silent" + ; "--show-error" + ; "-o" + ; file_path + ; uri_string + ] () |> Deferred.Result.map_error ~f:(fun err -> [%log debug] "Could not download key to key cache" ~metadata: [ ("url", `String uri_string) - ; ("local_file_path", `String file_path) ] ; - err ) + ; ("local_file_path", `String file_path) + ] ; + err) in [%log trace] "Downloaded key to key cache" ~metadata: [ ("url", `String uri_string) ; ("local_file_path", `String file_path) - ; ("result", `String result) ] ; + ; ("result", `String result) + ] ; read k ~path:file_path in let write _ _ = Deferred.Or_error.return () in - {read; write} + { read; write } module Disk_storable = struct include Disk_storable (Deferred.Or_error) @@ -246,44 +261,45 @@ module Async : S with module M := Async.Deferred.Or_error = struct (Writer.save_bin_prot path B.bin_writer_t t) ~f:Or_error.return in - {to_string; read; write} + { to_string; read; write } let simple to_string read write = { 
to_string - ; read= (fun k ~path -> read k ~path) - ; write= (fun v s -> write v s) } + ; read = (fun k ~path -> read k ~path) + ; write = (fun v s -> write v s) + } end - let read spec {Disk_storable.to_string; read= r; write= w} k = + let read spec { Disk_storable.to_string; read = r; write = w } k = Deferred.Or_error.find_map_ok spec ~f:(fun s -> let open Deferred.Or_error.Let_syntax in match s with - | Spec.On_disk {directory; should_write} -> + | Spec.On_disk { directory; should_write } -> let%map res = (on_disk to_string r w directory).read k in (res, if should_write then `Locally_generated else `Cache_hit) | S3 _ when not !may_download -> Deferred.Or_error.errorf "Downloading from S3 is disabled" - | S3 {bucket_prefix; install_path} -> + | S3 { bucket_prefix; install_path } -> let%bind.Deferred () = Unix.mkdir ~p:() install_path in let%map res = (s3 to_string r ~bucket_prefix ~install_path).read k in - (res, `Cache_hit) ) + (res, `Cache_hit)) - let write spec {Disk_storable.to_string; read= r; write= w} k v = + let write spec { Disk_storable.to_string; read = r; write = w } k v = let%map errs = Deferred.List.filter_map spec ~f:(fun s -> let res = match s with - | Spec.On_disk {directory; should_write} -> + | Spec.On_disk { directory; should_write } -> if should_write then let%bind () = Unix.mkdir ~p:() directory in (on_disk to_string r w directory).write k v else Deferred.Or_error.return () - | S3 {bucket_prefix= _; install_path= _} -> + | S3 { bucket_prefix = _; install_path = _ } -> Deferred.Or_error.return () in - match%map res with Error e -> Some e | Ok () -> None ) + match%map res with Error e -> Some e | Ok () -> None) in match errs with [] -> Ok () | errs -> Error (Error.of_list errs) end diff --git a/src/lib/key_cache/key_cache.mli b/src/lib/key_cache/key_cache.mli index 1ea59a0f2b7..da63e1c5e84 100644 --- a/src/lib/key_cache/key_cache.mli +++ b/src/lib/key_cache/key_cache.mli @@ -2,8 +2,8 @@ open Core_kernel module Spec : sig type t = - | On_disk 
of {directory: string; should_write: bool} - | S3 of {bucket_prefix: string; install_path: string} + | On_disk of { directory : string; should_write : bool } + | S3 of { bucket_prefix : string; install_path : string } end val set_downloads_enabled : bool -> unit @@ -11,16 +11,17 @@ val set_downloads_enabled : bool -> unit module T (M : sig type _ t end) : sig - type ('a, 'b) t = {write: 'a -> 'b -> unit M.t; read: 'a -> 'b M.t} + type ('a, 'b) t = { write : 'a -> 'b -> unit M.t; read : 'a -> 'b M.t } end module Disk_storable (M : sig type _ t end) : sig type ('k, 'v) t = - { to_string: 'k -> string - ; read: 'k -> path:string -> 'v M.t - ; write: 'k -> 'v -> string -> unit M.t } + { to_string : 'k -> string + ; read : 'k -> path:string -> 'v M.t + ; write : 'k -> 'v -> string -> unit M.t + } end module type S = sig @@ -29,13 +30,14 @@ module type S = sig end type ('a, 'b) t = ('a, 'b) T(M).t = - {write: 'a -> 'b -> unit M.t; read: 'a -> 'b M.t} + { write : 'a -> 'b -> unit M.t; read : 'a -> 'b M.t } module Disk_storable : sig type ('k, 'v) t = ('k, 'v) Disk_storable(M).t = - { to_string: 'k -> string - ; read: 'k -> path:string -> 'v M.t - ; write: 'k -> 'v -> string -> unit M.t } + { to_string : 'k -> string + ; read : 'k -> path:string -> 'v M.t + ; write : 'k -> 'v -> string -> unit M.t + } val of_binable : ('k -> string) -> (module Binable.S with type t = 'v) -> ('k, 'v) t @@ -51,7 +53,7 @@ module type S = sig Spec.t list -> ('k, 'v) Disk_storable.t -> 'k - -> ('v * [> `Cache_hit | `Locally_generated]) M.t + -> ('v * [> `Cache_hit | `Locally_generated ]) M.t val write : Spec.t list -> ('k, 'v) Disk_storable.t -> 'k -> 'v -> unit M.t end diff --git a/src/lib/key_value_database/key_value_database.ml b/src/lib/key_value_database/key_value_database.ml index 6c5f125ab8a..0d3c7c86653 100644 --- a/src/lib/key_value_database/key_value_database.ml +++ b/src/lib/key_value_database/key_value_database.ml @@ -76,19 +76,19 @@ end module Make_mock (Key : Hashable.S) (Value : 
sig - type t + type t end) : Intf.Mock - with type t = Value.t Key.Table.t - and type key := Key.t - and type value := Value.t - and type config := unit = struct + with type t = Value.t Key.Table.t + and type key := Key.t + and type value := Value.t + and type config := unit = struct type t = Value.t Key.Table.t let to_sexp t ~key_sexp ~value_sexp = Key.Table.to_alist t |> List.map ~f:(fun (key, value) -> - [%sexp_of: Sexp.t * Sexp.t] (key_sexp key, value_sexp value) ) + [%sexp_of: Sexp.t * Sexp.t] (key_sexp key, value_sexp value)) |> [%sexp_of: Sexp.t list] let create _ = Key.Table.create () diff --git a/src/lib/ledger_catchup/best_tip_lru.ml b/src/lib/ledger_catchup/best_tip_lru.ml index 80f5b27f48c..9e518114c02 100644 --- a/src/lib/ledger_catchup/best_tip_lru.ml +++ b/src/lib/ledger_catchup/best_tip_lru.ml @@ -19,8 +19,7 @@ let add (x : elt) = |> External_transition.Initial_validated.state_hash in if not (Q.mem t h) then ( - if Q.length t >= max_size then - ignore (Q.dequeue_front t : elt option); + if Q.length t >= max_size then ignore (Q.dequeue_front t : elt option) ; Q.enqueue_back_exn t h x ) else ignore (Q.lookup_and_move_to_back t h : elt option) diff --git a/src/lib/ledger_catchup/ledger_catchup.mli b/src/lib/ledger_catchup/ledger_catchup.mli index 78c5b544de0..b844d6112d5 100644 --- a/src/lib/ledger_catchup/ledger_catchup.mli +++ b/src/lib/ledger_catchup/ledger_catchup.mli @@ -17,25 +17,21 @@ val run : -> verifier:Verifier.t -> network:Mina_networking.t -> frontier:Transition_frontier.t - -> catchup_job_reader:( State_hash.t - * ( External_transition.Initial_validated.t - Envelope.Incoming.t - , State_hash.t ) - Cached.t - Rose_tree.t - list ) - Strict_pipe.Reader.t - -> catchup_breadcrumbs_writer:( ( Transition_frontier.Breadcrumb.t - , State_hash.t ) - Cached.t - Rose_tree.t - list - * [ `Ledger_catchup of unit Ivar.t - | `Catchup_scheduler ] - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t - -> 
unprocessed_transition_cache:Transition_handler - .Unprocessed_transition_cache - .t + -> catchup_job_reader: + ( State_hash.t + * ( External_transition.Initial_validated.t Envelope.Incoming.t + , State_hash.t ) + Cached.t + Rose_tree.t + list ) + Strict_pipe.Reader.t + -> catchup_breadcrumbs_writer: + ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t + list + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t + -> unprocessed_transition_cache: + Transition_handler.Unprocessed_transition_cache.t -> unit diff --git a/src/lib/ledger_catchup/normal_catchup.ml b/src/lib/ledger_catchup/normal_catchup.ml index 4bcb8d2a32d..61b28940bdd 100644 --- a/src/lib/ledger_catchup/normal_catchup.ml +++ b/src/lib/ledger_catchup/normal_catchup.ml @@ -57,8 +57,7 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier transition_with_hash |> External_transition.skip_time_received_validation `This_transition_was_not_received_via_gossip - |> External_transition.validate_genesis_protocol_state - ~genesis_state_hash + |> External_transition.validate_genesis_protocol_state ~genesis_state_hash >>= External_transition.validate_protocol_versions >>= External_transition.validate_delta_transition_chain in @@ -90,7 +89,7 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier Ok (`In_frontier hash) ) | Error (`Verifier_error error) -> [%log warn] - ~metadata:[("error", Error_json.error_to_yojson error)] + ~metadata:[ ("error", Error_json.error_to_yojson error) ] "verifier threw an error while verifying transiton queried during \ ledger catchup: $error" ; Deferred.Or_error.fail (Error.tag ~tag:"verifier threw an error" error) @@ -133,7 +132,8 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier ( External_transition.proposed_protocol_version_opt transition |> Option.value_map ~default:"" - ~f:Protocol_version.to_string ) ) ] 
) ) + ~f:Protocol_version.to_string ) ) + ] ) ) in Error (Error.of_string "invalid protocol version") | Error `Mismatched_protocol_version -> @@ -151,8 +151,8 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier ( External_transition.current_protocol_version transition |> Protocol_version.to_string ) ) ; ( "daemon_current_protocol_version" - , `String Protocol_version.(get_current () |> to_string) ) ] - ) ) + , `String Protocol_version.(get_current () |> to_string) ) + ] ) ) in Error (Error.of_string "mismatched protocol version") | Error `Disconnected -> @@ -184,7 +184,7 @@ let find_map_ok l ~f = | Error current_error -> `Repeat (tl, current_error :: errors) | Ok result -> - `Finished (Ok result) ) ) + `Finished (Ok result))) type download_state_hashes_error = [ `Peer_moves_too_fast of Error.t @@ -224,7 +224,7 @@ let to_error = function let download_state_hashes ~logger ~trust_system ~network ~frontier ~peers ~target_hash ~job ~hash_tree ~blockchain_length_of_target_hash = [%log debug] - ~metadata:[("target_hash", State_hash.to_yojson target_hash)] + ~metadata:[ ("target_hash", State_hash.to_yojson target_hash) ] "Doing a catchup job with target $target_hash" ; let blockchain_length_of_root = Transition_frontier.root frontier @@ -251,9 +251,7 @@ let download_state_hashes ~logger ~trust_system ~network ~frontier ~peers Deferred.Result.return hashes | None -> let error_msg = - sprintf - !"Peer %{sexp:Network_peer.Peer.t} sent us bad proof" - peer + sprintf !"Peer %{sexp:Network_peer.Peer.t} sent us bad proof" peer in let%bind.Deferred () = Trust_system.( @@ -276,7 +274,7 @@ let download_state_hashes ~logger ~trust_system ~network ~frontier ~peers (Ok (peer, Frontier_base.Breadcrumb.state_hash final, acc)) | None -> Continue_or_stop.Continue - (Unsigned.UInt32.pred blockchain_length, hash :: acc) ) + (Unsigned.UInt32.pred blockchain_length, hash :: acc)) ~finish:(fun (blockchain_length, acc) -> let module T = struct type t = State_hash.t list 
[@@deriving to_yojson] @@ -289,7 +287,8 @@ let download_state_hashes ~logger ~trust_system ~network ~frontier ~peers ~metadata: [ ("n", `Int (List.length acc)) ; ("hashes", T.to_yojson acc) - ; ("all_hashes", T.to_yojson all_hashes) ] + ; ("all_hashes", T.to_yojson all_hashes) + ] "Finishing download_state_hashes with $n $hashes. with \ $all_hashes" ; if @@ -300,28 +299,25 @@ let download_state_hashes ~logger ~trust_system ~network ~frontier ~peers Result.fail @@ `No_common_ancestor (Error.of_string - "Requested block doesn't have a path to the root of \ - our frontier") + "Requested block doesn't have a path to the root of our \ + frontier") else let err_msg = - sprintf - !"Peer %{sexp:Network_peer.Peer.t} moves too fast" - peer + sprintf !"Peer %{sexp:Network_peer.Peer.t} moves too fast" peer in - Result.fail @@ `Peer_moves_too_fast (Error.of_string err_msg) ) - ) + Result.fail @@ `Peer_moves_too_fast (Error.of_string err_msg))) >>| fun (peer, final, hashes) -> let (_ : State_hash.t) = List.fold hashes ~init:final ~f:(fun parent h -> Transition_frontier.Catchup_hash_tree.add hash_tree h ~parent ~job ; - h ) + h) in (peer, hashes) let verify_against_hashes transitions hashes = List.length transitions = List.length hashes && List.for_all2_exn transitions hashes ~f:(fun transition hash -> - State_hash.equal (External_transition.state_hash transition) hash ) + State_hash.equal (External_transition.state_hash transition) hash) let rec partition size = function | [] -> @@ -332,12 +328,15 @@ let rec partition size = function module Peers_pool = struct type t = - {preferred: Peer.t Queue.t; normal: Peer.t Queue.t; busy: Peer.Hash_set.t} + { preferred : Peer.t Queue.t + ; normal : Peer.t Queue.t + ; busy : Peer.Hash_set.t + } let create ~busy ~preferred peers = - {preferred= Queue.of_list preferred; normal= Queue.of_list peers; busy} + { preferred = Queue.of_list preferred; normal = Queue.of_list peers; busy } - let dequeue {preferred; normal; busy} = + let dequeue { 
preferred; normal; busy } = let find_available q = let n = Queue.length q in let rec go tried = @@ -360,11 +359,11 @@ module Peers_pool = struct | `Empty -> find_available normal | `All_busy -> ( - match find_available normal with - | `Available x -> - `Available x - | `Empty | `All_busy -> - `All_busy ) + match find_available normal with + | `Available x -> + `Available x + | `Empty | `All_busy -> + `All_busy ) end (* returns a list of transitions with old ones comes first *) @@ -376,7 +375,7 @@ let download_transitions ~target_hash ~logger ~trust_system ~network hashes_of_missing_transitions) ~how:`Parallel ~f:(fun hashes -> let%bind.Async.Deferred peers = Mina_networking.peers network in let peers = - Peers_pool.create ~busy ~preferred:[preferred_peer] + Peers_pool.create ~busy ~preferred:[ preferred_peer ] (List.permute peers) in let rec go errs = @@ -396,7 +395,8 @@ let download_transitions ~target_hash ~logger ~trust_system ~network ~metadata: [ ("n", `Int (List.length hashes)) ; ("peer", Peer.to_yojson peer) - ; ("target_hash", State_hash.to_yojson target_hash) ] + ; ("target_hash", State_hash.to_yojson target_hash) + ] "requesting $n blocks from $peer for catchup to \ $target_hash" ; let%bind transitions = @@ -410,7 +410,8 @@ let download_transitions ~target_hash ~logger ~trust_system ~network ~metadata: [ ("error", `String (Error.to_string_hum e)) ; ("n", `Int (List.length hashes)) - ; ("peer", Peer.to_yojson peer) ] + ; ("peer", Peer.to_yojson peer) + ] "$error from downloading $n blocks from $peer" ; Error e in @@ -422,7 +423,8 @@ let download_transitions ~target_hash ~logger ~trust_system ~network [%log debug] ~metadata: [ ("n", `Int (List.length transitions)) - ; ("peer", Peer.to_yojson peer) ] + ; ("peer", Peer.to_yojson peer) + ] "downloaded $n blocks from $peer" ; if not @@ verify_against_hashes transitions hashes then ( let error_msg = @@ -445,7 +447,7 @@ let download_transitions ~target_hash ~logger ~trust_system ~network ~hash_data:(Fn.const 
hash) in Envelope.Incoming.wrap_peer - ~data:transition_with_hash ~sender:peer ) ) + ~data:transition_with_hash ~sender:peer)) in Hash_set.remove busy peer ; match res with @@ -454,7 +456,7 @@ let download_transitions ~target_hash ~logger ~trust_system ~network | Error e -> go (e :: errs) ) in - go [] ) + go []) let verify_transitions_and_build_breadcrumbs ~logger ~(precomputed_values : Precomputed_values.t) ~trust_system ~verifier @@ -468,20 +470,19 @@ let verify_transitions_and_build_breadcrumbs ~logger match%bind External_transition.validate_proofs ~verifier (List.map transitions ~f:(fun t -> - External_transition.Validation.wrap (Envelope.Incoming.data t) - )) + External_transition.Validation.wrap (Envelope.Incoming.data t))) with | Ok tvs -> return (Ok (List.map2_exn transitions tvs ~f:(fun e data -> (* this does not update the envelope timestamps *) - {e with data} ))) + { e with data }))) | Error (`Verifier_error error) -> [%log warn] - ~metadata:[("error", Error_json.error_to_yojson error)] - "verifier threw an error while verifying transition queried \ - during ledger catchup: $error" ; + ~metadata:[ ("error", Error_json.error_to_yojson error) ] + "verifier threw an error while verifying transition queried during \ + ledger catchup: $error" ; Deferred.Or_error.fail (Error.tag ~tag:"verifier threw an error" error) | Error `Invalid_proof -> @@ -499,7 +500,8 @@ let verify_transitions_and_build_breadcrumbs ~logger , `Float Core.Time.( Span.to_sec - @@ diff verification_end_time verification_start_time) ) ] + @@ diff verification_end_time verification_start_time) ) + ] "verification of proofs complete" ; fold_until (List.rev tvs) ~init:[] ~f:(fun acc transition -> @@ -516,12 +518,11 @@ let verify_transitions_and_build_breadcrumbs ~logger list ) ; Deferred.Or_error.fail e | Ok (`In_frontier initial_hash) -> - Deferred.Or_error.return - @@ Continue_or_stop.Stop (acc, initial_hash) + Deferred.Or_error.return @@ Continue_or_stop.Stop (acc, initial_hash) | Ok 
(`Building_path transition_with_initial_validation) -> Deferred.Or_error.return @@ Continue_or_stop.Continue - (transition_with_initial_validation :: acc) ) + (transition_with_initial_validation :: acc)) ~finish:(fun acc -> let validation_end_time = Core.Time.now () in [%log debug] @@ -531,7 +532,8 @@ let verify_transitions_and_build_breadcrumbs ~logger , `Float Core.Time.( Span.to_sec - @@ diff validation_end_time verification_end_time) ) ] + @@ diff validation_end_time verification_end_time) ) + ] "validation of transitions complete" ; if List.length transitions <= 0 then Deferred.Or_error.return ([], target_hash) @@ -542,14 +544,13 @@ let verify_transitions_and_build_breadcrumbs ~logger let initial_state_hash = External_transition.parent_hash oldest_missing_transition in - Deferred.Or_error.return (acc, initial_state_hash) ) + Deferred.Or_error.return (acc, initial_state_hash)) in let build_start_time = Core.Time.now () in let trees_of_transitions = - Option.fold - (Non_empty_list.of_list_opt transitions_with_initial_validation) + Option.fold (Non_empty_list.of_list_opt transitions_with_initial_validation) ~init:subtrees ~f:(fun _ transitions -> - [Rose_tree.of_non_empty_list ~subtrees transitions] ) + [ Rose_tree.of_non_empty_list ~subtrees transitions ]) in let open Deferred.Let_syntax in match%bind @@ -563,7 +564,8 @@ let verify_transitions_and_build_breadcrumbs ~logger [ ("target_hash", State_hash.to_yojson target_hash) ; ( "time_elapsed" , `Float Core.Time.(Span.to_sec @@ diff (now ()) build_start_time) - ) ] + ) + ] "build of breadcrumbs complete" ; Deferred.Or_error.return result | Error e -> @@ -573,17 +575,18 @@ let verify_transitions_and_build_breadcrumbs ~logger ; ( "time_elapsed" , `Float Core.Time.(Span.to_sec @@ diff (now ()) build_start_time) ) - ; ("error", `String (Error.to_string_hum e)) ] + ; ("error", `String (Error.to_string_hum e)) + ] "build of breadcrumbs failed with $error" ; ( try ignore ( List.map transitions_with_initial_validation 
~f:Cached.invalidate_with_failure - : External_transition.Initial_validated.t Envelope.Incoming.t - list ) + : External_transition.Initial_validated.t Envelope.Incoming.t list + ) with e -> [%log error] - ~metadata:[("exn", `String (Exn.to_string e))] + ~metadata:[ ("exn", `String (Exn.to_string e)) ] "$exn in cached" ) ; Deferred.Or_error.fail e @@ -591,7 +594,7 @@ let garbage_collect_subtrees ~logger ~subtrees = List.iter subtrees ~f:(fun subtree -> ignore ( Rose_tree.map subtree ~f:Cached.invalidate_with_failure - : 'a Rose_tree.t ) ) ; + : 'a Rose_tree.t )) ; [%log trace] "garbage collected failed cached transitions" let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier @@ -599,7 +602,7 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier ~(catchup_breadcrumbs_writer : ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Ledger_catchup of unit Ivar.t | `Catchup_scheduler] + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] , Strict_pipe.crash Strict_pipe.buffered , unit ) Strict_pipe.Writer.t) ~unprocessed_transition_cache : unit = @@ -609,8 +612,8 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier t | Full _ -> failwith - "If normal catchup is running, the frontier should have a hash \ - tree, got a full one." + "If normal catchup is running, the frontier should have a hash tree, \ + got a full one." 
in don't_wait_for (Strict_pipe.Reader.iter_without_pushback catchup_job_reader @@ -624,7 +627,7 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier don't_wait_for (let start_time = Core.Time.now () in [%log info] "Catch up to $target_hash" - ~metadata:[("target_hash", State_hash.to_yojson target_hash)] ; + ~metadata:[ ("target_hash", State_hash.to_yojson target_hash) ] ; let%bind () = Catchup_jobs.incr () in let blockchain_length_of_target_hash = let blockchain_length_of_dangling_block = @@ -644,9 +647,9 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier | Local -> acc_inner | Remote peer -> - peer :: acc_inner ) + peer :: acc_inner) in - cached_peers @ acc_outer ) + cached_peers @ acc_outer) |> List.dedup_and_sort ~compare:Peer.compare in match%bind @@ -669,14 +672,15 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier [ ( "errors" , `List (List.map errors ~f:(fun err -> - `String (display_error err) )) ) ] ; + `String (display_error err))) ) + ] ; let%bind random_peers = Mina_networking.peers network >>| List.permute in match%bind download_state_hashes ~hash_tree ~logger ~trust_system - ~network ~frontier ~peers:random_peers ~target_hash - ~job ~blockchain_length_of_target_hash + ~network ~frontier ~peers:random_peers ~target_hash ~job + ~blockchain_length_of_target_hash with | Ok (peer, hashes) -> return (Ok (peer, hashes)) @@ -687,7 +691,8 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier [ ( "errors" , `List (List.map errors ~f:(fun err -> - `String (display_error err) )) ) ] ; + `String (display_error err))) ) + ] ; if contains_no_common_ancestor errors then List.iter subtrees ~f:(fun subtree -> let transition = @@ -705,7 +710,7 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier Cached.peek cached_transition |> Envelope.Incoming.data |> External_transition.Initial_validated - .state_hash ) + .state_hash) in 
[%log error] ~metadata: @@ -725,15 +730,15 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier , External_transition.Initial_validated .protocol_state transition |> Mina_state.Protocol_state - .value_to_yojson ) ] + .value_to_yojson ) + ] "Validation error: external transition with \ state hash $state_hash and its children were \ rejected for reason $reason" ; Mina_metrics.( Counter.inc Rejected_blocks.no_common_ancestor ( Float.of_int - @@ (1 + List.length children_transitions) )) - ) ; + @@ (1 + List.length children_transitions) ))) ; return (Error (Error.of_list @@ List.map errors ~f:to_error)) ) @@ -746,7 +751,8 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier [ ( "hashes_of_missing_transitions" , `List (List.map hashes_of_missing_transitions - ~f:State_hash.to_yojson) ) ] + ~f:State_hash.to_yojson) ) + ] !"Number of missing transitions is %d" num_of_missing_transitions ; let%bind transitions = @@ -757,7 +763,7 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier ~preferred_peer ~hashes_of_missing_transitions ~target_hash in [%log debug] - ~metadata:[("target_hash", State_hash.to_yojson target_hash)] + ~metadata:[ ("target_hash", State_hash.to_yojson target_hash) ] "Download transitions complete" ; verify_transitions_and_build_breadcrumbs ~logger ~precomputed_values ~trust_system ~verifier ~frontier @@ -774,8 +780,9 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier (fun breadcrumb -> Cached.peek breadcrumb |> Transition_frontier.Breadcrumb.state_hash - |> State_hash.to_yojson ) - tree )) ) ] + |> State_hash.to_yojson) + tree)) ) + ] "about to write to the catchup breadcrumbs pipe" ; if Strict_pipe.Writer.is_closed catchup_breadcrumbs_writer then ( [%log trace] @@ -799,16 +806,16 @@ let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier Catchup_jobs.decr () | Error e -> [%log warn] - ~metadata:[("error", 
Error_json.error_to_yojson e)] - "Catchup process failed -- unable to receive valid data \ - from peers or transition frontier progressed faster than \ - catchup data received. See error for details: $error" ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + "Catchup process failed -- unable to receive valid data from \ + peers or transition frontier progressed faster than catchup \ + data received. See error for details: $error" ; notify_hash_tree_of_failure () ; garbage_collect_subtrees ~logger ~subtrees ; Mina_metrics.( Gauge.set Transition_frontier_controller.catchup_time_ms Core.Time.(Span.to_ms @@ diff (now ()) start_time)) ; - Catchup_jobs.decr ()) )) + Catchup_jobs.decr ()))) let%test_module "Ledger_catchup tests" = ( module struct @@ -834,13 +841,12 @@ let%test_module "Ledger_catchup tests" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) let downcast_transition transition = let transition = transition - |> External_transition.Validation - .reset_frontier_dependencies_validation + |> External_transition.Validation.reset_frontier_dependencies_validation |> External_transition.Validation.reset_staged_ledger_diff_validation in Envelope.Incoming.wrap ~data:transition ~sender:Envelope.Sender.Local @@ -850,8 +856,8 @@ let%test_module "Ledger_catchup tests" = (Transition_frontier.Breadcrumb.validated_transition breadcrumb) type catchup_test = - { cache: Transition_handler.Unprocessed_transition_cache.t - ; job_writer: + { cache : Transition_handler.Unprocessed_transition_cache.t + ; job_writer : ( State_hash.t * ( External_transition.Initial_validated.t Envelope.Incoming.t , State_hash.t ) @@ -861,12 +867,12 @@ let%test_module "Ledger_catchup tests" = , Strict_pipe.crash Strict_pipe.buffered , unit ) Strict_pipe.Writer.t - ; breadcrumbs_reader: - ( 
(Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t - Rose_tree.t + ; breadcrumbs_reader : + ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Catchup_scheduler | `Ledger_catchup of unit Ivar.t] ) - Strict_pipe.Reader.t } + * [ `Catchup_scheduler | `Ledger_catchup of unit Ivar.t ] ) + Strict_pipe.Reader.t + } let run_catchup ~network ~frontier = let catchup_job_reader, catchup_job_writer = @@ -880,12 +886,13 @@ let%test_module "Ledger_catchup tests" = let unprocessed_transition_cache = Transition_handler.Unprocessed_transition_cache.create ~logger in - run ~logger ~precomputed_values ~verifier ~trust_system ~network - ~frontier ~catchup_breadcrumbs_writer ~catchup_job_reader + run ~logger ~precomputed_values ~verifier ~trust_system ~network ~frontier + ~catchup_breadcrumbs_writer ~catchup_job_reader ~unprocessed_transition_cache ; - { cache= unprocessed_transition_cache - ; job_writer= catchup_job_writer - ; breadcrumbs_reader= catchup_breadcrumbs_reader } + { cache = unprocessed_transition_cache + ; job_writer = catchup_job_writer + ; breadcrumbs_reader = catchup_breadcrumbs_reader + } let run_catchup_with_target ~network ~frontier ~target_breadcrumb = let test = run_catchup ~network ~frontier in @@ -897,13 +904,13 @@ let%test_module "Ledger_catchup tests" = (downcast_breadcrumb target_breadcrumb) in Strict_pipe.Writer.write test.job_writer - (parent_hash, [Rose_tree.T (target_transition, [])]) ; + (parent_hash, [ Rose_tree.T (target_transition, []) ]) ; (`Test test, `Cached_transition target_transition) let test_successful_catchup ~my_net ~target_best_tip_path = let open Fake_network in let target_breadcrumb = List.last_exn target_best_tip_path in - let `Test {breadcrumbs_reader; _}, _ = + let `Test { breadcrumbs_reader; _ }, _ = run_catchup_with_target ~network:my_net.network ~frontier:my_net.state.frontier ~target_breadcrumb in @@ -936,7 +943,7 @@ let%test_module "Ledger_catchup tests" = 
(Transition_frontier.Breadcrumb.validated_transition breadcrumb_tree1) (Transition_frontier.Breadcrumb.validated_transition - breadcrumb_tree2) ) + breadcrumb_tree2)) in if not catchup_breadcrumbs_are_best_tip_path then failwith @@ -951,10 +958,11 @@ let%test_module "Ledger_catchup tests" = in gen ~precomputed_values ~verifier ~max_frontier_length [ fresh_peer - ; peer_with_branch ~frontier_branch_size:peer_branch_size ]) + ; peer_with_branch ~frontier_branch_size:peer_branch_size + ]) ~f:(fun network -> let open Fake_network in - let [my_net; peer_net] = network.peer_networks in + let [ my_net; peer_net ] = network.peer_networks in (* TODO: I don't think I'm testing this right... *) let target_best_tip_path = Transition_frontier.( @@ -962,22 +970,22 @@ let%test_module "Ledger_catchup tests" = (best_tip peer_net.state.frontier)) in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path)) let%test_unit "catchup succeeds even if the parent transition is already \ in the frontier" = Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length - [fresh_peer; peer_with_branch ~frontier_branch_size:1]) + [ fresh_peer; peer_with_branch ~frontier_branch_size:1 ]) ~f:(fun network -> let open Fake_network in - let [my_net; peer_net] = network.peer_networks in + let [ my_net; peer_net ] = network.peer_networks in let target_best_tip_path = - [Transition_frontier.best_tip peer_net.state.frontier] + [ Transition_frontier.best_tip peer_net.state.frontier ] in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path)) let%test_unit "catchup fails if one of the parent transitions fail" = Quickcheck.test ~trials:1 @@ -988,7 +996,7 @@ let%test_module "Ledger_catchup tests" = ]) ~f:(fun network -> let open Fake_network in - let [my_net; 
peer_net] = network.peer_networks in + let [ my_net; peer_net ] = network.peer_networks in let target_breadcrumb = Transition_frontier.best_tip peer_net.state.frontier in @@ -1006,7 +1014,7 @@ let%test_module "Ledger_catchup tests" = (Frontier_base.Root_data.Historical.transition failing_root_data) in Thread_safe.block_on_async_exn (fun () -> - let `Test {cache; _}, `Cached_transition cached_transition = + let `Test { cache; _ }, `Cached_transition cached_transition = run_catchup_with_target ~network:my_net.network ~frontier:my_net.state.frontier ~target_breadcrumb in @@ -1025,9 +1033,8 @@ let%test_module "Ledger_catchup tests" = ~timeout_duration:(Block_time.Span.of_ms 10000L) (Ivar.read (Cache_lib.Cached.final_state cached_transition)) in - if not ([%equal: [`Failed | `Success of _]] result `Failed) then - failwith "expected ledger catchup to fail, but it succeeded" ) - ) + if not ([%equal: [ `Failed | `Success of _ ]] result `Failed) then + failwith "expected ledger catchup to fail, but it succeeded")) (* TODO: fix and re-enable *) (* diff --git a/src/lib/ledger_catchup/super_catchup.ml b/src/lib/ledger_catchup/super_catchup.ml index 36ad9a0004a..efeb42898d2 100644 --- a/src/lib/ledger_catchup/super_catchup.ml +++ b/src/lib/ledger_catchup/super_catchup.ml @@ -55,7 +55,7 @@ module G = Graph.Graphviz.Dot (struct end module E = struct - type t = {parent: Node.t; child: Node.t} + type t = { parent : Node.t; child : Node.t } let src t = t.parent @@ -70,13 +70,13 @@ module G = Graph.Graphviz.Dot (struct | None -> () | Some parent -> - f {child; parent} ) + f { child; parent }) - let graph_attributes (_ : t) = [`Rankdir `LeftToRight] + let graph_attributes (_ : t) = [ `Rankdir `LeftToRight ] let get_subgraph _ = None - let default_vertex_attributes _ = [`Shape `Circle] + let default_vertex_attributes _ = [ `Shape `Circle ] let vertex_attributes (v : Node.t) = let color = @@ -103,7 +103,7 @@ module G = Graph.Graphviz.Dot (struct (* orange *) 0xFF9933 in - [`Shape 
`Circle; `Style `Filled; `Fillcolor color] + [ `Shape `Circle; `Style `Filled; `Fillcolor color ] let vertex_name (node : V.t) = sprintf "\"%s\"" (State_hash.to_base58_check node.state_hash) @@ -128,8 +128,7 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier transition_with_hash |> External_transition.skip_time_received_validation `This_transition_was_not_received_via_gossip - |> External_transition.validate_genesis_protocol_state - ~genesis_state_hash + |> External_transition.validate_genesis_protocol_state ~genesis_state_hash >>= External_transition.validate_protocol_versions >>= External_transition.validate_delta_transition_chain in @@ -161,7 +160,7 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier Ok (`In_frontier hash) ) | Error (`Verifier_error error) -> [%log warn] - ~metadata:[("error", Error_json.error_to_yojson error)] + ~metadata:[ ("error", Error_json.error_to_yojson error) ] "verifier threw an error while verifying transiton queried during \ ledger catchup: $error" ; Deferred.Or_error.fail (Error.tag ~tag:"verifier threw an error" error) @@ -204,7 +203,8 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier ( External_transition.proposed_protocol_version_opt transition |> Option.value_map ~default:"" - ~f:Protocol_version.to_string ) ) ] ) ) + ~f:Protocol_version.to_string ) ) + ] ) ) in Error (Error.of_string "invalid protocol version") | Error `Mismatched_protocol_version -> @@ -222,8 +222,8 @@ let verify_transition ~logger ~consensus_constants ~trust_system ~frontier ( External_transition.current_protocol_version transition |> Protocol_version.to_string ) ) ; ( "daemon_current_protocol_version" - , `String Protocol_version.(get_current () |> to_string) ) ] - ) ) + , `String Protocol_version.(get_current () |> to_string) ) + ] ) ) in Error (Error.of_string "mismatched protocol version") | Error `Disconnected -> @@ -240,14 +240,15 @@ let find_map_ok ?how xs ~f = match%map 
choose [ choice (Ivar.read res) (fun _ -> `Finished) - ; choice (f x) (fun x -> `Ok x) ] + ; choice (f x) (fun x -> `Ok x) + ] with | `Finished -> () | `Ok (Ok x) -> Ivar.fill_if_empty res (Ok x) | `Ok (Error e) -> - errs := e :: !errs ) + errs := e :: !errs) in Ivar.fill_if_empty res (Error !errs)) ; Ivar.read res @@ -289,26 +290,26 @@ let try_to_connect_hash_chain t hashes ~frontier | None, Some b -> f (`Breadcrumb b) | None, None -> - Continue (Unsigned.UInt32.pred blockchain_length, hash :: acc) ) + Continue (Unsigned.UInt32.pred blockchain_length, hash :: acc)) ~finish:(fun (blockchain_length, acc) -> let module T = struct type t = State_hash.t list [@@deriving to_yojson] end in let all_hashes = List.map (Transition_frontier.all_breadcrumbs frontier) ~f:(fun b -> - Frontier_base.Breadcrumb.state_hash b ) + Frontier_base.Breadcrumb.state_hash b) in [%log debug] ~metadata: [ ("n", `Int (List.length acc)) ; ("hashes", T.to_yojson acc) - ; ("all_hashes", T.to_yojson all_hashes) ] + ; ("all_hashes", T.to_yojson all_hashes) + ] "Finishing download_state_hashes with $n $hashes. 
with $all_hashes" ; if - Unsigned.UInt32.compare blockchain_length blockchain_length_of_root - <= 0 + Unsigned.UInt32.compare blockchain_length blockchain_length_of_root <= 0 then Result.fail `No_common_ancestor - else Result.fail `Peer_moves_too_fast ) + else Result.fail `Peer_moves_too_fast) module Downloader = struct module Key = struct @@ -316,11 +317,7 @@ module Downloader = struct type t = State_hash.t * Length.t [@@deriving to_yojson, hash, sexp] let compare (h1, n1) (h2, n2) = - match Length.compare n1 n2 with - | 0 -> - State_hash.compare h1 h2 - | c -> - c + match Length.compare n1 n2 with 0 -> State_hash.compare h1 h2 | c -> c end include T @@ -333,7 +330,7 @@ module Downloader = struct (struct include Attempt_history.Attempt - let download : t = {failure_reason= `Download} + let download : t = { failure_reason = `Download } let worth_retrying (t : t) = match t.failure_reason with `Download -> true | _ -> false @@ -353,13 +350,13 @@ let with_lengths hs ~target_length = List.filter_mapi (Non_empty_list.to_list hs) ~f:(fun i x -> let open Option.Let_syntax in let%map x_len = Length.sub target_length (Length.of_int i) in - (x, x_len) ) + (x, x_len)) (* returns a list of state-hashes with the older ones at the front *) let download_state_hashes t ~logger ~trust_system ~network ~frontier ~target_hash ~target_length ~downloader ~blockchain_length_of_target_hash = [%log debug] - ~metadata:[("target_hash", State_hash.to_yojson target_hash)] + ~metadata:[ ("target_hash", State_hash.to_yojson target_hash) ] "Doing a catchup job with target $target_hash" ; let%bind peers = (* TODO: Find some preferred peers, e.g., whoever told us about this target_hash *) @@ -391,9 +388,7 @@ let download_state_hashes t ~logger ~trust_system ~network ~frontier Deferred.Result.return hs | None -> let error_msg = - sprintf - !"Peer %{sexp:Network_peer.Peer.t} sent us bad proof" - peer + sprintf !"Peer %{sexp:Network_peer.Peer.t} sent us bad proof" peer in let%bind.Deferred () = 
Trust_system.( @@ -413,7 +408,7 @@ let download_state_hashes t ~logger ~trust_system ~network ~frontier Downloader.mark_preferred downloader peer ~now ; Ok x | Error e -> - Error e ) ) + Error e )) let get_state_hashes = () @@ -429,7 +424,7 @@ module Initial_validate_batcher = struct create ~logger: (Logger.create - ~metadata:[("name", `String "initial_validate_batcher")] + ~metadata:[ ("name", `String "initial_validate_batcher") ] ()) ~how_to_add:`Insert ~max_weight_per_call:1000 ~weight:(fun _ -> 1) @@ -441,12 +436,12 @@ module Initial_validate_batcher = struct | 0 -> compare_envelope e1 e2 | c -> - c ) + c) (fun xs -> let input = function `Partially_validated x | `Init x -> x in List.map xs ~f:(fun x -> External_transition.Validation.wrap - (Envelope.Incoming.data (input x)) ) + (Envelope.Incoming.data (input x))) |> External_transition.validate_proofs ~verifier >>| function | Ok tvs -> @@ -454,7 +449,7 @@ module Initial_validate_batcher = struct | Error `Invalid_proof -> Ok (List.map xs ~f:(fun x -> `Potentially_invalid (input x))) | Error (`Verifier_error e) -> - Error e ) + Error e) let verify (t : _ t) = verify t end @@ -474,10 +469,10 @@ module Verify_work_batcher = struct in create ~logger: - (Logger.create ~metadata:[("name", `String "verify_work_batcher")] ()) + (Logger.create ~metadata:[ ("name", `String "verify_work_batcher") ] ()) ~weight:(fun (x : input) -> - List.fold ~init:0 (works x) ~f:(fun acc {proofs; _} -> - acc + One_or_two.length proofs ) ) + List.fold ~init:0 (works x) ~f:(fun acc { proofs; _ } -> + acc + One_or_two.length proofs)) ~max_weight_per_call:1000 ~how_to_add:`Insert ~compare_init:(fun e1 e2 -> let len (x : input) = @@ -487,7 +482,7 @@ module Verify_work_batcher = struct | 0 -> compare_envelope e1 e2 | c -> - c ) + c) (fun xs -> let input : _ -> input = function | `Partially_validated x | `Init x -> @@ -495,10 +490,10 @@ module Verify_work_batcher = struct in List.concat_map xs ~f:(fun x -> works (input x) - |> List.concat_map 
~f:(fun {fee; prover; proofs} -> + |> List.concat_map ~f:(fun { fee; prover; proofs } -> let msg = Sok_message.create ~fee ~prover in One_or_two.to_list - (One_or_two.map proofs ~f:(fun p -> (p, msg))) ) ) + (One_or_two.map proofs ~f:(fun p -> (p, msg))))) |> Verifier.verify_transaction_snarks verifier >>| function | Ok true -> @@ -506,7 +501,7 @@ module Verify_work_batcher = struct | Ok false -> Ok (List.map xs ~f:(fun x -> `Potentially_invalid (input x))) | Error e -> - Error e ) + Error e) let verify (t : _ t) = verify t end @@ -520,7 +515,7 @@ let initial_validate ~(precomputed_values : Precomputed_values.t) ~logger let open Deferred.Let_syntax in match%bind Initial_validate_batcher.verify batcher transition with | Ok (Ok tv) -> - return (Ok {transition with data= tv}) + return (Ok { transition with data = tv }) | Ok (Error ()) -> let s = "proof failed to verify" in [%log warn] "%s" s ; @@ -536,7 +531,7 @@ let initial_validate ~(precomputed_values : Precomputed_values.t) ~logger Error (`Error (Error.of_string s)) | Error e -> [%log warn] - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] "verification of blockchain snark failed but it was our fault" ; return (Error `Couldn't_reach_verifier) in @@ -547,7 +542,8 @@ let initial_validate ~(precomputed_values : Precomputed_values.t) ~logger , `Float Core.Time.( Span.to_sec @@ diff verification_end_time verification_start_time) - ) ] + ) + ] "verification of proofs complete" ; verify_transition ~logger ~consensus_constants:precomputed_values.consensus_constants ~trust_system @@ -561,12 +557,12 @@ let check_invariant ~downloader t = [%test_eq: int] (Downloader.total_jobs downloader) (Hashtbl.count t.nodes ~f:(fun node -> - Node.State.Enum.equal (Node.State.enum node.state) To_download )) + Node.State.Enum.equal (Node.State.enum node.state) To_download)) let download s d ~key ~attempts = let logger = Logger.create () in [%log debug] - ~metadata:[("key", 
Downloader.Key.to_yojson key); ("caller", `String s)] + ~metadata:[ ("key", Downloader.Key.to_yojson key); ("caller", `String s) ] "Download download $key" ; Downloader.download d ~key ~attempts @@ -597,16 +593,16 @@ let create_node ~downloader t x = , Ivar.create () ) in let node = - {Node.state; state_hash= h; blockchain_length; attempts; parent; result} + { Node.state; state_hash = h; blockchain_length; attempts; parent; result } in upon (Ivar.read node.result) (fun _ -> - Downloader.cancel downloader (h, blockchain_length) ) ; + Downloader.cancel downloader (h, blockchain_length)) ; Hashtbl.incr t.states (Node.State.enum node.state) ; Hashtbl.set t.nodes ~key:h ~data:node ; ( try check_invariant ~downloader t with e -> [%log' debug t.logger] - ~metadata:[("exn", `String (Exn.to_string e))] + ~metadata:[ ("exn", `String (Exn.to_string e)) ] "create_node $exn" ) ; write_graph t ; node @@ -626,9 +622,9 @@ let pick ~constants y let forest_pick forest = - with_return (fun {return} -> + with_return (fun { return } -> List.iter forest ~f:(Rose_tree.iter ~f:return) ; - assert false ) + assert false) (* TODO: In the future, this could take over scheduling bootstraps too. 
*) let run ~logger ~trust_system ~verifier ~network ~frontier @@ -643,7 +639,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ~(catchup_breadcrumbs_writer : ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Ledger_catchup of unit Ivar.t | `Catchup_scheduler] + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] , Strict_pipe.crash Strict_pipe.buffered , unit ) Strict_pipe.Writer.t) = @@ -674,15 +670,16 @@ let run ~logger ~trust_system ~verifier ~network ~frontier let f tree = let best = ref None in Rose_tree.iter tree - ~f:(fun (x : - ( External_transition.Initial_validated.t Envelope.Incoming.t - , _ ) - Cached.t) + ~f:(fun + (x : + ( External_transition.Initial_validated.t Envelope.Incoming.t + , _ ) + Cached.t) -> let x, _ = (Cached.peek x).data in best := combine !best - (Some (With_hash.map ~f:External_transition.protocol_state x)) ) ; + (Some (With_hash.map ~f:External_transition.protocol_state x))) ; !best in List.map trees ~f |> List.reduce ~f:combine |> Option.join @@ -702,15 +699,15 @@ let run ~logger ~trust_system ~verifier ~network ~frontier | Error _ -> `Some [] | Ok p -> ( - match - Transition_chain_verifier.verify ~target_hash:h - ~transition_chain_proof:p - with - | Some hs -> - let ks = with_lengths hs ~target_length:len in - `Some ks - | None -> - `Some [] ) ) + match + Transition_chain_verifier.verify ~target_hash:h + ~transition_chain_proof:p + with + | Some hs -> + let ks = with_lengths hs ~target_length:len in + `Some ks + | None -> + `Some [] ) ) in Downloader.create ~stop ~trust_system ~preferred:[] ~max_batch_size:5 ~get:(fun peer hs -> @@ -724,7 +721,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier in Mina_networking.get_transition_chain ~heartbeat_timeout:(Time_ns.Span.of_sec sec) - ~timeout:(Time.Span.of_sec sec) network peer (List.map hs ~f:fst) ) + ~timeout:(Time.Span.of_sec sec) network peer (List.map hs ~f:fst)) ~peers:(fun () -> Mina_networking.peers network) 
~knowledge_context: (Broadcast_pipe.map best_tip_r @@ -732,16 +729,16 @@ let run ~logger ~trust_system ~verifier ~network ~frontier (Option.map ~f:(fun (x : _ With_hash.t) -> ( x.hash , Mina_state.Protocol_state.consensus_state x.data - |> Consensus.Data.Consensus_state.blockchain_length ) ))) + |> Consensus.Data.Consensus_state.blockchain_length )))) ~knowledge in check_invariant ~downloader t ; let () = Downloader.set_check_invariant (fun downloader -> - check_invariant ~downloader t ) + check_invariant ~downloader t) in every ~stop (Time.Span.of_sec 10.) (fun () -> - [%log debug] ~metadata:[("states", to_yojson t)] "Catchup states" ) ; + [%log debug] ~metadata:[ ("states", to_yojson t) ] "Catchup states") ; let initial_validation_batcher = Initial_validate_batcher.create ~verifier in let verify_work_batcher = Verify_work_batcher.create ~verifier in let set_state t node s = @@ -749,7 +746,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier try check_invariant ~downloader t with e -> [%log' debug t.logger] - ~metadata:[("exn", `String (Exn.to_string e))] + ~metadata:[ ("exn", `String (Exn.to_string e)) ] "set_state $exn" in let rec run_node (node : Node.t) = @@ -759,15 +756,15 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ~metadata: [ ( "error" , Option.value_map ~default:`Null error ~f:(fun e -> - `String (Error.to_string_hum e) ) ) + `String (Error.to_string_hum e)) ) ; ("reason", Attempt_history.Attempt.reason_to_yojson failure_reason) ] ; - node.attempts - <- ( match sender with - | Envelope.Sender.Local -> - node.attempts - | Remote peer -> - Map.set node.attempts ~key:peer ~data:{failure_reason} ) ; + node.attempts <- + ( match sender with + | Envelope.Sender.Local -> + node.attempts + | Remote peer -> + Map.set node.attempts ~key:peer ~data:{ failure_reason } ) ; set_state t node (To_download (download "failed" downloader @@ -775,9 +772,9 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ~attempts:node.attempts)) ; 
run_node node in - let step d : (_, [`Finished]) Deferred.Result.t = + let step d : (_, [ `Finished ]) Deferred.Result.t = (* TODO: See if the bail out is happening. *) - Deferred.any [(Ivar.read node.result >>| fun _ -> Error `Finished); d] + Deferred.any [ (Ivar.read node.result >>| fun _ -> Error `Finished); d ] in let open Deferred.Result.Let_syntax in let retry () = @@ -799,17 +796,18 @@ let run ~logger ~trust_system ~verifier ~network ~frontier (Hashtbl.count t.nodes ~f:(fun node -> Node.State.Enum.equal (Node.State.enum node.state) - To_download )) ) + To_download)) ) ; ("total_nodes", `Int (Hashtbl.length t.nodes)) ; ( "node_states" , let s = Node.State.Enum.Table.create () in Hashtbl.iter t.nodes ~f:(fun node -> - Hashtbl.incr s (Node.State.enum node.state) ) ; + Hashtbl.incr s (Node.State.enum node.state)) ; `List (List.map (Hashtbl.to_alist s) ~f:(fun (k, v) -> - `List [Node.State.Enum.to_yojson k; `Int v] )) ) + `List [ Node.State.Enum.to_yojson k; `Int v ])) ) ; ("total_jobs", `Int (Downloader.total_jobs downloader)) - ; ("downloader", Downloader.to_yojson downloader) ] + ; ("downloader", Downloader.to_yojson downloader) + ] "download finished $state_hash" ; node.attempts <- attempts ; set_state t node (To_initial_validate b) ; @@ -820,7 +818,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ( initial_validate ~precomputed_values ~logger ~trust_system ~batcher:initial_validation_batcher ~frontier ~unprocessed_transition_cache - {b with data= {With_hash.data= b.data; hash= state_hash}} + { b with data = { With_hash.data = b.data; hash = state_hash } } |> Deferred.map ~f:(fun x -> Ok x) ) with | Error (`Error e) -> @@ -841,18 +839,18 @@ let run ~logger ~trust_system ~verifier ~network ~frontier match%bind step (* TODO: give the batch verifier a way to somehow throw away stuff if - this node gets removed from the tree. *) + this node gets removed from the tree. 
*) ( Verify_work_batcher.verify verify_work_batcher iv |> Deferred.map ~f:Result.return ) with | Error _e -> [%log' debug t.logger] "Couldn't reach verifier. Retrying" - ~metadata:[("state_hash", State_hash.to_yojson node.state_hash)] ; + ~metadata:[ ("state_hash", State_hash.to_yojson node.state_hash) ] ; (* No need to redownload in this case. We just wait a little and try again. *) retry () | Ok (Error ()) -> [%log' warn t.logger] "verification failed! redownloading" - ~metadata:[("state_hash", State_hash.to_yojson node.state_hash)] ; + ~metadata:[ ("state_hash", State_hash.to_yojson node.state_hash) ] ; ( match iv.sender with | Local -> () @@ -863,15 +861,15 @@ let run ~logger ~trust_system ~verifier ~network ~frontier |> don't_wait_for ) ; ignore ( Cached.invalidate_with_failure tv - : External_transition.Initial_validated.t Envelope.Incoming.t - ) ; + : External_transition.Initial_validated.t Envelope.Incoming.t ) ; failed ~sender:iv.sender `Verify | Ok (Ok av) -> let av = { av with - data= + data = External_transition.skip_frontier_dependencies_validation - `This_transition_belongs_to_a_detached_subtree av.data } + `This_transition_belongs_to_a_detached_subtree av.data + } in let av = Cached.transform tv ~f:(fun _ -> av) in set_state t node (Wait_for_parent av) ; @@ -931,20 +929,17 @@ let run ~logger ~trust_system ~verifier ~network ~frontier | `Parent_breadcrumb_not_found -> Error.tag (Error.of_string - (sprintf - "Parent breadcrumb with state_hash %s not found" + (sprintf "Parent breadcrumb with state_hash %s not found" (State_hash.to_string parent_hash))) ~tag:"parent breadcrumb not found" in failed ~error:e ~sender:av.sender `Build_breadcrumb | Ok breadcrumb -> - let%bind () = - Scheduler.yield () |> Deferred.map ~f:Result.return - in + let%bind () = Scheduler.yield () |> Deferred.map ~f:Result.return in let finished = Ivar.create () in let c = Cached.transform c ~f:(fun _ -> breadcrumb) in Strict_pipe.Writer.write catchup_breadcrumbs_writer - ( 
[Rose_tree.of_non_empty_list (Non_empty_list.singleton c)] + ( [ Rose_tree.of_non_empty_list (Non_empty_list.singleton c) ] , `Ledger_catchup finished ) ; let%bind () = (* The cached value is "freed" by the transition processor in [add_and_finalize]. *) @@ -996,9 +991,9 @@ let run ~logger ~trust_system ~verifier ~network ~frontier () | Remote peer -> Downloader.add_knowledge downloader peer - [(target_parent_hash, target_length)] ) ; + [ (target_parent_hash, target_length) ] ) ; let open Option.Let_syntax in - let%bind {proof= path, root; data} = Best_tip_lru.get h in + let%bind { proof = path, root; data } = Best_tip_lru.get h in let%bind p = Transition_chain_verifier.verify ~target_hash: @@ -1008,7 +1003,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier in Result.ok (try_to_connect_hash_chain t p ~frontier - ~blockchain_length_of_target_hash) ) + ~blockchain_length_of_target_hash)) with | None -> download_state_hashes t ~logger ~trust_system ~network ~frontier @@ -1022,7 +1017,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier | Error errors -> [%log debug] ~metadata: - [("target_hash", State_hash.to_yojson target_parent_hash)] + [ ("target_hash", State_hash.to_yojson target_parent_hash) ] "Failed to download state hashes for $target_hash" ; if contains_no_common_ancestor errors then List.iter forest ~f:(fun subtree -> @@ -1039,7 +1034,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier List.map children_transitions ~f:(fun cached_transition -> Cached.peek cached_transition |> Envelope.Incoming.data - |> External_transition.Initial_validated.state_hash ) + |> External_transition.Initial_validated.state_hash) in [%log error] ~metadata: @@ -1058,14 +1053,14 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ; ( "protocol_state" , External_transition.Initial_validated.protocol_state transition - |> Mina_state.Protocol_state.value_to_yojson ) ] + |> Mina_state.Protocol_state.value_to_yojson ) + ] "Validation error: 
external transition with state hash \ $state_hash and its children were rejected for reason \ $reason" ; Mina_metrics.( Counter.inc Rejected_blocks.no_common_ancestor - (Float.of_int @@ (1 + List.length children_transitions))) - ) + (Float.of_int @@ (1 + List.length children_transitions)))) | Ok (root, state_hashes) -> [%log' debug t.logger] ~metadata: @@ -1073,24 +1068,25 @@ let run ~logger ~trust_system ~verifier ~network ~frontier ; ( "node_states" , let s = Node.State.Enum.Table.create () in Hashtbl.iter t.nodes ~f:(fun node -> - Hashtbl.incr s (Node.State.enum node.state) ) ; + Hashtbl.incr s (Node.State.enum node.state)) ; `List (List.map (Hashtbl.to_alist s) ~f:(fun (k, v) -> - `List [Node.State.Enum.to_yojson k; `Int v] )) ) ] + `List [ Node.State.Enum.to_yojson k; `Int v ])) ) + ] "before everything" ; let root = match root with | `Breadcrumb root -> (* If we hit this case we should probably remove the parent from the - table and prune, although in theory that should be handled by - the frontier calling [Full_catchup_tree.apply_diffs]. *) + table and prune, although in theory that should be handled by + the frontier calling [Full_catchup_tree.apply_diffs]. *) create_node ~downloader t (`Root root) | `Node node -> (* TODO: Log what is going on with transition frontier. 
*) node in [%log debug] - ~metadata:[("n", `Int (List.length state_hashes))] + ~metadata:[ ("n", `Int (List.length state_hashes)) ] "Adding $n nodes" ; List.iter forest ~f: @@ -1099,8 +1095,7 @@ let run ~logger ~trust_system ~verifier ~network ~frontier create_node ~downloader t (`Initial_validated c) in ignore - (run_node node : (unit, [`Finished]) Deferred.Result.t) - )) ; + (run_node node : (unit, [ `Finished ]) Deferred.Result.t))) ; ignore ( List.fold state_hashes ~init:(root.state_hash, root.blockchain_length) @@ -1111,8 +1106,8 @@ let run ~logger ~trust_system ~verifier ~network ~frontier create_node t ~downloader (`Hash (h, l, parent)) in don't_wait_for (run_node node >>| ignore) ) ; - (h, l) ) - : State_hash.t * Length.t )) ) + (h, l)) + : State_hash.t * Length.t ))) let run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier ~catchup_job_reader ~catchup_breadcrumbs_writer diff --git a/src/lib/ledger_proof/ledger_proof.ml b/src/lib/ledger_proof/ledger_proof.ml index d735e400699..c93e3b35350 100644 --- a/src/lib/ledger_proof/ledger_proof.ml +++ b/src/lib/ledger_proof/ledger_proof.ml @@ -25,14 +25,16 @@ module Prod : Ledger_proof_intf.S with type t = Transaction_snark.t = struct let underlying_proof = Transaction_snark.proof let create - ~statement:{ Transaction_snark.Statement.source - ; target - ; supply_increase - ; fee_excess - ; next_available_token_before - ; next_available_token_after - ; pending_coinbase_stack_state - ; sok_digest= () } ~sok_digest ~proof = + ~statement: + { Transaction_snark.Statement.source + ; target + ; supply_increase + ; fee_excess + ; next_available_token_before + ; next_available_token_after + ; pending_coinbase_stack_state + ; sok_digest = () + } ~sok_digest ~proof = Transaction_snark.create ~source ~target ~pending_coinbase_stack_state ~supply_increase ~fee_excess ~next_available_token_before ~next_available_token_after ~sok_digest ~proof diff --git a/src/lib/linked_tree/linked_tree.ml 
b/src/lib/linked_tree/linked_tree.ml index 5af0aae6907..d145f2697e3 100644 --- a/src/lib/linked_tree/linked_tree.ml +++ b/src/lib/linked_tree/linked_tree.ml @@ -14,7 +14,7 @@ module type S = sig -> key:Key.t -> length:Length.t -> data:'a - -> [`Ok | `Duplicate | `Too_old] + -> [ `Ok | `Duplicate | `Too_old ] val path : 'a t -> source:Key.t -> ancestor:Key.t -> 'a list option @@ -30,26 +30,28 @@ module Make (Key : Key) : S with module Key = Key = struct module Node = struct type 'a t = - { value: 'a - ; key: Key.t - ; length: Length.t - ; mutable parent: [`Node of 'a t | `Key of Key.t] - ; mutable children: 'a t list } + { value : 'a + ; key : Key.t + ; length : Length.t + ; mutable parent : [ `Node of 'a t | `Key of Key.t ] + ; mutable children : 'a t list + } end type 'a t = - { table: 'a Node.t Table.t - ; mutable roots: 'a Node.t list - ; max_size: int - ; mutable newest: Length.t option } + { table : 'a Node.t Table.t + ; mutable roots : 'a Node.t list + ; max_size : int + ; mutable newest : Length.t option + } let create ~max_size = - {table= Table.create (); newest= None; roots= []; max_size} + { table = Table.create (); newest = None; roots = []; max_size } let lookup_node (t : 'a t) k = Hashtbl.find t.table k (* TODO: May have to punish peers who ask for ancestor paths - that don't exist. *) + that don't exist. 
*) let path t ~source ~ancestor = let rec go acc (node : _ Node.t) = let acc = node.value :: acc in @@ -83,7 +85,7 @@ module Make (Key : Key) : S with module Key = Key = struct Hashtbl.remove t.table node.key ; List.iter node.children ~f:(fun child -> child.parent <- `Key node.key ; - go child ) ) + go child) ) in List.iter t.roots ~f:go @@ -102,21 +104,23 @@ module Make (Key : Key) : S with module Key = Key = struct match lookup_node t prev_key with | None -> let node = - { Node.value= data + { Node.value = data ; key - ; parent= `Key prev_key - ; children= [] - ; length } + ; parent = `Key prev_key + ; children = [] + ; length + } in t.roots <- node :: t.roots ; node | Some parent -> let node = - { Node.value= data + { Node.value = data ; key - ; parent= `Node parent - ; children= [] - ; length } + ; parent = `Node parent + ; children = [] + ; length + } in parent.children <- node :: parent.children ; node diff --git a/src/lib/logger/impl.ml b/src/lib/logger/impl.ml index 78510a902e6..ab4414967e1 100644 --- a/src/lib/logger/impl.ml +++ b/src/lib/logger/impl.ml @@ -3,7 +3,7 @@ open Async module Level = struct type t = Spam | Trace | Debug | Info | Warn | Error | Faulty_peer | Fatal - [@@deriving sexp, equal, compare, show {with_path= false}, enumerate] + [@@deriving sexp, equal, compare, show { with_path = false }, enumerate] let of_string str = try Ok (t_of_sexp (Sexp.Atom str)) @@ -25,10 +25,10 @@ module Time = struct end module Source = struct - type t = {module_: string [@key "module"]; location: string} + type t = { module_ : string [@key "module"]; location : string } [@@deriving yojson] - let create ~module_ ~location = {module_; location} + let create ~module_ ~location = { module_; location } end module Metadata = struct @@ -73,7 +73,7 @@ module Metadata = struct let extend (t : t) alist = List.fold_left alist ~init:t ~f:(fun acc (key, data) -> - String.Map.set acc ~key ~data ) + String.Map.set acc ~key ~data) let merge (a : t) (b : t) = extend a 
(String.Map.to_alist b) end @@ -86,12 +86,13 @@ let append_to_global_metadata l = module Message = struct type t = - { timestamp: Time.t - ; level: Level.t - ; source: Source.t option [@default None] - ; message: string - ; metadata: Metadata.t - ; event_id: Structured_log_events.id option [@default None] } + { timestamp : Time.t + ; level : Level.t + ; source : Source.t option [@default None] + ; message : string + ; metadata : Metadata.t + ; event_id : Structured_log_events.id option [@default None] + } [@@deriving yojson] let check_invariants (t : t) = @@ -103,7 +104,7 @@ module Message = struct | `Interpolate item -> Metadata.mem t.metadata item | `Raw _ -> - true ) + true) end module Processor = struct @@ -130,18 +131,18 @@ module Processor = struct if Level.compare msg.level Level.Spam = 0 then `Assoc (List.filter msg_json_fields ~f:(fun (k, _) -> - not (String.equal k "source") )) + not (String.equal k "source"))) else `Assoc msg_json_fields in Some (Yojson.Safe.to_string json) end module Pretty = struct - type t = {log_level: Level.t; config: Logproc_lib.Interpolator.config} + type t = { log_level : Level.t; config : Logproc_lib.Interpolator.config } - let create ~log_level ~config = {log_level; config} + let create ~log_level ~config = { log_level; config } - let process {log_level; config} (msg : Message.t) = + let process { log_level; config } (msg : Message.t) = let open Message in if Level.compare msg.level log_level < 0 then None else @@ -151,7 +152,7 @@ module Processor = struct | Error err -> Option.iter msg.source ~f:(fun source -> Core.printf "logproc interpolation error in %s: %s\n" - source.location err ) ; + source.location err) ; None | Ok (str, extra) -> let formatted_extra = @@ -164,8 +165,7 @@ module Processor = struct ~zone:Time.Zone.utc in Some - ( time ^ " [" ^ Level.show msg.level ^ "] " ^ str - ^ formatted_extra ) + (time ^ " [" ^ Level.show msg.level ^ "] " ^ str ^ formatted_extra) end let raw ?(log_level = Level.Spam) () = T ((module 
Raw), Raw.create ~log_level) @@ -200,28 +200,29 @@ module Transport = struct let log_perm = 0o644 type t = - { directory: string - ; log_filename: string - ; max_size: int - ; num_rotate: int - ; mutable curr_index: int - ; mutable primary_log: File_descr.t - ; mutable primary_log_size: int } + { directory : string + ; log_filename : string + ; max_size : int + ; num_rotate : int + ; mutable curr_index : int + ; mutable primary_log : File_descr.t + ; mutable primary_log_size : int + } let create ~directory ~max_size ~log_filename ~num_rotate = - if not (Result.is_ok (access directory [`Exists])) then + if not (Result.is_ok (access directory [ `Exists ])) then mkdir_p ~perm:0o755 directory ; - if not (Result.is_ok (access directory [`Exists; `Read; `Write])) then + if not (Result.is_ok (access directory [ `Exists; `Read; `Write ])) then failwithf "cannot create log files: read/write permissions required on %s" directory () ; let primary_log_loc = Filename.concat directory log_filename in let primary_log_size, mode = - if Result.is_ok (access primary_log_loc [`Exists; `Read; `Write]) + if Result.is_ok (access primary_log_loc [ `Exists; `Read; `Write ]) then let log_stats = stat primary_log_loc in - (Int64.to_int_exn log_stats.st_size, [O_RDWR; O_APPEND]) - else (0, [O_RDWR; O_CREAT]) + (Int64.to_int_exn log_stats.st_size, [ O_RDWR; O_APPEND ]) + else (0, [ O_RDWR; O_CREAT ]) in let primary_log = openfile ~perm:log_perm ~mode primary_log_loc in { directory @@ -230,7 +231,8 @@ module Transport = struct ; primary_log ; primary_log_size ; num_rotate - ; curr_index= 0 } + ; curr_index = 0 + } let rotate t = let primary_log_loc = Filename.concat t.directory t.log_filename in @@ -244,8 +246,8 @@ module Transport = struct in close t.primary_log ; rename ~src:primary_log_loc ~dst:secondary_log_loc ; - t.primary_log - <- openfile ~perm:log_perm ~mode:[O_RDWR; O_CREAT] primary_log_loc ; + t.primary_log <- + openfile ~perm:log_perm ~mode:[ O_RDWR; O_CREAT ] primary_log_loc ; 
t.primary_log_size <- 0 let transport t str = @@ -266,7 +268,7 @@ module Transport = struct end module Consumer_registry = struct - type consumer = {processor: Processor.t; transport: Transport.t} + type consumer = { processor : Processor.t; transport : Transport.t } module Consumer_tbl = Hashtbl.Make (String) @@ -277,20 +279,22 @@ module Consumer_registry = struct type id = string let register ~(id : id) ~processor ~transport = - Consumer_tbl.add_multi t ~key:id ~data:{processor; transport} + Consumer_tbl.add_multi t ~key:id ~data:{ processor; transport } let broadcast_log_message ~id msg = Hashtbl.find_and_call t id ~if_found:(fun consumers -> List.iter consumers - ~f:(fun { processor= Processor.T ((module Processor), processor) - ; transport= Transport.T ((module Transport), transport) } + ~f:(fun + { processor = Processor.T ((module Processor), processor) + ; transport = Transport.T ((module Transport), transport) + } -> match Processor.process processor msg with | Some str -> Transport.transport transport str | None -> - () ) ) + ())) ~if_not_found:(fun _ -> let (Processor.T ((module Processor), processor)) = Processor.raw () in let (Transport.T ((module Transport), transport)) = @@ -300,13 +304,13 @@ module Consumer_registry = struct | Some str -> Transport.transport transport str | None -> - () ) + ()) end [%%versioned module Stable = struct module V1 = struct - type t = {null: bool; metadata: Metadata.Stable.V1.t; id: string} + type t = { null : bool; metadata : Metadata.Stable.V1.t; id : string } let to_latest = Fn.id end @@ -317,13 +321,14 @@ let metadata t = t.metadata let create ?(metadata = []) ?(id = "default") () = let pid = lazy (Unix.getpid () |> Pid.to_int) in let metadata' = ("pid", `Int (Lazy.force pid)) :: metadata in - {null= false; metadata= Metadata.extend Metadata.empty metadata'; id} + { null = false; metadata = Metadata.extend Metadata.empty metadata'; id } -let null () = {null= true; metadata= Metadata.empty; id= "default"} +let null () 
= { null = true; metadata = Metadata.empty; id = "default" } -let extend t metadata = {t with metadata= Metadata.extend t.metadata metadata} +let extend t metadata = + { t with metadata = Metadata.extend t.metadata metadata } -let change_id {null; metadata; id= _} ~id = {null; metadata; id} +let change_id { null; metadata; id = _ } ~id = { null; metadata; id } let make_message (t : t) ~level ~module_ ~location ~metadata ~message ~event_id = @@ -337,17 +342,18 @@ let make_message (t : t) ~level ~module_ ~location ~metadata ~message ~event_id ("$duplicated_keys", `List (List.map ~f:(fun (s, _) -> `String s) dups)) :: List.dedup_and_sort m ~compare:key_cmp in - { Message.timestamp= Time.now () + { Message.timestamp = Time.now () ; level - ; source= Some (Source.create ~module_ ~location) + ; source = Some (Source.create ~module_ ~location) ; message - ; metadata= + ; metadata = Metadata.extend (Metadata.merge (Metadata.of_alist_exn global_metadata') t.metadata) metadata - ; event_id } + ; event_id + } -let raw ({id; _} as t) msg = +let raw ({ id; _ } as t) msg = if t.null then () else if Message.check_invariants msg then Consumer_registry.broadcast_log_message ~id msg @@ -356,7 +362,7 @@ let raw ({id; _} as t) msg = let add_tags_to_metadata metadata tags = Option.value_map tags ~default:metadata ~f:(fun tags -> let tags_item = ("tags", `List (List.map tags ~f:Tags.to_yojson)) in - tags_item :: metadata ) + tags_item :: metadata) let log t ~level ~module_ ~location ?tags ?(metadata = []) ?event_id fmt = let metadata = add_tags_to_metadata metadata tags in diff --git a/src/lib/logger/impl.mli b/src/lib/logger/impl.mli index d78424ee03e..1271b72169d 100644 --- a/src/lib/logger/impl.mli +++ b/src/lib/logger/impl.mli @@ -12,7 +12,8 @@ type t = Stable.V1.t module Level : sig type t = Spam | Trace | Debug | Info | Warn | Error | Faulty_peer | Fatal - [@@deriving sexp, equal, compare, yojson, show {with_path= false}, enumerate] + [@@deriving + sexp, equal, compare, yojson, 
show { with_path = false }, enumerate] val of_string : string -> (t, string) result end @@ -26,7 +27,7 @@ module Time : sig end module Source : sig - type t = {module_: string [@key "module"]; location: string} + type t = { module_ : string [@key "module"]; location : string } [@@deriving yojson] val create : module_:string -> location:string -> t @@ -49,12 +50,13 @@ val metadata : t -> Metadata.t module Message : sig type t = - { timestamp: Time.t - ; level: Level.t - ; source: Source.t option - ; message: string - ; metadata: Metadata.t - ; event_id: Structured_log_events.id option } + { timestamp : Time.t + ; level : Level.t + ; source : Source.t option + ; message : string + ; metadata : Metadata.t + ; event_id : Structured_log_events.id option + } [@@deriving yojson] end @@ -105,8 +107,7 @@ end module Consumer_registry : sig type id = string - val register : - id:id -> processor:Processor.t -> transport:Transport.t -> unit + val register : id:id -> processor:Processor.t -> transport:Transport.t -> unit end type 'a log_function = diff --git a/src/lib/logger/test.ml b/src/lib/logger/test.ml index 5438eb83779..fcc71341daf 100644 --- a/src/lib/logger/test.ml +++ b/src/lib/logger/test.ml @@ -8,7 +8,7 @@ let%test_unit "Logger.Dumb_logrotate rotates logs when expected" = let directory = Filename.temp_dir ~in_dir:"/tmp" "coda_spun_test" "" in let log_filename = "mina.log" in let exists name = - Result.is_ok (Unix.access (Filename.concat directory name) [`Exists]) + Result.is_ok (Unix.access (Filename.concat directory name) [ `Exists ]) in let get_size name = Int64.to_int_exn (Unix.stat (Filename.concat directory name)).st_size diff --git a/src/lib/logproc_lib/filter.ml b/src/lib/logproc_lib/filter.ml index 26cb5633c4a..eaa7a03bb09 100644 --- a/src/lib/logproc_lib/filter.ml +++ b/src/lib/logproc_lib/filter.ml @@ -144,14 +144,14 @@ module Parser = struct maybe (op <* commit) >>= function Some f -> p >>| f l | None -> return l let bool = - choice [string "true" *> return 
true; string "false" *> return false] + choice [ string "true" *> return true; string "false" *> return false ] "bool" let int = take_while1 is_numeric >>| int_of_string "int" let text_escape = - choice (List.map ~f:char ['"'; '\\'; '/'; 'b'; 'n'; 'r'; 't']) - >>| fun c -> String.of_char_list ['\\'; c] + choice (List.map ~f:char [ '"'; '\\'; '/'; 'b'; 'n'; 'r'; 't' ]) + >>| fun c -> String.of_char_list [ '\\'; c ] let text_component = char '\\' *> text_escape <|> (text_char >>| String.of_char) @@ -170,7 +170,7 @@ module Parser = struct let str = pad (char '"') text "str" let literal = - choice [bool >>| Ast.bool; int >>| Ast.int; str >>| Ast.string] + choice [ bool >>| Ast.bool; int >>| Ast.int; str >>| Ast.string ] "literal" let value_exp = @@ -179,13 +179,15 @@ module Parser = struct choice [ literal >>| Ast.value_lit ; wrap brackets (pad ws (sep_by (pad ws (char ',')) value_exp)) - >>| Ast.value_list ] + >>| Ast.value_list + ] in let rec access parent = choice [ char '.' *> (ident <|> wrap brackets (pad ws str)) >>| Ast.value_access_string parent - ; wrap brackets (pad ws int) >>| Ast.value_access_int parent ] + ; wrap brackets (pad ws int) >>| Ast.value_access_int parent + ] >>= fun parent' -> access parent' <|> return parent' in maybe base @@ -193,7 +195,7 @@ module Parser = struct | Some base -> access base <|> return base | None -> - access Ast.value_this ) + access Ast.value_this) "value_exp" let cmp_exp = @@ -203,7 +205,8 @@ module Parser = struct choice [ lift2 List.cons (string {|\/|}) inner ; char '/' *> return [] - ; lift2 List.cons (take 1) inner ] ) + ; lift2 List.cons (take 1) inner + ]) >>| String.concat ~sep:"" in char '/' *> commit *> inner @@ -216,7 +219,8 @@ module Parser = struct [ pad ws (stringc "==") *> value_exp >>| Fn.flip Ast.cmp_eq ; pad ws (stringc "!=") *> value_exp >>| Fn.flip Ast.cmp_neq ; pad ws (stringc "in") *> value_exp >>| Fn.flip Ast.cmp_in - ; pad ws (stringc "match") *> regex >>| Fn.flip Ast.cmp_match ]) + ; pad ws (stringc 
"match") *> regex >>| Fn.flip Ast.cmp_match + ]) <* commit "cmp_exp" let bool_exp = @@ -226,14 +230,16 @@ module Parser = struct [ wrap parens (pad ws bool_exp) ; bool >>| Ast.bool_lit ; char '!' *> ws *> bool_exp >>| Ast.bool_not - ; cmp_exp >>| Ast.bool_cmp ] + ; cmp_exp >>| Ast.bool_cmp + ] in let infix_op = choice [ stringc "&&" *> return Ast.bool_and - ; stringc "||" *> return Ast.bool_or ] + ; stringc "||" *> return Ast.bool_or + ] in - infix main (pad ws infix_op) ) + infix main (pad ws infix_op)) "bool_exp" let parser = ws *> bool_exp <* ws <* end_of_input @@ -247,7 +253,7 @@ module Parser = struct | _ -> err in - sprintf "invalid syntax (%s)" msg ) + sprintf "invalid syntax (%s)" msg) end module Interpreter = struct @@ -313,15 +319,11 @@ module Interpreter = struct | `List items -> List.exists items ~f:(Yojson.Safe.equal scalar) | _ -> - (* TODO: filter warnings *) false ) + (* TODO: filter warnings *) false) |> Option.value ~default:false | Cmp_match (x, regex) -> Option.map (interpret_value_exp json x) ~f:(fun value -> - match value with - | `String str -> - Re2.matches regex str - | _ -> - false ) + match value with `String str -> Re2.matches regex str | _ -> false) |> Option.value ~default:false let rec interpret_bool_exp json = function diff --git a/src/lib/logproc_lib/interpolator.ml b/src/lib/logproc_lib/interpolator.ml index e913bc424a3..94090583d45 100644 --- a/src/lib/logproc_lib/interpolator.ml +++ b/src/lib/logproc_lib/interpolator.ml @@ -2,18 +2,19 @@ open Core_kernel type mode = Hidden | Inline | After -type config = {mode: mode; max_interpolation_length: int; pretty_print: bool} +type config = + { mode : mode; max_interpolation_length : int; pretty_print : bool } let rec result_fold_left ls ~init ~f = match ls with | [] -> Ok init | h :: t -> ( - match f init h with - | Ok init' -> - result_fold_left t ~init:init' ~f - | Error err -> - Error err ) + match f init h with + | Ok init' -> + result_fold_left t ~init:init' ~f + | Error err -> + 
Error err ) let parser = let open Angstrom in @@ -36,7 +37,8 @@ let parser = many1 (choice [ (take_while1 (not_f (Char.equal '$')) >>| fun x -> `Raw x) - ; (interpolation >>| fun x -> `Interpolate x) ]) + ; (interpolation >>| fun x -> `Interpolate x) + ]) in message <* end_of_input @@ -53,17 +55,16 @@ let render ~max_interpolation_length ~format_json metadata items = | `Interpolate id -> let%map json = String.Map.find metadata id - |> Result.of_option - ~error:(sprintf "bad interpolation for %s" id) + |> Result.of_option ~error:(sprintf "bad interpolation for %s" id) in let str = format_json json in if String.length str > max_interpolation_length then (msg_acc ^ "$" ^ id, (id, str) :: extra_acc) - else (msg_acc ^ str, extra_acc) ) + else (msg_acc ^ str, extra_acc)) in (msg, List.rev extra) -let interpolate {mode; max_interpolation_length; pretty_print} msg metadata = +let interpolate { mode; max_interpolation_length; pretty_print } msg metadata = let open Result.Let_syntax in let format_json = if pretty_print then Yojson.Safe.pretty_to_string @@ -79,4 +80,4 @@ let interpolate {mode; max_interpolation_length; pretty_print} msg metadata = Ok ( msg , List.map (String.Map.to_alist metadata) ~f:(fun (k, v) -> - (k, format_json v) ) ) + (k, format_json v)) ) diff --git a/src/lib/marlin_plonk_bindings/bigint_256/test/test.ml b/src/lib/marlin_plonk_bindings/bigint_256/test/test.ml index 6b72372c1f1..0b3b6db8f60 100644 --- a/src/lib/marlin_plonk_bindings/bigint_256/test/test.ml +++ b/src/lib/marlin_plonk_bindings/bigint_256/test/test.ml @@ -16,12 +16,9 @@ let () = comparisons x x ; comparisons x y ; comparisons y x ; - Format.printf "compare(%s, %s)=%i@." (to_string x) (to_string x) - (compare x x) ; - Format.printf "compare(%s, %s)=%i@." (to_string x) (to_string y) - (compare x y) ; - Format.printf "compare(%s, %s)=%i@." (to_string y) (to_string x) - (compare y x) ; + Format.printf "compare(%s, %s)=%i@." 
(to_string x) (to_string x) (compare x x) ; + Format.printf "compare(%s, %s)=%i@." (to_string x) (to_string y) (compare x y) ; + Format.printf "compare(%s, %s)=%i@." (to_string y) (to_string x) (compare y x) ; let z = div x y in Format.printf "%s@." (to_string z) ; Format.printf "test_bit(1, 0)=%b@." (test_bit (of_decimal_string "1") 0) diff --git a/src/lib/marlin_plonk_bindings/pasta_fp/marlin_plonk_bindings_pasta_fp.ml b/src/lib/marlin_plonk_bindings/pasta_fp/marlin_plonk_bindings_pasta_fp.ml index 7549d463f6c..c9dbe27af0f 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp/marlin_plonk_bindings_pasta_fp.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp/marlin_plonk_bindings_pasta_fp.ml @@ -2,8 +2,7 @@ type t external size_in_bits : unit -> int = "caml_pasta_fp_size_in_bits" -external size : - unit -> Marlin_plonk_bindings_bigint_256.t +external size : unit -> Marlin_plonk_bindings_bigint_256.t = "caml_pasta_fp_size" external add : t -> t -> t = "caml_pasta_fp_add" @@ -50,16 +49,13 @@ external random : unit -> t = "caml_pasta_fp_random" external rng : int -> t = "caml_pasta_fp_rng" -external to_bigint : - t -> Marlin_plonk_bindings_bigint_256.t +external to_bigint : t -> Marlin_plonk_bindings_bigint_256.t = "caml_pasta_fp_to_bigint" -external of_bigint : - Marlin_plonk_bindings_bigint_256.t -> t +external of_bigint : Marlin_plonk_bindings_bigint_256.t -> t = "caml_pasta_fp_of_bigint" -external two_adic_root_of_unity : - unit -> t +external two_adic_root_of_unity : unit -> t = "caml_pasta_fp_two_adic_root_of_unity" external domain_generator : int -> t = "caml_pasta_fp_domain_generator" diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_index/marlin_plonk_bindings_pasta_fp_index.ml b/src/lib/marlin_plonk_bindings/pasta_fp_index/marlin_plonk_bindings_pasta_fp_index.ml index 844b1a29ca4..c2281e1b5e3 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_index/marlin_plonk_bindings_pasta_fp_index.ml +++ 
b/src/lib/marlin_plonk_bindings/pasta_fp_index/marlin_plonk_bindings_pasta_fp_index.ml @@ -5,16 +5,13 @@ module Gate_vector = struct external create : unit -> t = "caml_pasta_fp_plonk_gate_vector_create" - external add : - t -> Marlin_plonk_bindings_pasta_fp.t Plonk_gate.t -> unit + external add : t -> Marlin_plonk_bindings_pasta_fp.t Plonk_gate.t -> unit = "caml_pasta_fp_plonk_gate_vector_add" - external get : - t -> int -> Marlin_plonk_bindings_pasta_fp.t Plonk_gate.t + external get : t -> int -> Marlin_plonk_bindings_pasta_fp.t Plonk_gate.t = "caml_pasta_fp_plonk_gate_vector_get" - external wrap : - t -> Plonk_gate.Wire.t -> Plonk_gate.Wire.t -> unit + external wrap : t -> Plonk_gate.Wire.t -> Plonk_gate.Wire.t -> unit = "caml_pasta_fp_plonk_gate_vector_wrap" end @@ -38,6 +35,5 @@ external read : ?offset:int -> Marlin_plonk_bindings_pasta_fp_urs.t -> string -> t = "caml_pasta_fp_plonk_index_read" -external write : - ?append:bool -> t -> string -> unit +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fp_plonk_index_write" diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_plonk_oracles/marlin_plonk_bindings_pasta_fp_oracles.ml b/src/lib/marlin_plonk_bindings/pasta_fp_plonk_oracles/marlin_plonk_bindings_pasta_fp_oracles.ml index 117c365ee15..d0f0876ae82 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_plonk_oracles/marlin_plonk_bindings_pasta_fp_oracles.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp_plonk_oracles/marlin_plonk_bindings_pasta_fp_oracles.ml @@ -6,8 +6,7 @@ external create : Marlin_plonk_bindings_pasta_fp_urs.Poly_comm.t array -> Marlin_plonk_bindings_pasta_fp_verifier_index.t -> Marlin_plonk_bindings_pasta_fp_proof.t - -> t - = "caml_pasta_fp_plonk_oracles_create" + -> t = "caml_pasta_fp_plonk_oracles_create" external dummy : unit -> t = "caml_pasta_fp_plonk_oracles_dummy" diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_plonk_verifier_index/marlin_plonk_bindings_pasta_fp_verifier_index.ml 
b/src/lib/marlin_plonk_bindings/pasta_fp_plonk_verifier_index/marlin_plonk_bindings_pasta_fp_verifier_index.ml index fe0a39257f9..cec402904ad 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_plonk_verifier_index/marlin_plonk_bindings_pasta_fp_verifier_index.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp_plonk_verifier_index/marlin_plonk_bindings_pasta_fp_verifier_index.ml @@ -6,16 +6,14 @@ type t = , Marlin_plonk_bindings_pasta_fp_urs.Poly_comm.t ) Plonk_verifier_index.t -external create : - Marlin_plonk_bindings_pasta_fp_index.t -> t +external create : Marlin_plonk_bindings_pasta_fp_index.t -> t = "caml_pasta_fp_plonk_verifier_index_create" external read : ?offset:int -> Marlin_plonk_bindings_pasta_fp_urs.t -> string -> t = "caml_pasta_fp_plonk_verifier_index_read" -external write : - ?append:bool -> t -> string -> unit +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fp_plonk_verifier_index_write" external shifts : diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_proof/marlin_plonk_bindings_pasta_fp_proof.ml b/src/lib/marlin_plonk_bindings/pasta_fp_proof/marlin_plonk_bindings_pasta_fp_proof.ml index 2f7d8f48f54..0ac64fad9e0 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_proof/marlin_plonk_bindings_pasta_fp_proof.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp_proof/marlin_plonk_bindings_pasta_fp_proof.ml @@ -13,22 +13,19 @@ external create : -> auxiliary_input:Marlin_plonk_bindings_pasta_fp_vector.t -> prev_challenges:Marlin_plonk_bindings_pasta_fp.t array -> prev_sgs:Marlin_plonk_bindings_pasta_vesta.Affine.t array - -> t - = "caml_pasta_fp_plonk_proof_create" + -> t = "caml_pasta_fp_plonk_proof_create" external verify : Marlin_plonk_bindings_pasta_fp_urs.Poly_comm.t array -> Marlin_plonk_bindings_pasta_fp_verifier_index.t -> t - -> bool - = "caml_pasta_fp_plonk_proof_verify" + -> bool = "caml_pasta_fp_plonk_proof_verify" external batch_verify : Marlin_plonk_bindings_pasta_fp_urs.Poly_comm.t array array -> 
Marlin_plonk_bindings_pasta_fp_verifier_index.t array -> t array - -> bool - = "caml_pasta_fp_plonk_proof_batch_verify" + -> bool = "caml_pasta_fp_plonk_proof_batch_verify" external dummy : unit -> t = "caml_pasta_fp_plonk_proof_dummy" diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_urs/marlin_plonk_bindings_pasta_fp_urs.ml b/src/lib/marlin_plonk_bindings/pasta_fp_urs/marlin_plonk_bindings_pasta_fp_urs.ml index 125a7ff3c17..98ff13caee7 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_urs/marlin_plonk_bindings_pasta_fp_urs.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp_urs/marlin_plonk_bindings_pasta_fp_urs.ml @@ -8,14 +8,11 @@ end external create : int -> t = "caml_pasta_fp_urs_create" -external write : - ?append:bool -> t -> string -> unit - = "caml_pasta_fp_urs_write" +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fp_urs_write" external read : ?offset:int -> string -> t option = "caml_pasta_fp_urs_read" -external lagrange_commitment : - t -> domain_size:int -> int -> Poly_comm.t +external lagrange_commitment : t -> domain_size:int -> int -> Poly_comm.t = "caml_pasta_fp_urs_lagrange_commitment" external commit_evaluations : @@ -30,9 +27,7 @@ external batch_accumulator_check : t -> Marlin_plonk_bindings_pasta_vesta.Affine.t array -> Marlin_plonk_bindings_pasta_fp.t array - -> bool - = "caml_pasta_fp_urs_batch_accumulator_check" + -> bool = "caml_pasta_fp_urs_batch_accumulator_check" -external h : - t -> Marlin_plonk_bindings_pasta_vesta.Affine.t +external h : t -> Marlin_plonk_bindings_pasta_vesta.Affine.t = "caml_pasta_fp_urs_h" diff --git a/src/lib/marlin_plonk_bindings/pasta_fp_urs/test/test.ml b/src/lib/marlin_plonk_bindings/pasta_fp_urs/test/test.ml index 2c1e2fb5e56..adbd8cd365f 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fp_urs/test/test.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fp_urs/test/test.ml @@ -5,7 +5,7 @@ let () = let urs = create 17 in let _lgr = lagrange_commitment urs ~domain_size:255 1 in let _evals = - 
commit_evaluations urs ~domain_size:12 [|Fp.of_int 15; Fp.of_int 35|] + commit_evaluations urs ~domain_size:12 [| Fp.of_int 15; Fp.of_int 35 |] in Format.printf "batch_accumulator_check=%b@." (batch_accumulator_check urs [||] [||]) ; @@ -23,7 +23,7 @@ let () = Sys.remove "./test_urs_17" ; let _lgr = lagrange_commitment urs2 ~domain_size:255 1 in let _evals = - commit_evaluations urs2 ~domain_size:12 [|Fp.of_int 15; Fp.of_int 35|] + commit_evaluations urs2 ~domain_size:12 [| Fp.of_int 15; Fp.of_int 35 |] in Format.printf "batch_accumulator_check=%b@." (batch_accumulator_check urs2 [||] [||]) ; diff --git a/src/lib/marlin_plonk_bindings/pasta_fq/marlin_plonk_bindings_pasta_fq.ml b/src/lib/marlin_plonk_bindings/pasta_fq/marlin_plonk_bindings_pasta_fq.ml index c6d506ad708..f1d984d3492 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq/marlin_plonk_bindings_pasta_fq.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq/marlin_plonk_bindings_pasta_fq.ml @@ -2,8 +2,7 @@ type t external size_in_bits : unit -> int = "caml_pasta_fq_size_in_bits" -external size : - unit -> Marlin_plonk_bindings_bigint_256.t +external size : unit -> Marlin_plonk_bindings_bigint_256.t = "caml_pasta_fq_size" external add : t -> t -> t = "caml_pasta_fq_add" @@ -50,16 +49,13 @@ external random : unit -> t = "caml_pasta_fq_random" external rng : int -> t = "caml_pasta_fq_rng" -external to_bigint : - t -> Marlin_plonk_bindings_bigint_256.t +external to_bigint : t -> Marlin_plonk_bindings_bigint_256.t = "caml_pasta_fq_to_bigint" -external of_bigint : - Marlin_plonk_bindings_bigint_256.t -> t +external of_bigint : Marlin_plonk_bindings_bigint_256.t -> t = "caml_pasta_fq_of_bigint" -external two_adic_root_of_unity : - unit -> t +external two_adic_root_of_unity : unit -> t = "caml_pasta_fq_two_adic_root_of_unity" external domain_generator : int -> t = "caml_pasta_fq_domain_generator" diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_index/marlin_plonk_bindings_pasta_fq_index.ml 
b/src/lib/marlin_plonk_bindings/pasta_fq_index/marlin_plonk_bindings_pasta_fq_index.ml index a84ab6083e5..ea37c798a57 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_index/marlin_plonk_bindings_pasta_fq_index.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq_index/marlin_plonk_bindings_pasta_fq_index.ml @@ -5,16 +5,13 @@ module Gate_vector = struct external create : unit -> t = "caml_pasta_fq_plonk_gate_vector_create" - external add : - t -> Marlin_plonk_bindings_pasta_fq.t Plonk_gate.t -> unit + external add : t -> Marlin_plonk_bindings_pasta_fq.t Plonk_gate.t -> unit = "caml_pasta_fq_plonk_gate_vector_add" - external get : - t -> int -> Marlin_plonk_bindings_pasta_fq.t Plonk_gate.t + external get : t -> int -> Marlin_plonk_bindings_pasta_fq.t Plonk_gate.t = "caml_pasta_fq_plonk_gate_vector_get" - external wrap : - t -> Plonk_gate.Wire.t -> Plonk_gate.Wire.t -> unit + external wrap : t -> Plonk_gate.Wire.t -> Plonk_gate.Wire.t -> unit = "caml_pasta_fq_plonk_gate_vector_wrap" end @@ -38,6 +35,5 @@ external read : ?offset:int -> Marlin_plonk_bindings_pasta_fq_urs.t -> string -> t = "caml_pasta_fq_plonk_index_read" -external write : - ?append:bool -> t -> string -> unit +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fq_plonk_index_write" diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_plonk_oracles/marlin_plonk_bindings_pasta_fq_oracles.ml b/src/lib/marlin_plonk_bindings/pasta_fq_plonk_oracles/marlin_plonk_bindings_pasta_fq_oracles.ml index 1a512f10bfe..35164621fce 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_plonk_oracles/marlin_plonk_bindings_pasta_fq_oracles.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq_plonk_oracles/marlin_plonk_bindings_pasta_fq_oracles.ml @@ -6,8 +6,7 @@ external create : Marlin_plonk_bindings_pasta_fq_urs.Poly_comm.t array -> Marlin_plonk_bindings_pasta_fq_verifier_index.t -> Marlin_plonk_bindings_pasta_fq_proof.t - -> t - = "caml_pasta_fq_plonk_oracles_create" + -> t = "caml_pasta_fq_plonk_oracles_create" 
external dummy : unit -> t = "caml_pasta_fq_plonk_oracles_dummy" diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_plonk_verifier_index/marlin_plonk_bindings_pasta_fq_verifier_index.ml b/src/lib/marlin_plonk_bindings/pasta_fq_plonk_verifier_index/marlin_plonk_bindings_pasta_fq_verifier_index.ml index a784616aa0e..92c40704a00 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_plonk_verifier_index/marlin_plonk_bindings_pasta_fq_verifier_index.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq_plonk_verifier_index/marlin_plonk_bindings_pasta_fq_verifier_index.ml @@ -6,16 +6,14 @@ type t = , Marlin_plonk_bindings_pasta_fq_urs.Poly_comm.t ) Plonk_verifier_index.t -external create : - Marlin_plonk_bindings_pasta_fq_index.t -> t +external create : Marlin_plonk_bindings_pasta_fq_index.t -> t = "caml_pasta_fq_plonk_verifier_index_create" external read : ?offset:int -> Marlin_plonk_bindings_pasta_fq_urs.t -> string -> t = "caml_pasta_fq_plonk_verifier_index_read" -external write : - ?append:bool -> t -> string -> unit +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fq_plonk_verifier_index_write" external shifts : diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_proof/marlin_plonk_bindings_pasta_fq_proof.ml b/src/lib/marlin_plonk_bindings/pasta_fq_proof/marlin_plonk_bindings_pasta_fq_proof.ml index d8dc9fdf129..96f03b3ca61 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_proof/marlin_plonk_bindings_pasta_fq_proof.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq_proof/marlin_plonk_bindings_pasta_fq_proof.ml @@ -13,22 +13,19 @@ external create : -> auxiliary_input:Marlin_plonk_bindings_pasta_fq_vector.t -> prev_challenges:Marlin_plonk_bindings_pasta_fq.t array -> prev_sgs:Marlin_plonk_bindings_pasta_pallas.Affine.t array - -> t - = "caml_pasta_fq_plonk_proof_create" + -> t = "caml_pasta_fq_plonk_proof_create" external verify : Marlin_plonk_bindings_pasta_fq_urs.Poly_comm.t array -> Marlin_plonk_bindings_pasta_fq_verifier_index.t -> t - -> bool - = 
"caml_pasta_fq_plonk_proof_verify" + -> bool = "caml_pasta_fq_plonk_proof_verify" external batch_verify : Marlin_plonk_bindings_pasta_fq_urs.Poly_comm.t array array -> Marlin_plonk_bindings_pasta_fq_verifier_index.t array -> t array - -> bool - = "caml_pasta_fq_plonk_proof_batch_verify" + -> bool = "caml_pasta_fq_plonk_proof_batch_verify" external dummy : unit -> t = "caml_pasta_fq_plonk_proof_dummy" diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_urs/marlin_plonk_bindings_pasta_fq_urs.ml b/src/lib/marlin_plonk_bindings/pasta_fq_urs/marlin_plonk_bindings_pasta_fq_urs.ml index 8ad07f2c313..7b670c88bd3 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_urs/marlin_plonk_bindings_pasta_fq_urs.ml +++ b/src/lib/marlin_plonk_bindings/pasta_fq_urs/marlin_plonk_bindings_pasta_fq_urs.ml @@ -8,14 +8,11 @@ end external create : int -> t = "caml_pasta_fq_urs_create" -external write : - ?append:bool -> t -> string -> unit - = "caml_pasta_fq_urs_write" +external write : ?append:bool -> t -> string -> unit = "caml_pasta_fq_urs_write" external read : ?offset:int -> string -> t option = "caml_pasta_fq_urs_read" -external lagrange_commitment : - t -> domain_size:int -> int -> Poly_comm.t +external lagrange_commitment : t -> domain_size:int -> int -> Poly_comm.t = "caml_pasta_fq_urs_lagrange_commitment" external commit_evaluations : @@ -30,9 +27,7 @@ external batch_accumulator_check : t -> Marlin_plonk_bindings_pasta_pallas.Affine.t array -> Marlin_plonk_bindings_pasta_fq.t array - -> bool - = "caml_pasta_fq_urs_batch_accumulator_check" + -> bool = "caml_pasta_fq_urs_batch_accumulator_check" -external h : - t -> Marlin_plonk_bindings_pasta_pallas.Affine.t +external h : t -> Marlin_plonk_bindings_pasta_pallas.Affine.t = "caml_pasta_fq_urs_h" diff --git a/src/lib/marlin_plonk_bindings/pasta_fq_urs/test/test.ml b/src/lib/marlin_plonk_bindings/pasta_fq_urs/test/test.ml index 5aa3ba825e9..9d10b4a601e 100644 --- a/src/lib/marlin_plonk_bindings/pasta_fq_urs/test/test.ml +++ 
b/src/lib/marlin_plonk_bindings/pasta_fq_urs/test/test.ml @@ -5,7 +5,7 @@ let () = let urs = create 17 in let _lgr = lagrange_commitment urs ~domain_size:255 1 in let _evals = - commit_evaluations urs ~domain_size:12 [|Fq.of_int 15; Fq.of_int 35|] + commit_evaluations urs ~domain_size:12 [| Fq.of_int 15; Fq.of_int 35 |] in Format.printf "batch_accumulator_check=%b@." (batch_accumulator_check urs [||] [||]) ; @@ -23,7 +23,7 @@ let () = Sys.remove "./test_urs_17" ; let _lgr = lagrange_commitment urs2 ~domain_size:255 1 in let _evals = - commit_evaluations urs2 ~domain_size:12 [|Fq.of_int 15; Fq.of_int 35|] + commit_evaluations urs2 ~domain_size:12 [| Fq.of_int 15; Fq.of_int 35 |] in Format.printf "batch_accumulator_check=%b@." (batch_accumulator_check urs2 [||] [||]) ; diff --git a/src/lib/marlin_plonk_bindings/pasta_pallas/marlin_plonk_bindings_pasta_pallas.ml b/src/lib/marlin_plonk_bindings/pasta_pallas/marlin_plonk_bindings_pasta_pallas.ml index ec48428ab48..08e0586bf25 100644 --- a/src/lib/marlin_plonk_bindings/pasta_pallas/marlin_plonk_bindings_pasta_pallas.ml +++ b/src/lib/marlin_plonk_bindings/pasta_pallas/marlin_plonk_bindings_pasta_pallas.ml @@ -16,8 +16,7 @@ external negate : t -> t = "caml_pasta_pallas_negate" external double : t -> t = "caml_pasta_pallas_double" -external scale : - t -> Marlin_plonk_bindings_pasta_fq.t -> t +external scale : t -> Marlin_plonk_bindings_pasta_fq.t -> t = "caml_pasta_pallas_scale" external random : unit -> t = "caml_pasta_pallas_random" @@ -32,16 +31,13 @@ external of_affine_coordinates : Marlin_plonk_bindings_pasta_fp.t -> Marlin_plonk_bindings_pasta_fp.t -> t = "caml_pasta_pallas_of_affine_coordinates" -external endo_base : - unit -> Marlin_plonk_bindings_pasta_fp.t +external endo_base : unit -> Marlin_plonk_bindings_pasta_fp.t = "caml_pasta_pallas_endo_base" -external endo_scalar : - unit -> Marlin_plonk_bindings_pasta_fq.t +external endo_scalar : unit -> Marlin_plonk_bindings_pasta_fq.t = "caml_pasta_pallas_endo_scalar" 
-external affine_deep_copy : - Affine.t -> Affine.t +external affine_deep_copy : Affine.t -> Affine.t = "caml_pasta_pallas_affine_deep_copy" let%test "affine dump_copy" = diff --git a/src/lib/marlin_plonk_bindings/pasta_vesta/marlin_plonk_bindings_pasta_vesta.ml b/src/lib/marlin_plonk_bindings/pasta_vesta/marlin_plonk_bindings_pasta_vesta.ml index f6609dee028..6729f9cde33 100644 --- a/src/lib/marlin_plonk_bindings/pasta_vesta/marlin_plonk_bindings_pasta_vesta.ml +++ b/src/lib/marlin_plonk_bindings/pasta_vesta/marlin_plonk_bindings_pasta_vesta.ml @@ -16,8 +16,7 @@ external negate : t -> t = "caml_pasta_vesta_negate" external double : t -> t = "caml_pasta_vesta_double" -external scale : - t -> Marlin_plonk_bindings_pasta_fp.t -> t +external scale : t -> Marlin_plonk_bindings_pasta_fp.t -> t = "caml_pasta_vesta_scale" external random : unit -> t = "caml_pasta_vesta_random" @@ -32,16 +31,13 @@ external of_affine_coordinates : Marlin_plonk_bindings_pasta_fq.t -> Marlin_plonk_bindings_pasta_fq.t -> t = "caml_pasta_vesta_of_affine_coordinates" -external endo_base : - unit -> Marlin_plonk_bindings_pasta_fq.t +external endo_base : unit -> Marlin_plonk_bindings_pasta_fq.t = "caml_pasta_vesta_endo_base" -external endo_scalar : - unit -> Marlin_plonk_bindings_pasta_fp.t +external endo_scalar : unit -> Marlin_plonk_bindings_pasta_fp.t = "caml_pasta_vesta_endo_scalar" -external affine_deep_copy : - Affine.t -> Affine.t +external affine_deep_copy : Affine.t -> Affine.t = "caml_pasta_vesta_affine_deep_copy" let%test "affine deep_copy" = diff --git a/src/lib/marlin_plonk_bindings/tools/allocation_stress_test/allocation_stress_test.ml b/src/lib/marlin_plonk_bindings/tools/allocation_stress_test/allocation_stress_test.ml index b5ba4c6d490..d1963b32fdd 100644 --- a/src/lib/marlin_plonk_bindings/tools/allocation_stress_test/allocation_stress_test.ml +++ b/src/lib/marlin_plonk_bindings/tools/allocation_stress_test/allocation_stress_test.ml @@ -47,7 +47,7 @@ let register l = List.iter l 
~f:register_one let run_simple_exercise name count = let (module Exercise) = List.find_exn !exercises ~f:(fun (module Exercise) -> - String.equal name Exercise.name ) + String.equal name Exercise.name) in let outer_array = Array.init 100 ~f:(fun _ -> None) in let state = Splittable_random.State.of_int 13 in @@ -72,7 +72,7 @@ let run_simple_exercise name count = let new_array = Array.init length ~f:(fun _ -> incr allocated_count ; - Some (exercise ()) ) + Some (exercise ())) in let overwrite = Splittable_random.bool state in if overwrite then outer_array.(i) <- Some new_array ; diff --git a/src/lib/marlin_plonk_bindings/types/marlin_plonk_bindings_types.ml b/src/lib/marlin_plonk_bindings/types/marlin_plonk_bindings_types.ml index 01b69889072..f7714ed0445 100644 --- a/src/lib/marlin_plonk_bindings/types/marlin_plonk_bindings_types.ml +++ b/src/lib/marlin_plonk_bindings/types/marlin_plonk_bindings_types.ml @@ -7,47 +7,49 @@ module Scalar_challenge = struct end module Poly_comm = struct - type 'a t = {unshifted: 'a array; shifted: 'a option} + type 'a t = { unshifted : 'a array; shifted : 'a option } end module Plonk_domain = struct - type 'field t = {log_size_of_group: int; group_gen: 'field} + type 'field t = { log_size_of_group : int; group_gen : 'field } end module Plonk_verification_evals = struct type 'poly_comm t = - { sigma_comm_0: 'poly_comm - ; sigma_comm_1: 'poly_comm - ; sigma_comm_2: 'poly_comm - ; ql_comm: 'poly_comm - ; qr_comm: 'poly_comm - ; qo_comm: 'poly_comm - ; qm_comm: 'poly_comm - ; qc_comm: 'poly_comm - ; rcm_comm_0: 'poly_comm - ; rcm_comm_1: 'poly_comm - ; rcm_comm_2: 'poly_comm - ; psm_comm: 'poly_comm - ; add_comm: 'poly_comm - ; mul1_comm: 'poly_comm - ; mul2_comm: 'poly_comm - ; emul1_comm: 'poly_comm - ; emul2_comm: 'poly_comm - ; emul3_comm: 'poly_comm } + { sigma_comm_0 : 'poly_comm + ; sigma_comm_1 : 'poly_comm + ; sigma_comm_2 : 'poly_comm + ; ql_comm : 'poly_comm + ; qr_comm : 'poly_comm + ; qo_comm : 'poly_comm + ; qm_comm : 
'poly_comm + ; qc_comm : 'poly_comm + ; rcm_comm_0 : 'poly_comm + ; rcm_comm_1 : 'poly_comm + ; rcm_comm_2 : 'poly_comm + ; psm_comm : 'poly_comm + ; add_comm : 'poly_comm + ; mul1_comm : 'poly_comm + ; mul2_comm : 'poly_comm + ; emul1_comm : 'poly_comm + ; emul2_comm : 'poly_comm + ; emul3_comm : 'poly_comm + } end module Plonk_verification_shifts = struct - type 'field t = {r: 'field; o: 'field} + type 'field t = { r : 'field; o : 'field } end module Plonk_verifier_index = struct type ('field, 'urs, 'poly_comm) t = - { domain: 'field Plonk_domain.t - ; max_poly_size: int - ; max_quot_size: int - ; urs: 'urs - ; evals: 'poly_comm Plonk_verification_evals.t - ; shifts: 'field Plonk_verification_shifts.t } + { domain : 'field Plonk_domain.t + ; max_poly_size : int + ; max_quot_size : int + ; urs : 'urs + ; evals : 'poly_comm Plonk_verification_evals.t + ; shifts : 'field Plonk_verification_shifts.t + } end module Plonk_gate = struct @@ -72,69 +74,74 @@ module Plonk_gate = struct end module Wire = struct - type t = {row: int; col: Col.t} + type t = { row : int; col : Col.t } end module Wires = struct - type t = {row: int; l: Wire.t; r: Wire.t; o: Wire.t} + type t = { row : int; l : Wire.t; r : Wire.t; o : Wire.t } end - type 'a t = {kind: Kind.t; wires: Wires.t; c: 'a array} + type 'a t = { kind : Kind.t; wires : Wires.t; c : 'a array } end module Plonk_proof = struct module Evaluations = struct type 'field t = - { l: 'field array - ; r: 'field array - ; o: 'field array - ; z: 'field array - ; t: 'field array - ; f: 'field array - ; sigma1: 'field array - ; sigma2: 'field array } + { l : 'field array + ; r : 'field array + ; o : 'field array + ; z : 'field array + ; t : 'field array + ; f : 'field array + ; sigma1 : 'field array + ; sigma2 : 'field array + } end module Opening_proof = struct type ('field, 'g) t = - {lr: ('g * 'g) array; delta: 'g; z1: 'field; z2: 'field; sg: 'g} + { lr : ('g * 'g) array; delta : 'g; z1 : 'field; z2 : 'field; sg : 'g } end module 
Messages = struct type 'poly_comm t = - { l_comm: 'poly_comm - ; r_comm: 'poly_comm - ; o_comm: 'poly_comm - ; z_comm: 'poly_comm - ; t_comm: 'poly_comm } + { l_comm : 'poly_comm + ; r_comm : 'poly_comm + ; o_comm : 'poly_comm + ; z_comm : 'poly_comm + ; t_comm : 'poly_comm + } end type ('field, 'g, 'poly_comm) t = - { messages: 'poly_comm Messages.t - ; proof: ('field, 'g) Opening_proof.t - ; evals: 'field Evaluations.t * 'field Evaluations.t - ; public: 'field array - ; prev_challenges: ('field array * 'poly_comm) array } + { messages : 'poly_comm Messages.t + ; proof : ('field, 'g) Opening_proof.t + ; evals : 'field Evaluations.t * 'field Evaluations.t + ; public : 'field array + ; prev_challenges : ('field array * 'poly_comm) array + } end module Oracles = struct module Random_oracles = struct type 'field t = - { beta: 'field - ; gamma: 'field - ; alpha_chal: 'field Scalar_challenge.t - ; alpha: 'field - ; zeta: 'field - ; v: 'field - ; u: 'field - ; zeta_chal: 'field Scalar_challenge.t - ; v_chal: 'field Scalar_challenge.t - ; u_chal: 'field Scalar_challenge.t } + { beta : 'field + ; gamma : 'field + ; alpha_chal : 'field Scalar_challenge.t + ; alpha : 'field + ; zeta : 'field + ; v : 'field + ; u : 'field + ; zeta_chal : 'field Scalar_challenge.t + ; v_chal : 'field Scalar_challenge.t + ; u_chal : 'field Scalar_challenge.t + } end type 'field t = - { o: 'field Random_oracles.t - ; p_eval: 'field * 'field - ; opening_prechallenges: 'field array - ; digest_before_evaluations: 'field } + { o : 'field Random_oracles.t + ; p_eval : 'field * 'field + ; opening_prechallenges : 'field array + ; digest_before_evaluations : 'field + } end diff --git a/src/lib/memory_stats/memory_stats.ml b/src/lib/memory_stats/memory_stats.ml index 6473dae0a62..892f3599d90 100644 --- a/src/lib/memory_stats/memory_stats.ml +++ b/src/lib/memory_stats/memory_stats.ml @@ -12,16 +12,18 @@ let ocaml_memory_stats () = ; ("max_heap_size_bytes", `Int (stat.top_heap_words * bytes_per_word)) ; 
("live_size_bytes", `Int (stat.live_words * bytes_per_word)) ; ("live_blocks", `Int stat.live_blocks) - ; ("fragments", `Int stat.fragments) ] + ; ("fragments", `Int stat.fragments) + ] let jemalloc_memory_stats () = - let {Jemalloc.active; resident; allocated; mapped} = + let { Jemalloc.active; resident; allocated; mapped } = Jemalloc.get_memory_stats () in [ ("active", `Int active) ; ("resident", `Int resident) ; ("allocated", `Int allocated) - ; ("mapped", `Int mapped) ] + ; ("mapped", `Int mapped) + ] let log_memory_stats logger ~process = don't_wait_for @@ -32,7 +34,7 @@ let log_memory_stats logger ~process = run major GCs often enough, which means the finalizers don't run and we use way too much memory. As a band-aid solution, we run a major GC cycle every ten minutes. - *) + *) let gc_method = Option.value ~default:"full" @@ Unix.getenv "CODA_GC_HACK_MODE" in @@ -47,8 +49,8 @@ let log_memory_stats logger ~process = fun () -> ignore (Gc.major_slice 0) | other -> failwithf - "CODA_GC_HACK_MODE was %s, it should be full or slice. Default \ - is full." + "CODA_GC_HACK_MODE was %s, it should be full or slice. Default is \ + full." 
other in let interval = diff --git a/src/lib/merkle_address/merkle_address.ml b/src/lib/merkle_address/merkle_address.ml index ed58f8b06d3..a8d5a4d8033 100644 --- a/src/lib/merkle_address/merkle_address.ml +++ b/src/lib/merkle_address/merkle_address.ml @@ -6,7 +6,7 @@ let depth = bitstring_length let add_padding path = let length = depth path in if length mod 8 = 0 then path - else concat [path; zeroes_bitstring (8 - (length mod 8))] + else concat [ path; zeroes_bitstring (8 - (length mod 8)) ] let slice = subbitstring @@ -84,8 +84,7 @@ module Stable = struct let hash_fold_t hash_state t = [%hash_fold: int * string] hash_state (to_tuple t) - [%%define_from_scope - compare, to_yojson] + [%%define_from_scope compare, to_yojson] let equal = equals end @@ -137,7 +136,7 @@ let to_int (path : t) : int = Sequence.range 0 (depth path) |> Sequence.fold ~init:0 ~f:(fun acc i -> let index = depth path - 1 - i in - acc + ((if get path index <> 0 then 1 else 0) lsl i) ) + acc + ((if get path index <> 0 then 1 else 0) lsl i)) let of_int_exn ~ledger_depth index = if index >= 1 lsl ledger_depth then failwith "Index is too large" @@ -148,7 +147,7 @@ let of_int_exn ~ledger_depth index = (ledger_depth - 1) 0 |> Sequence.fold ~init:index ~f:(fun i pos -> Bitstring.put buf pos (i % 2) ; - i / 2 ) + i / 2) : int ) ; buf @@ -211,7 +210,7 @@ let serialize ~ledger_depth path = assert (path_len <= required_bits) ; let required_padding = required_bits - path_len in Bigstring.of_string @@ string_of_bitstring - @@ concat [path; zeroes_bitstring required_padding] + @@ concat [ path; zeroes_bitstring required_padding ] let is_parent_of parent ~maybe_child = Bitstring.is_prefix maybe_child parent @@ -223,8 +222,7 @@ module Range = struct if comparison > 0 then raise (Invalid_argument "first address needs to precede last address") else if comparison = 0 then init - else - fold_exl (next first |> Option.value_exn, last) ~init:(f first init) ~f + else fold_exl (next first |> Option.value_exn, last) 
~init:(f first init) ~f let fold_incl (first, last) ~init ~f = f last @@ fold_exl (first, last) ~init ~f @@ -239,10 +237,10 @@ module Range = struct let subtree_range ~ledger_depth address = let first_node = - concat [address; zeroes_bitstring @@ height ~ledger_depth address] + concat [ address; zeroes_bitstring @@ height ~ledger_depth address ] in let last_node = - concat [address; ones_bitstring @@ height ~ledger_depth address] + concat [ address; ones_bitstring @@ height ~ledger_depth address ] in (first_node, last_node) @@ -258,14 +256,14 @@ module Range = struct Some (current_node, (current_node, `Stop)) else Option.map (next current_node) ~f:(fun next_node -> - (current_node, (next_node, `Don't_stop)) ) ) + (current_node, (next_node, `Don't_stop)))) end let%test "Bitstring bin_io serialization does not change" = (* Bitstring.t is trustlisted as a versioned type. This test assures that serializations of that type haven't changed *) let text = - "Contrary to popular belief, Lorem Ipsum is not simply random text. It \ - has roots in a piece of classical Latin literature." + "Contrary to popular belief, Lorem Ipsum is not simply random text. It has \ + roots in a piece of classical Latin literature." 
in let bitstring = Bitstring.bitstring_of_string text in let known_good_digest = "c4c7ade09ba305b69ffac494a6eab60e" in @@ -289,28 +287,27 @@ struct let address = of_directions path in [%test_eq: t] (parent_exn (child_exn ~ledger_depth:Input.depth address direction)) - address ) + address) let%test_unit "to_index(of_index_exn(i)) = i" = Quickcheck.test ~sexp_of:[%sexp_of: int] (Int.gen_incl 0 ((1 lsl Input.depth) - 1)) ~f:(fun index -> [%test_result: int] ~expect:index - (to_int @@ of_int_exn ~ledger_depth:Input.depth index) ) + (to_int @@ of_int_exn ~ledger_depth:Input.depth index)) let%test_unit "of_index_exn(to_index(addr)) = addr" = Quickcheck.test ~sexp_of:[%sexp_of: Direction.t list] (Direction.gen_list Input.depth) ~f:(fun directions -> let address = of_directions directions in [%test_result: t] ~expect:address - (of_int_exn ~ledger_depth:Input.depth @@ to_int address) ) + (of_int_exn ~ledger_depth:Input.depth @@ to_int address)) let%test_unit "nonempty(addr): sibling(sibling(addr)) = addr" = Quickcheck.test ~sexp_of:[%sexp_of: Direction.t list] - (Direction.gen_var_length_list ~start:1 Input.depth) - ~f:(fun directions -> + (Direction.gen_var_length_list ~start:1 Input.depth) ~f:(fun directions -> let address = of_directions directions in - [%test_result: t] ~expect:address (sibling @@ sibling address) ) + [%test_result: t] ~expect:address (sibling @@ sibling address)) let%test_unit "prev(next(addr)) = addr" = Quickcheck.test ~sexp_of:[%sexp_of: Direction.t list] @@ -320,7 +317,7 @@ struct | None -> () | Some addr' -> - [%test_result: t option] ~expect:(Some address) (prev addr') ) + [%test_result: t option] ~expect:(Some address) (prev addr')) end let%test_module "Address" = diff --git a/src/lib/merkle_address/merkle_address.mli b/src/lib/merkle_address/merkle_address.mli index 3468615b0b4..767fc004961 100644 --- a/src/lib/merkle_address/merkle_address.mli +++ b/src/lib/merkle_address/merkle_address.mli @@ -55,7 +55,7 @@ module Range : sig type nonrec t = t 
* t val fold : - ?stop:[`Inclusive | `Exclusive] + ?stop:[ `Inclusive | `Exclusive ] -> t -> init:'a -> f:(Stable.Latest.t -> 'a -> 'a) diff --git a/src/lib/merkle_ledger/any_ledger.ml b/src/lib/merkle_ledger/any_ledger.ml index 86d4e5d088b..37ad15e9959 100644 --- a/src/lib/merkle_ledger/any_ledger.ml +++ b/src/lib/merkle_ledger/any_ledger.ml @@ -38,16 +38,16 @@ module type S = sig module type Base_intf = Base_ledger_intf.S - with module Addr = Location.Addr - with module Location = Location - with type key := key - and type token_id := token_id - and type token_id_set := token_id_set - and type account_id := account_id - and type account_id_set := account_id_set - and type hash := hash - and type root_hash := hash - and type account := account + with module Addr = Location.Addr + with module Location = Location + with type key := key + and type token_id := token_id + and type token_id_set := token_id_set + and type account_id := account_id + and type account_id_set := account_id_set + and type hash := hash + and type root_hash := hash + and type account := account val cast : (module Base_intf with type t = 'a) -> 'a -> witness @@ -62,29 +62,29 @@ end module Make_base (Inputs : Inputs_intf) : S - with module Location = Inputs.Location - with type key := Inputs.Key.t - and type token_id := Inputs.Token_id.t - and type token_id_set := Inputs.Token_id.Set.t - and type account_id := Inputs.Account_id.t - and type hash := Inputs.Hash.t - and type account_id_set := Inputs.Account_id.Set.t - and type account := Inputs.Account.t = struct + with module Location = Inputs.Location + with type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account_id := Inputs.Account_id.t + and type hash := Inputs.Hash.t + and type account_id_set := Inputs.Account_id.Set.t + and type account := Inputs.Account.t = struct open Inputs module Location = Location module type Base_intf = Base_ledger_intf.S - with module Addr 
= Location.Addr - with module Location = Location - with type key := Inputs.Key.t - and type token_id := Inputs.Token_id.t - and type token_id_set := Inputs.Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type root_hash := Hash.t - and type account := Account.t + with module Addr = Location.Addr + with module Location = Location + with type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type root_hash := Hash.t + and type account := Account.t type witness = T : (module Base_intf with type t = 't) * 't -> witness @@ -168,7 +168,7 @@ module Make_base (Inputs : Inputs_intf) : (* ignored_keys must be Base.Keys.Set.t, but that isn't necessarily the same as Keys.Set.t for the Keys passed to this functor; as long as we use the same Keys for all ledgers, this should work - *) + *) let foldi_with_ignored_accounts (T ((module Base), t)) = Base.foldi_with_ignored_accounts t diff --git a/src/lib/merkle_ledger/base_inputs_intf.ml b/src/lib/merkle_ledger/base_inputs_intf.ml index e8f0699ca4e..cda6d078c74 100644 --- a/src/lib/merkle_ledger/base_inputs_intf.ml +++ b/src/lib/merkle_ledger/base_inputs_intf.ml @@ -10,9 +10,9 @@ module type S = sig module Account : Intf.Account - with type token_id := Token_id.t - and type account_id := Account_id.t - and type balance := Balance.t + with type token_id := Token_id.t + and type account_id := Account_id.t + and type balance := Balance.t module Hash : Intf.Hash with type account := Account.t end diff --git a/src/lib/merkle_ledger/base_ledger_intf.ml b/src/lib/merkle_ledger/base_ledger_intf.ml index 9bcdd2692b2..25b68bff697 100644 --- a/src/lib/merkle_ledger/base_ledger_intf.ml +++ b/src/lib/merkle_ledger/base_ledger_intf.ml @@ -34,12 +34,12 @@ module type S = sig include 
Syncable_intf.S - with type root_hash := root_hash - and type hash := hash - and type account := account - and type addr := Addr.t - and type path = Path.t - and type t := t + with type root_hash := root_hash + and type hash := hash + and type account := account + and type addr := Addr.t + and type path = Path.t + and type t := t (** list of accounts in the ledger *) val to_list : t -> account list @@ -97,7 +97,10 @@ module type S = sig (** This may return an error if the ledger is full. *) val get_or_create_account : - t -> account_id -> account -> ([`Added | `Existed] * Location.t) Or_error.t + t + -> account_id + -> account + -> ([ `Added | `Existed ] * Location.t) Or_error.t (** the ledger should not be used after calling [close] *) val close : t -> unit diff --git a/src/lib/merkle_ledger/database.ml b/src/lib/merkle_ledger/database.ml index c8bb4aa8e48..39c7f31fc00 100644 --- a/src/lib/merkle_ledger/database.ml +++ b/src/lib/merkle_ledger/database.ml @@ -14,16 +14,16 @@ end module Make (Inputs : Inputs_intf) : Database_intf.S - with module Location = Inputs.Location - and module Addr = Inputs.Location.Addr - and type key := Inputs.Key.t - and type token_id := Inputs.Token_id.t - and type token_id_set := Inputs.Token_id.Set.t - and type account := Inputs.Account.t - and type root_hash := Inputs.Hash.t - and type hash := Inputs.Hash.t - and type account_id := Inputs.Account_id.t - and type account_id_set := Inputs.Account_id.Set.t = struct + with module Location = Inputs.Location + and module Addr = Inputs.Location.Addr + and type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account := Inputs.Account.t + and type root_hash := Inputs.Hash.t + and type hash := Inputs.Hash.t + and type account_id := Inputs.Account_id.t + and type account_id_set := Inputs.Account_id.Set.t = struct (* The max depth of a merkle tree can never be greater than 253. 
*) open Inputs @@ -54,11 +54,12 @@ module Make (Inputs : Inputs_intf) : end type t = - { uuid: Uuid.Stable.V1.t - ; kvdb: Kvdb.t [@sexp.opaque] - ; depth: int - ; directory: string - ; detached_parent_signal: Detached_parent_signal.t } + { uuid : Uuid.Stable.V1.t + ; kvdb : Kvdb.t [@sexp.opaque] + ; depth : int + ; directory : string + ; detached_parent_signal : Detached_parent_signal.t + } [@@deriving sexp] let get_uuid t = t.uuid @@ -82,22 +83,29 @@ module Make (Inputs : Inputs_intf) : in Unix.mkdir_p directory ; let kvdb = Kvdb.create directory in - {uuid; kvdb; depth; directory; detached_parent_signal= Async.Ivar.create ()} + { uuid + ; kvdb + ; depth + ; directory + ; detached_parent_signal = Async.Ivar.create () + } let create_checkpoint t ~directory_name () = let uuid = Uuid_unix.create () in let kvdb = Kvdb.create_checkpoint t.kvdb directory_name in { uuid ; kvdb - ; depth= t.depth - ; directory= directory_name - ; detached_parent_signal= Async.Ivar.create () } + ; depth = t.depth + ; directory = directory_name + ; detached_parent_signal = Async.Ivar.create () + } - let close {kvdb; uuid= _; depth= _; directory= _; detached_parent_signal} = + let close { kvdb; uuid = _; depth = _; directory = _; detached_parent_signal } + = Kvdb.close kvdb ; Async.Ivar.fill_if_empty detached_parent_signal () - let detached_signal {detached_parent_signal; _} = + let detached_signal { detached_parent_signal; _ } = Async.Ivar.read detached_parent_signal let with_ledger ~depth ~f = @@ -110,14 +118,13 @@ module Make (Inputs : Inputs_intf) : let empty_hash = Empty_hashes.extensible_cache (module Hash) ~init_hash:Hash.empty_account - let get_raw {kvdb; depth; _} location = + let get_raw { kvdb; depth; _ } location = Kvdb.get kvdb ~key:(Location.serialize ~ledger_depth:depth location) let get_bin mdb location bin_read = - get_raw mdb location - |> Option.map ~f:(fun v -> bin_read v ~pos_ref:(ref 0)) + get_raw mdb location |> Option.map ~f:(fun v -> bin_read v ~pos_ref:(ref 0)) - 
let delete_raw {kvdb; depth; _} location = + let delete_raw { kvdb; depth; _ } location = Kvdb.remove kvdb ~key:(Location.serialize ~ledger_depth:depth location) let get mdb location = @@ -132,7 +139,7 @@ module Make (Inputs : Inputs_intf) : | None -> empty_hash (Location.height ~ledger_depth:mdb.depth location) - let account_list_bin {kvdb; _} account_bin_read : Account.t list = + let account_list_bin { kvdb; _ } account_bin_read : Account.t list = let all_keys_values = Kvdb.to_alist kvdb in (* see comment at top of location.ml about encoding of locations *) let account_location_prefix = @@ -142,22 +149,22 @@ module Make (Inputs : Inputs_intf) : let locations_accounts_bin = List.filter all_keys_values ~f:(fun (loc, _v) -> let ch = Bigstring.get_uint8 loc ~pos:0 in - Int.equal ch account_location_prefix ) + Int.equal ch account_location_prefix) in List.map locations_accounts_bin ~f:(fun (_location_bin, account_bin) -> - account_bin_read account_bin ~pos_ref:(ref 0) ) + account_bin_read account_bin ~pos_ref:(ref 0)) let to_list mdb = account_list_bin mdb Account.bin_read_t let accounts mdb = to_list mdb |> List.map ~f:Account.identifier |> Account_id.Set.of_list - let set_raw {kvdb; depth; _} location bin = + let set_raw { kvdb; depth; _ } location bin = Kvdb.set kvdb ~key:(Location.serialize ~ledger_depth:depth location) ~data:bin - let set_raw_batch {kvdb; depth; _} locations_bins = + let set_raw_batch { kvdb; depth; _ } locations_bins = let serialize_location (loc, bin) = (Location.serialize ~ledger_depth:depth loc, bin) in @@ -234,8 +241,8 @@ module Make (Inputs : Inputs_intf) : let last_location_key () = Location.build_generic (Bigstring.of_string "last_account_location") - let serialize_last_account_kv ~ledger_depth - (location, last_account_location) = + let serialize_last_account_kv ~ledger_depth (location, last_account_location) + = ( Location.serialize ~ledger_depth location , Location.serialize ~ledger_depth last_account_location ) @@ -249,25 +256,24 @@ 
module Make (Inputs : Inputs_intf) : ( Addr.of_directions @@ List.init mdb.depth ~f:(fun _ -> Direction.Left) ) in - set_raw mdb location - (Location.serialize ~ledger_depth first_location) ; + set_raw mdb location (Location.serialize ~ledger_depth first_location) ; Result.return first_location | Some prev_location -> ( - match Location.parse ~ledger_depth:mdb.depth prev_location with - | Error () -> - Error Db_error.Malformed_database - | Ok prev_account_location -> - Location.next prev_account_location - |> Result.of_option ~error:Db_error.Out_of_leaves - |> Result.map ~f:(fun next_account_location -> - set_raw mdb location - (Location.serialize ~ledger_depth next_account_location) ; - next_account_location ) ) + match Location.parse ~ledger_depth:mdb.depth prev_location with + | Error () -> + Error Db_error.Malformed_database + | Ok prev_account_location -> + Location.next prev_account_location + |> Result.of_option ~error:Db_error.Out_of_leaves + |> Result.map ~f:(fun next_account_location -> + set_raw mdb location + (Location.serialize ~ledger_depth next_account_location) ; + next_account_location) ) let allocate mdb key = let location_result = increment_last_account_location mdb in Result.map location_result ~f:(fun location -> - set mdb key location ; location ) + set mdb key location ; location) let last_location_address mdb = match @@ -298,8 +304,7 @@ module Make (Inputs : Inputs_intf) : module Tokens = struct let next_available_key = Memo.unit (fun () -> - Location.build_generic (Bigstring.of_string "next_available_token") - ) + Location.build_generic (Bigstring.of_string "next_available_token")) let next_available mdb = Option.value @@ -376,8 +381,7 @@ module Make (Inputs : Inputs_intf) : ignore (Token_id.Set.bin_write_t tokens_buf ~pos:0 tids : int) ; (Location.serialize ~ledger_depth (build_location pk), tokens_buf) - let get_opt mdb pk = - get_bin mdb (build_location pk) Token_id.Set.bin_read_t + let get_opt mdb pk = get_bin mdb (build_location pk) 
Token_id.Set.bin_read_t let get mdb pk = Option.value ~default:Token_id.Set.empty (get_opt mdb pk) @@ -403,13 +407,13 @@ module Make (Inputs : Inputs_intf) : let update mdb pk ~f = change_opt mdb pk ~f:(fun x -> - to_opt @@ f (Option.value ~default:Token_id.Set.empty x) ) + to_opt @@ f (Option.value ~default:Token_id.Set.empty x)) let add mdb pk tid = update mdb pk ~f:(fun tids -> Set.add tids tid) let _add_several mdb pk new_tids = update mdb pk ~f:(fun tids -> - Set.union tids (Token_id.Set.of_list new_tids) ) + Set.union tids (Token_id.Set.of_list new_tids)) let add_account mdb aid account = let token = Account_id.token_id aid in @@ -422,7 +426,7 @@ module Make (Inputs : Inputs_intf) : let _remove_several mdb pk rem_tids = update mdb pk ~f:(fun tids -> - Set.diff tids (Token_id.Set.of_list rem_tids) ) + Set.diff tids (Token_id.Set.of_list rem_tids)) let remove_account mdb aid = let token = Account_id.token_id aid in @@ -435,7 +439,7 @@ module Make (Inputs : Inputs_intf) : let add_batch_create mdb pks_to_tokens = let pks_to_all_tokens = Map.filter_mapi pks_to_tokens ~f:(fun ~key:pk ~data:tokens_to_add -> - to_opt (Set.union (get mdb pk) tokens_to_add) ) + to_opt (Set.union (get mdb pk) tokens_to_add)) in Map.to_alist pks_to_all_tokens |> List.map ~f:(serialize_kv ~ledger_depth:mdb.depth) @@ -509,17 +513,18 @@ module Make (Inputs : Inputs_intf) : | Some set -> Set.add set (Account_id.token_id aid) | None -> - Token_id.Set.singleton (Account_id.token_id aid) ) + Token_id.Set.singleton (Account_id.token_id aid)) , (* If the token is present in an account, it is no longer available. 
*) Token_id.max next_available_token - (Token_id.next (Account_id.token_id aid)) ) ) + (Token_id.next (Account_id.token_id aid)) )) in let next_available_token_change = if Token_id.(new_next_available_token > next_available_token) then [ Tokens.next_available_kv ~ledger_depth:mdb.depth - new_next_available_token ] + new_next_available_token + ] else [] in let batched_changes = @@ -543,13 +548,14 @@ module Make (Inputs : Inputs_intf) : Some (Tokens.Owner.serialize_kv ~ledger_depth:mdb.depth (Account_id.token_id aid, Account_id.public_key aid)) - else None ) + else None) in Kvdb.set_batch mdb.kvdb ~remove_keys:[] ~key_data_pairs:token_owner_changes end) - let set_hash mdb location new_hash = set_hash_batch mdb [(location, new_hash)] + let set_hash mdb location new_hash = + set_hash_batch mdb [ (location, new_hash) ] module For_tests = struct let gen_account_location ~ledger_depth = @@ -586,13 +592,14 @@ module Make (Inputs : Inputs_intf) : let get_or_create_account mdb account_id account = match Account_location.get mdb account_id with | Error Account_location_not_found -> ( - match Account_location.allocate mdb account_id with - | Ok location -> - set mdb location account ; - Tokens.add_account mdb account_id account ; - Ok (`Added, location) - | Error err -> - Error (Error.create "get_or_create_account" err Db_error.sexp_of_t) ) + match Account_location.allocate mdb account_id with + | Ok location -> + set mdb location account ; + Tokens.add_account mdb account_id account ; + Ok (`Added, location) + | Error err -> + Error (Error.create "get_or_create_account" err Db_error.sexp_of_t) + ) | Error err -> Error (Error.create "get_or_create_account" err Db_error.sexp_of_t) | Ok location -> @@ -627,7 +634,7 @@ module Make (Inputs : Inputs_intf) : let ignored_indices = Int.Set.map ignored_accounts ~f:(fun account_id -> try index_of_account_exn t account_id with _ -> -1 - (* dummy index for accounts not in database *) ) + (* dummy index for accounts not in database *)) in 
let last = Addr.to_int last_addr in Sequence.range ~stop:`inclusive 0 last @@ -667,11 +674,11 @@ module Make (Inputs : Inputs_intf) : | [] -> accum (* no need to reverse *) | key :: rest -> ( - match Account_location.get t key with - | Ok loc -> - loop rest (loc :: accum) - | Error err -> - raise (Db_error.Db_exception err) ) + match Account_location.get t key with + | Ok loc -> + loop rest (loc :: accum) + | Error err -> + raise (Db_error.Db_exception err) ) in loop keys [] in @@ -683,7 +690,7 @@ module Make (Inputs : Inputs_intf) : (* recalculate hashes for each removed account *) List.iter locations ~f:(fun loc -> let hash_loc = Location.Hash (Location.to_path_exn loc) in - set_hash t hash_loc Hash.empty_account ) + set_hash t hash_loc Hash.empty_account) let merkle_path mdb location = let location = diff --git a/src/lib/merkle_ledger/graphviz.ml b/src/lib/merkle_ledger/graphviz.ml index 3323f3b6c73..5afca85d124 100644 --- a/src/lib/merkle_ledger/graphviz.ml +++ b/src/lib/merkle_ledger/graphviz.ml @@ -31,8 +31,8 @@ module type Inputs_intf = sig module Account : Intf.Account - with type account_id := Account_id.t - and type balance := Balance.t + with type account_id := Account_id.t + and type balance := Balance.t module Hash : Intf.Hash with type account := Account.t @@ -40,13 +40,13 @@ module type Inputs_intf = sig module Ledger : Base_ledger_intf.S - with module Addr = Location.Addr - and module Location = Location - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type root_hash := Hash.t - and type account := Account.t + with module Addr = Location.Addr + and module Location = Location + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type root_hash := Hash.t + and type account := Account.t end module Make (Inputs : Inputs_intf) : @@ -59,7 +59,7 @@ struct include Comparator.Make (Account) end - type ('source, 'target) edge = 
{source: 'source; target: 'target} + type ('source, 'target) edge = { source : 'source; target : 'target } type target = | Hash of Hash.t @@ -69,7 +69,7 @@ struct type merkle_tree_edge = (Hash.t, target) edge - type pretty_format_account = {public_key: string; balance: int} + type pretty_format_account = { public_key : string; balance : int } type pretty_target = | Pretty_hash of string @@ -105,19 +105,20 @@ struct match Ledger.get t (Location.Account address) with | Some new_account -> (* let public_key = Account.public_key new_account in - let location = Ledger.location_of_account t public_key |> Option.value_exn in - let queried_account = Ledger.get t location |> Option.value_exn in - assert (Account.equal queried_account new_account); *) + let location = Ledger.location_of_account t public_key |> Option.value_exn in + let queried_account = Ledger.get t location |> Option.value_exn in + assert (Account.equal queried_account new_account); *) assert (not @@ Set.mem accounts new_account) ; let new_accounts = Set.add accounts new_account in bfs ~edges: - ( {source= parent_hash; target= Account new_account} + ( { source = parent_hash; target = Account new_account } :: edges ) ~accounts:new_accounts jobs | None -> bfs - ~edges:({source= parent_hash; target= Empty_account} :: edges) + ~edges: + ({ source = parent_hash; target = Empty_account } :: edges) ~accounts jobs else let current_hash = Ledger.get_inner_hash_at_addr_exn t address in @@ -136,40 +137,46 @@ struct Hash current_hash ) else Empty_hash in - bfs ~edges:({source= parent_hash; target} :: edges) ~accounts jobs + bfs + ~edges:({ source = parent_hash; target } :: edges) + ~accounts jobs in let edges = bfs ~edges:[] ~accounts:(Set.empty (module Account)) (Queue.of_list [ Addr.child_exn ~ledger_depth initial_address Direction.Left - ; Addr.child_exn ~ledger_depth initial_address Direction.Right ]) + ; Addr.child_exn ~ledger_depth initial_address Direction.Right + ]) in let edges = List.folding_map edges 
~init:(0, 0) - ~f:(fun (empty_account_counter, empty_hash_counter) {source; target} -> + ~f:(fun (empty_account_counter, empty_hash_counter) { source; target } + -> let source = string_of_hash source in match target with | Hash target_hash -> ( (empty_account_counter, empty_hash_counter) - , {source; target= Pretty_hash (string_of_hash target_hash)} ) + , { source; target = Pretty_hash (string_of_hash target_hash) } ) | Account account -> let string_key = string_of_account_id account in let pretty_account = - { public_key= string_key - ; balance= Account.balance account |> Balance.to_int } + { public_key = string_key + ; balance = Account.balance account |> Balance.to_int + } in ( (empty_account_counter, empty_hash_counter) - , {source; target= Pretty_account pretty_account} ) + , { source; target = Pretty_account pretty_account } ) | Empty_hash -> let new_empty_hash_counter = empty_hash_counter + 1 in ( (empty_account_counter, new_empty_hash_counter) - , {source; target= Pretty_empty_hash new_empty_hash_counter} ) + , { source; target = Pretty_empty_hash new_empty_hash_counter } ) | Empty_account -> let new_empty_account_counter = empty_account_counter + 1 in ( (new_empty_account_counter, empty_hash_counter) - , {source; target= Pretty_empty_account new_empty_account_counter} - ) ) + , { source + ; target = Pretty_empty_account new_empty_account_counter + } )) in edges @@ -179,22 +186,24 @@ struct let write_empty_entry ~id source count = let empty_hash_id = sprintf "EMPTY_%s_%d" id count in [ sprintf "\"%s\" -> \"%s\" " source empty_hash_id - ; sprintf "\"%s\" [shape=point]" empty_hash_id ] + ; sprintf "\"%s\" [shape=point]" empty_hash_id + ] let write ~path ~name edges = let body = - List.map edges ~f:(fun {source; target} -> + List.map edges ~f:(fun { source; target } -> match target with | Pretty_hash hash -> - [sprintf "\"%s\" -> \"%s\" " source hash] - | Pretty_account {public_key; balance} -> + [ sprintf "\"%s\" -> \"%s\" " source hash ] + | Pretty_account { 
public_key; balance } -> [ sprintf "\"%s\" -> \"%s\" " source public_key ; sprintf "\"%s\" [shape=record,label=\"{%s|%d}\"]" public_key - public_key balance ] + public_key balance + ] | Pretty_empty_hash count -> write_empty_entry ~id:"HASH" source count | Pretty_empty_account count -> - write_empty_entry ~id:"ACCOUNT" source count ) + write_empty_entry ~id:"ACCOUNT" source count) |> List.concat |> String.concat ~sep:"\n" in let code = wrapper ~name body in diff --git a/src/lib/merkle_ledger/intf.ml b/src/lib/merkle_ledger/intf.ml index 2a1bdb602f0..30e6cb2b4db 100644 --- a/src/lib/merkle_ledger/intf.ml +++ b/src/lib/merkle_ledger/intf.ml @@ -3,15 +3,14 @@ open Core module type Key = sig type t [@@deriving sexp] - module Stable : - sig - module V1 : sig - type t [@@deriving sexp, bin_io] - end - - module Latest = V1 + module Stable : sig + module V1 : sig + type t [@@deriving sexp, bin_io] end - with type V1.t = t + + module Latest = V1 + end + with type V1.t = t val empty : t @@ -25,13 +24,12 @@ end module type Token_id = sig type t [@@deriving sexp] - module Stable : - sig - module Latest : sig - type t [@@deriving bin_io] - end + module Stable : sig + module Latest : sig + type t [@@deriving bin_io] end - with type Latest.t = t + end + with type Latest.t = t val default : t @@ -120,10 +118,10 @@ module type Key_value_database = sig include Key_value_database.Intf.Ident - with type t := t - and type key := Bigstring.t - and type value := Bigstring.t - and type config := config + with type t := t + and type key := Bigstring.t + and type value := Bigstring.t + and type config := config val create_checkpoint : t -> string -> t diff --git a/src/lib/merkle_ledger/location.ml b/src/lib/merkle_ledger/location.ml index 38cc3151e1d..0c56b8327b4 100644 --- a/src/lib/merkle_ledger/location.ml +++ b/src/lib/merkle_ledger/location.ml @@ -118,8 +118,7 @@ module T = struct let next : t -> t Option.t = function | Generic _ -> - raise - (Invalid_argument "next: generic locations 
have no next location") + raise (Invalid_argument "next: generic locations have no next location") | Account path -> Addr.next path |> Option.map ~f:(fun next -> Account next) | Hash path -> @@ -127,8 +126,7 @@ module T = struct let prev : t -> t Option.t = function | Generic _ -> - raise - (Invalid_argument "prev: generic locations have no prev location") + raise (Invalid_argument "prev: generic locations have no prev location") | Account path -> Addr.prev path |> Option.map ~f:(fun prev -> Account prev) | Hash path -> diff --git a/src/lib/merkle_ledger/merkle_path.ml b/src/lib/merkle_ledger/merkle_path.ml index 3e3c42e8871..eb0b11eab36 100644 --- a/src/lib/merkle_ledger/merkle_path.ml +++ b/src/lib/merkle_ledger/merkle_path.ml @@ -3,7 +3,7 @@ open Core_kernel module type S = sig type hash - type elem = [`Left of hash | `Right of hash] [@@deriving sexp, equal] + type elem = [ `Left of hash | `Right of hash ] [@@deriving sexp, equal] val elem_hash : elem -> hash @@ -21,7 +21,7 @@ module Make (Hash : sig val equal : t -> t -> bool end) : S with type hash := Hash.t = struct - type elem = [`Left of Hash.t | `Right of Hash.t] [@@deriving sexp, equal] + type elem = [ `Left of Hash.t | `Right of Hash.t ] [@@deriving sexp, equal] let elem_hash = function `Left h | `Right h -> h @@ -36,7 +36,7 @@ end) : S with type hash := Hash.t = struct | `Right h -> Hash.merge ~height h acc in - (acc, height + 1) ) + (acc, height + 1)) |> fst let check_path t leaf_hash root_hash = diff --git a/src/lib/merkle_ledger/merkle_path_intf.ml b/src/lib/merkle_ledger/merkle_path_intf.ml index 6ef298a871f..4627b1ddd3d 100644 --- a/src/lib/merkle_ledger/merkle_path_intf.ml +++ b/src/lib/merkle_ledger/merkle_path_intf.ml @@ -1,7 +1,7 @@ module type S = sig type hash - type elem = [`Left of hash | `Right of hash] [@@deriving sexp, equal] + type elem = [ `Left of hash | `Right of hash ] [@@deriving sexp, equal] val elem_hash : elem -> hash diff --git a/src/lib/merkle_ledger/null_ledger.ml 
b/src/lib/merkle_ledger/null_ledger.ml index 42a4dc09efc..1e836a74876 100644 --- a/src/lib/merkle_ledger/null_ledger.ml +++ b/src/lib/merkle_ledger/null_ledger.ml @@ -9,22 +9,22 @@ end module Make (Inputs : Inputs_intf) : sig include Base_ledger_intf.S - with module Addr = Inputs.Location.Addr - with module Location = Inputs.Location - with type key := Inputs.Key.t - and type token_id := Inputs.Token_id.t - and type token_id_set := Inputs.Token_id.Set.t - and type account_id := Inputs.Account_id.t - and type account_id_set := Inputs.Account_id.Set.t - and type hash := Inputs.Hash.t - and type root_hash := Inputs.Hash.t - and type account := Inputs.Account.t + with module Addr = Inputs.Location.Addr + with module Location = Inputs.Location + with type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account_id := Inputs.Account_id.t + and type account_id_set := Inputs.Account_id.Set.t + and type hash := Inputs.Hash.t + and type root_hash := Inputs.Hash.t + and type account := Inputs.Account.t val create : depth:int -> unit -> t end = struct open Inputs - type t = {uuid: Uuid.t; depth: int} [@@deriving sexp_of] + type t = { uuid : Uuid.t; depth : int } [@@deriving sexp_of] let t_of_sexp _ = failwith "t_of_sexp unimplemented" @@ -37,7 +37,7 @@ end = struct module Addr = Location.Addr - let create ~depth () = {uuid= Uuid_unix.create (); depth} + let create ~depth () = { uuid = Uuid_unix.create (); depth } let remove_accounts_exn _t keys = if List.is_empty keys then () diff --git a/src/lib/merkle_ledger/util.ml b/src/lib/merkle_ledger/util.ml index cca9e985032..aa0fe1ee615 100644 --- a/src/lib/merkle_ledger/util.ml +++ b/src/lib/merkle_ledger/util.ml @@ -16,9 +16,9 @@ module type Inputs_intf = sig module Account : Intf.Account - with type balance := Balance.t - and type account_id := Account_id.t - and type token_id := Token_id.t + with type balance := Balance.t + and type account_id := Account_id.t 
+ and type token_id := Token_id.t module Hash : Intf.Hash with type account := Account.t @@ -71,18 +71,18 @@ end = struct let open Inputs in let result = Location.Addr.Range.fold - (Location.Addr.Range.subtree_range - ~ledger_depth:(Inputs.ledger_depth t) address) + (Location.Addr.Range.subtree_range ~ledger_depth:(Inputs.ledger_depth t) + address) ~init:[] ~f:(fun bit_index acc -> let account = Base.get t (location_of_account_addr bit_index) in - (bit_index, account) :: acc ) + (bit_index, account) :: acc) in List.rev_filter_map result ~f:(function | _, None -> None | addr, Some account -> - Some (addr, account) ) + Some (addr, account)) let rec compute_affected_locations_and_hashes t locations_and_hashes acc = let ledger_depth = Inputs.ledger_depth t in @@ -118,7 +118,7 @@ end = struct (* This is the first child of its parent that we have encountered. *) - `One_side (location, hash) ) ) + `One_side (location, hash))) in let rev_parent_locations_and_hashes = Map.fold parents_to_children ~init:[] ~f:(fun ~key ~data acc -> @@ -138,7 +138,7 @@ end = struct (key, parent_hash) :: acc | `Hash parent_hash -> (* We have already computed the hash above. 
*) - (key, parent_hash) :: acc ) + (key, parent_hash) :: acc) in compute_affected_locations_and_hashes t rev_parent_locations_and_hashes (List.rev_append rev_parent_locations_and_hashes acc) @@ -162,7 +162,7 @@ end = struct let key_locations = Non_empty_list.map nonempty_addresses_and_accounts ~f:(fun (address, account) -> - (Inputs.Account.identifier account, address) ) + (Inputs.Account.identifier account, address)) in let new_last_location = let current_last_index = @@ -183,7 +183,7 @@ end = struct Account (Addr.of_int_exn ~ledger_depth max_index_in_all_accounts)) in let last_location = new_last_location in - Inputs.set_location_batch ~last_location t key_locations ) + Inputs.set_location_batch ~last_location t key_locations) (* TODO: When we do batch on a database, we should add accounts, locations and hashes simulatenously for full atomicity. *) @@ -193,12 +193,12 @@ end = struct set_hash_batch t @@ List.map locations_and_accounts ~f:(fun (location, account) -> ( Inputs.location_of_hash_addr (Inputs.Location.to_path_exn location) - , Inputs.Hash.hash_account account ) ) + , Inputs.Hash.hash_account account )) let set_batch_accounts t addresses_and_accounts = set_batch t @@ List.map addresses_and_accounts ~f:(fun (addr, account) -> - (Inputs.location_of_account_addr addr, account) ) + (Inputs.location_of_account_addr addr, account)) let set_all_accounts_rooted_at_exn t address accounts = let addresses = diff --git a/src/lib/merkle_ledger_tests/binary_tree.ml b/src/lib/merkle_ledger_tests/binary_tree.ml index c689e796797..2f45a3a41ed 100644 --- a/src/lib/merkle_ledger_tests/binary_tree.ml +++ b/src/lib/merkle_ledger_tests/binary_tree.ml @@ -4,15 +4,15 @@ module Make (Account : sig type t end) (Hash : Merkle_ledger.Intf.Hash with type account := Account.t) (Depth : sig - val depth : int + val depth : int end) = struct - type t = Node of {hash: Hash.t; left: t; right: t} | Leaf of Hash.t + type t = Node of { hash : Hash.t; left : t; right : t } | Leaf of Hash.t 
[@@deriving sexp] let max_depth = Depth.depth - let get_hash = function Leaf hash -> hash | Node {hash; _} -> hash + let get_hash = function Leaf hash -> hash | Node { hash; _ } -> hash let set_accounts (list : Account.t list) = let rec go (list : Hash.t list) num_nodes = @@ -32,7 +32,7 @@ struct Hash.merge ~height:left_height (get_hash left_tree) (get_hash right_tree) in - ( Node {hash; left= left_tree; right= right_tree} + ( Node { hash; left = left_tree; right = right_tree } , remaining_nodes , left_height + 1 ) in @@ -51,7 +51,7 @@ struct | Leaf hash -> ( function | [] -> hash | _ :: _ -> failwith "Could not traverse beyond a leaf" ) - | Node {hash; left; right} -> ( + | Node { hash; left; right } -> ( function | [] -> hash diff --git a/src/lib/merkle_ledger_tests/test.ml b/src/lib/merkle_ledger_tests/test.ml index fd1dfece7f6..e14f2e99d7f 100644 --- a/src/lib/merkle_ledger_tests/test.ml +++ b/src/lib/merkle_ledger_tests/test.ml @@ -49,20 +49,18 @@ let%test_module "Database integration test" = Quickcheck.test ~trials:5 ~sexp_of:[%sexp_of: Balance.t list] gen_non_zero_balances ~f:(fun balances -> let account_ids = Account_id.gen_accounts num_accounts in - let accounts = - List.map2_exn account_ids balances ~f:Account.create - in + let accounts = List.map2_exn account_ids balances ~f:Account.create in DB.with_ledger ~depth:Depth.depth ~f:(fun db -> let enumerate_dir_combinations max_depth = Sequence.range 0 (max_depth - 1) - |> Sequence.fold ~init:[[]] ~f:(fun acc _ -> + |> Sequence.fold ~init:[ [] ] ~f:(fun acc _ -> acc @ List.map acc ~f:(List.cons Direction.Left) - @ List.map acc ~f:(List.cons Direction.Right) ) + @ List.map acc ~f:(List.cons Direction.Right)) in List.iter accounts ~f:(fun account -> let account_id = Account.identifier account in - ignore @@ DB.get_or_create_account db account_id account ) ; + ignore @@ DB.get_or_create_account db account_id account) ; let binary_tree = Binary_tree.set_accounts accounts in Sequence.iter 
(enumerate_dir_combinations Depth.depth |> Sequence.of_list) @@ -74,5 +72,5 @@ let%test_module "Database integration test" = let binary_hash = Binary_tree.get_inner_hash_at_addr_exn binary_tree dirs in - assert (Hash.equal binary_hash db_hash) ) ) ) + assert (Hash.equal binary_hash db_hash)))) end ) diff --git a/src/lib/merkle_ledger_tests/test_database.ml b/src/lib/merkle_ledger_tests/test_database.ml index cc402178e08..283053a02a7 100644 --- a/src/lib/merkle_ledger_tests/test_database.ml +++ b/src/lib/merkle_ledger_tests/test_database.ml @@ -8,14 +8,14 @@ let%test_module "test functor on in memory databases" = module type DB = Merkle_ledger.Database_intf.S - with type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type account := Account.t - and type root_hash := Hash.t - and type hash := Hash.t + with type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type account := Account.t + and type root_hash := Hash.t + and type hash := Hash.t module type Test_intf = sig val depth : int @@ -35,8 +35,7 @@ let%test_module "test functor on in memory databases" = Test.with_instance (fun mdb -> Quickcheck.test (MT.For_tests.gen_account_location ~ledger_depth:(MT.depth mdb)) - ~f:(fun location -> assert (Option.is_none (MT.get mdb location))) - ) + ~f:(fun location -> assert (Option.is_none (MT.get mdb location)))) let create_new_account_exn mdb account = let public_key = Account.identifier account in @@ -53,7 +52,7 @@ let%test_module "test functor on in memory databases" = Test.with_instance (fun mdb -> let account = Quickcheck.random_value Account.gen in let location = create_new_account_exn mdb account in - Account.equal (Option.value_exn (MT.get mdb location)) account ) + Account.equal (Option.value_exn (MT.get mdb 
location)) account) let%test "accounts are atomic" = Test.with_instance (fun mdb -> @@ -70,13 +69,13 @@ let%test_module "test functor on in memory databases" = | Some acct, Some acct' -> Account.equal acct acct' | _, _ -> - false ) + false) let dedup_accounts accounts = List.dedup_and_sort accounts ~compare:(fun account1 account2 -> Account_id.compare (Account.identifier account1) - (Account.identifier account2) ) + (Account.identifier account2)) let%test_unit "length" = Test.with_instance (fun mdb -> @@ -96,12 +95,12 @@ let%test_module "test functor on in memory databases" = in let num_initial_accounts = List.length accounts in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; let result = MT.num_accounts mdb in - [%test_eq: int] result num_initial_accounts ) + [%test_eq: int] result num_initial_accounts) - let%test "get_or_create_acount does not update an account if key \ - already exists" = + let%test "get_or_create_acount does not update an account if key already \ + exists" = Test.with_instance (fun mdb -> let account_id = Quickcheck.random_value Account_id.gen in let balance = @@ -124,7 +123,7 @@ let%test_module "test functor on in memory databases" = && not (Mina_base.Account.equal (Option.value_exn (MT.get mdb location)) - account') ) + account')) let%test_unit "get_or_create_account t account = location_of_account \ account.key" = @@ -146,7 +145,7 @@ let%test_module "test functor on in memory databases" = let location' = MT.location_of_account mdb account_id |> Option.value_exn in - assert ([%equal: Test.Location.t] location location') ) ) + assert ([%equal: Test.Location.t] location location'))) let%test_unit "set_inner_hash_at_addr_exn(address,hash); \ get_inner_hash_at_addr_exn(address) = hash" = @@ -161,7 +160,7 @@ let%test_module "test functor on in memory databases" = let address = MT.Addr.of_directions direction in MT.set_inner_hash_at_addr_exn mdb address 
random_hash ; let result = MT.get_inner_hash_at_addr_exn mdb address in - assert (Hash.equal result random_hash) ) ) + assert (Hash.equal result random_hash))) let random_accounts max_height = let num_accounts = 1 lsl max_height in @@ -181,7 +180,7 @@ let%test_module "test functor on in memory databases" = | `Added -> () | `Existed -> - MT.set mdb location account ) + MT.set mdb location account) let%test_unit "If the entire database is full, let \ addresses_and_accounts = \ @@ -195,10 +194,8 @@ let%test_module "test functor on in memory databases" = ~sexp_of:[%sexp_of: Direction.t List.t] ~f:(fun directions -> let address = let offset = depth - max_height in - let padding = - List.init offset ~f:(fun _ -> Direction.Left) - in - let padded_directions = List.concat [padding; directions] in + let padding = List.init offset ~f:(fun _ -> Direction.Left) in + let padded_directions = List.concat [ padding; directions ] in MT.Addr.of_directions padded_directions in let old_merkle_root = MT.merkle_root mdb in @@ -207,7 +204,7 @@ let%test_module "test functor on in memory databases" = in MT.set_batch_accounts mdb addresses_and_accounts ; let new_merkle_root = MT.merkle_root mdb in - assert (Hash.equal old_merkle_root new_merkle_root) ) ) + assert (Hash.equal old_merkle_root new_merkle_root))) let%test_unit "set_batch_accounts would change the merkle root" = Test.with_instance (fun mdb -> @@ -218,10 +215,8 @@ let%test_module "test functor on in memory databases" = ~sexp_of:[%sexp_of: Direction.t List.t] ~f:(fun directions -> let address = let offset = depth - max_height in - let padding = - List.init offset ~f:(fun _ -> Direction.Left) - in - let padded_directions = List.concat [padding; directions] in + let padding = List.init offset ~f:(fun _ -> Direction.Left) in + let padded_directions = List.concat [ padding; directions ] in MT.Addr.of_directions padded_directions in let num_accounts = 1 lsl (depth - MT.Addr.depth address) in @@ -236,7 +231,7 @@ let%test_module "test 
functor on in memory databases" = @@ MT.Addr.Range.fold (MT.Addr.Range.subtree_range ~ledger_depth:depth address) ~init:[] ~f:(fun address addresses -> - address :: addresses ) + address :: addresses) in let new_addresses_and_accounts = List.zip_exn addresses accounts @@ -251,14 +246,13 @@ let%test_module "test functor on in memory databases" = @@ List.equal (fun (addr1, account1) (addr2, account2) -> MT.Addr.equal addr1 addr2 - && Account.equal account1 account2 ) + && Account.equal account1 account2) old_addresses_and_accounts new_addresses_and_accounts then ( let old_merkle_root = MT.merkle_root mdb in MT.set_batch_accounts mdb new_addresses_and_accounts ; let new_merkle_root = MT.merkle_root mdb in - assert (not @@ Hash.equal old_merkle_root new_merkle_root) ) - ) ) + assert (not @@ Hash.equal old_merkle_root new_merkle_root) ))) let%test_unit "We can retrieve accounts by their by key after using \ set_batch_accounts" = @@ -276,8 +270,7 @@ let%test_module "test functor on in memory databases" = let location = Test.Location.next prev_location |> Option.value_exn in - (location, (location |> Test.Location.to_path_exn, account)) - ) + (location, (location |> Test.Location.to_path_exn, account))) in MT.set_batch_accounts mdb accounts_with_addresses ; List.iter accounts ~f:(fun account -> @@ -285,10 +278,8 @@ let%test_module "test functor on in memory databases" = let location = MT.location_of_account mdb aid |> Option.value_exn in - let queried_account = - MT.get mdb location |> Option.value_exn - in - assert (Account.equal queried_account account) ) ; + let queried_account = MT.get mdb location |> Option.value_exn in + assert (Account.equal queried_account account)) ; let to_int = Fn.compose MT.Location.Addr.to_int MT.Location.to_path_exn in @@ -303,7 +294,7 @@ let%test_module "test functor on in memory databases" = actual_last_location ~message: (sprintf "(expected_location: %i) (actual_location: %i)" - expected_last_location actual_last_location) ) + 
expected_last_location actual_last_location)) let%test_unit "If the entire database is full, \ set_all_accounts_rooted_at_exn(address,accounts);get_all_accounts_rooted_at_exn(address) \ @@ -315,10 +306,8 @@ let%test_module "test functor on in memory databases" = ~sexp_of:[%sexp_of: Direction.t List.t] ~f:(fun directions -> let address = let offset = MT.depth mdb - max_height in - let padding = - List.init offset ~f:(fun _ -> Direction.Left) - in - let padded_directions = List.concat [padding; directions] in + let padding = List.init offset ~f:(fun _ -> Direction.Left) in + let padded_directions = List.concat [ padding; directions ] in MT.Addr.of_directions padded_directions in let num_accounts = @@ -334,7 +323,7 @@ let%test_module "test functor on in memory databases" = List.map ~f:snd @@ MT.get_all_accounts_rooted_at_exn mdb address in - assert (List.equal Account.equal accounts result) ) ) + assert (List.equal Account.equal accounts result))) let%test_unit "create_empty doesn't modify the hash" = Test.with_instance (fun ledger -> @@ -348,7 +337,7 @@ let%test_module "test functor on in memory databases" = failwith "create_empty with empty ledger somehow already has that key?" 
| `Added, _ -> - [%test_eq: Hash.t] start_hash (merkle_root ledger) ) + [%test_eq: Hash.t] start_hash (merkle_root ledger)) let%test "get_at_index_exn t (index_of_account_exn t public_key) = \ account" = @@ -356,14 +345,14 @@ let%test_module "test functor on in memory databases" = let max_height = Int.min (MT.depth mdb) 5 in let accounts = random_accounts max_height |> dedup_accounts in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; Sequence.of_list accounts |> Sequence.for_all ~f:(fun account -> let indexed_account = MT.index_of_account_exn mdb (Account.identifier account) |> MT.get_at_index_exn mdb in - Account.equal account indexed_account ) ) + Account.equal account indexed_account)) let test_subtree_range mdb ~f max_height = populate_db mdb max_height ; @@ -377,7 +366,7 @@ let%test_module "test functor on in memory databases" = let account = Quickcheck.random_value Account.gen in MT.set_at_index_exn mdb index account ; let result = MT.get_at_index_exn mdb index in - assert (Account.equal account result) ) ) + assert (Account.equal account result))) let%test_unit "implied_root(account) = root_hash" = Test.with_instance (fun mdb -> @@ -389,12 +378,12 @@ let%test_module "test functor on in memory databases" = let offset = List.init (depth - max_height) ~f:(fun _ -> Direction.Left) in - let padded_directions = List.concat [offset; directions] in + let padded_directions = List.concat [ offset; directions ] in let address = MT.Addr.of_directions padded_directions in let path = MT.merkle_path_at_addr_exn mdb address in let leaf_hash = MT.get_inner_hash_at_addr_exn mdb address in let root_hash = MT.merkle_root mdb in - assert (MT.Path.check_path path leaf_hash root_hash) ) ) + assert (MT.Path.check_path path leaf_hash root_hash))) let%test_unit "implied_root(index) = root_hash" = Test.with_instance (fun mdb -> @@ -407,16 +396,15 @@ let%test_module "test functor on in memory 
databases" = (MT.Addr.of_int_exn ~ledger_depth:depth index) in let root_hash = MT.merkle_root mdb in - assert (MT.Path.check_path path leaf_hash root_hash) ) ) + assert (MT.Path.check_path path leaf_hash root_hash))) let%test_unit "iter" = Test.with_instance (fun mdb -> let max_height = Int.min (MT.depth mdb) 5 in let accounts = random_accounts max_height |> dedup_accounts in List.iter accounts ~f:(fun account -> - ignore (create_new_account_exn mdb account : Test.Location.t) - ) ; - [%test_result: Account.t list] accounts ~expect:(MT.to_list mdb) ) + ignore (create_new_account_exn mdb account : Test.Location.t)) ; + [%test_result: Account.t list] accounts ~expect:(MT.to_list mdb)) let%test_unit "Add 2^d accounts (for testing, d is small)" = if Test.depth <= 8 then @@ -432,13 +420,13 @@ let%test_module "test functor on in memory databases" = List.map2_exn account_ids balances ~f:Account.create in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; let retrieved_accounts = List.map ~f:snd @@ MT.get_all_accounts_rooted_at_exn mdb (MT.Addr.root ()) in assert (List.length accounts = List.length retrieved_accounts) ; - assert (List.equal Account.equal accounts retrieved_accounts) ) + assert (List.equal Account.equal accounts retrieved_accounts)) let%test_unit "removing accounts restores Merkle root" = Test.with_instance (fun mdb -> @@ -453,14 +441,14 @@ let%test_module "test functor on in memory databases" = in let merkle_root0 = MT.merkle_root mdb in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; let merkle_root1 = MT.merkle_root mdb in (* adding accounts should change the Merkle root *) assert (not (Hash.equal merkle_root0 merkle_root1)) ; MT.remove_accounts_exn mdb account_ids ; (* should see original Merkle root after removing the accounts *) let merkle_root2 = MT.merkle_root mdb in - 
assert (Hash.equal merkle_root2 merkle_root0) ) + assert (Hash.equal merkle_root2 merkle_root0)) let%test_unit "fold over account balances" = Test.with_instance (fun mdb -> @@ -472,18 +460,18 @@ let%test_module "test functor on in memory databases" = in let total = List.fold balances ~init:0 ~f:(fun accum balance -> - Balance.to_int balance + accum ) + Balance.to_int balance + accum) in let accounts = List.map2_exn account_ids balances ~f:Account.create in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; let retrieved_total = MT.foldi mdb ~init:0 ~f:(fun _addr total account -> - Balance.to_int (Account.balance account) + total ) + Balance.to_int (Account.balance account) + total) in - assert (Int.equal retrieved_total total) ) + assert (Int.equal retrieved_total total)) let%test_unit "fold_until over account balances" = Test.with_instance (fun mdb -> @@ -499,13 +487,13 @@ let%test_module "test functor on in memory databases" = let some_balances = List.take balances some_num in let total = List.fold some_balances ~init:0 ~f:(fun accum balance -> - Balance.to_int balance + accum ) + Balance.to_int balance + accum) in let accounts = List.map2_exn account_ids balances ~f:Account.create in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn mdb account ) ; + ignore @@ create_new_account_exn mdb account) ; (* stop folding on last_account_id, sum of balances in accounts should be same as some_balances *) let retrieved_total = MT.fold_until mdb ~init:0 @@ -515,10 +503,10 @@ let%test_module "test functor on in memory databases" = let new_total = Balance.to_int current_balance + total in if Account_id.equal current_account_id last_account_id then Stop new_total - else Continue new_total ) + else Continue new_total) ~finish:(fun total -> total) in - assert (Int.equal retrieved_total total) ) + assert (Int.equal retrieved_total total)) end module Make_db (Depth : sig 
diff --git a/src/lib/merkle_ledger_tests/test_mask.ml b/src/lib/merkle_ledger_tests/test_mask.ml index fd6a30a70b0..16554d4e815 100644 --- a/src/lib/merkle_ledger_tests/test_mask.ml +++ b/src/lib/merkle_ledger_tests/test_mask.ml @@ -11,46 +11,46 @@ module type Test_intf = sig module Base : Merkle_mask.Base_merkle_tree_intf.S - with module Addr = Location.Addr - and module Location = Location - and type account := Account.t - and type root_hash := Hash.t - and type hash := Hash.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t + with module Addr = Location.Addr + and module Location = Location + and type account := Account.t + and type root_hash := Hash.t + and type hash := Hash.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t module Mask : Merkle_mask.Masking_merkle_tree_intf.S - with module Location = Location - and module Attached.Addr = Location.Addr - with type account := Account.t - and type location := Location.t - and type hash := Hash.t - and type parent := Base.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t + with module Location = Location + and module Attached.Addr = Location.Addr + with type account := Account.t + and type location := Location.t + and type hash := Hash.t + and type parent := Base.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t module Maskable : Merkle_mask.Maskable_merkle_tree_intf.S - with module Location = Location - and module Addr = Location.Addr - with type account := Account.t - and type 
root_hash := Hash.t - and type hash := Hash.t - and type unattached_mask := Mask.t - and type attached_mask := Mask.Attached.t - and type t := Base.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t + with module Location = Location + and module Addr = Location.Addr + with type account := Account.t + and type root_hash := Hash.t + and type hash := Hash.t + and type unattached_mask := Mask.t + and type attached_mask := Mask.Attached.t + and type t := Base.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t val with_instances : (Base.t -> Mask.t -> 'a) -> 'a @@ -133,7 +133,7 @@ module Make (Test : Test_intf) = struct && let maskable_account = Option.value_exn maskable_result in let mask_account = Option.value_exn mask_result in - Account.equal maskable_account mask_account ) + Account.equal maskable_account mask_account) let compare_maskable_mask_hashes ?(check_hash_in_mask = false) maskable mask addr = @@ -166,7 +166,7 @@ module Make (Test : Test_intf) = struct && let maskable_account = Option.value_exn maskable_result in let mask_account = Option.value_exn mask_result in - Account.equal maskable_account mask_account ) + Account.equal maskable_account mask_account) let%test "parent, mask agree on hashes; set in both mask and parent" = Test.with_instances (fun maskable mask -> @@ -176,7 +176,7 @@ module Make (Test : Test_intf) = struct Mask.Attached.set attached_mask dummy_location dummy_account ; (* verify all hashes to root are same in mask and parent *) compare_maskable_mask_hashes ~check_hash_in_mask:true maskable - attached_mask dummy_address ) + attached_mask dummy_address) let%test "parent, mask agree on hashes; set only in parent" = Test.with_instances (fun maskable mask -> @@ -184,7 +184,7 @@ 
module Make (Test : Test_intf) = struct (* set only in parent *) Maskable.set maskable dummy_location dummy_account ; (* verify all hashes to root are same in mask and parent *) - compare_maskable_mask_hashes maskable attached_mask dummy_address ) + compare_maskable_mask_hashes maskable attached_mask dummy_address) let%test "mask delegates to parent" = Test.with_instances (fun maskable mask -> @@ -195,7 +195,7 @@ module Make (Test : Test_intf) = struct Option.is_some mask_result && let mask_account = Option.value_exn mask_result in - Account.equal dummy_account mask_account ) + Account.equal dummy_account mask_account) let%test "mask prune after parent notification" = Test.with_instances (fun maskable mask -> @@ -212,7 +212,7 @@ module Make (Test : Test_intf) = struct not (Mask.Attached.For_testing.location_in_mask attached_mask dummy_location) ) - else false ) + else false) let%test "commit puts mask contents in parent, flushes mask" = Test.with_instances (fun maskable mask -> @@ -230,25 +230,22 @@ module Make (Test : Test_intf) = struct (Mask.Attached.For_testing.location_in_mask attached_mask dummy_location)) && Option.is_some (Maskable.get maskable dummy_location) ) - else false ) + else false) let%test_unit "commit at layer2, dumps to layer1, not in base" = Test.with_chain (fun base ~mask:level1 ~mask_as_base:_ ~mask2:level2 -> Mask.Attached.set level2 dummy_location dummy_account ; (* verify account is in the layer2 mask *) - assert ( - Mask.Attached.For_testing.location_in_mask level2 dummy_location ) ; + assert (Mask.Attached.For_testing.location_in_mask level2 dummy_location) ; Mask.Attached.commit level2 ; (* account is no longer in layer2 *) assert ( - not - (Mask.Attached.For_testing.location_in_mask level2 dummy_location) + not (Mask.Attached.For_testing.location_in_mask level2 dummy_location) ) ; (* account is still not in base *) assert (Option.is_none @@ Maskable.get base dummy_location) ; (* account is present in layer1 *) - assert ( - 
Mask.Attached.For_testing.location_in_mask level1 dummy_location ) ) + assert (Mask.Attached.For_testing.location_in_mask level1 dummy_location)) let%test "register and unregister mask" = Test.with_instances (fun maskable mask -> @@ -260,7 +257,7 @@ module Make (Test : Test_intf) = struct Maskable.unregister_mask_exn ~loc:__LOC__ attached_mask in true - with Failure _ -> false ) + with Failure _ -> false) let%test_unit "root hash invariant if interior changes but not accounts" = if Test.depth <= 8 then @@ -277,10 +274,10 @@ module Make (Test : Test_intf) = struct let balances = gen_values Balance.gen in let accounts = List.map2_exn account_ids balances ~f:(fun public_key balance -> - Account.create public_key balance ) + Account.create public_key balance) in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; (* Set some inner hashes *) let reset_hash_of_parent_of_index i = let a1 = List.nth_exn accounts i in @@ -300,7 +297,7 @@ module Make (Test : Test_intf) = struct reset_hash_of_parent_of_index 0 ; reset_hash_of_parent_of_index 3 ; let root_hash' = Mask.Attached.merkle_root attached_mask in - assert (Hash.equal root_hash root_hash') ) + assert (Hash.equal root_hash root_hash')) let%test "mask and parent agree on Merkle path" = Test.with_instances (fun maskable mask -> @@ -309,7 +306,7 @@ module Make (Test : Test_intf) = struct (* set affects hashes along the path P from location to the root, while the Merkle path for the location contains the siblings of P elements; to observe a hash in the Merkle path changed by the set, choose an address that is a sibling of an element in P; the Merkle path for that address will include a P element - *) + *) let address = dummy_address |> Maskable.Addr.parent_exn |> Maskable.Addr.sibling in @@ -320,14 +317,14 @@ module Make (Test : Test_intf) = struct let maskable_merkle_path = Maskable.merkle_path_at_addr_exn maskable 
address in - [%equal: Mask.Attached.Path.t] mask_merkle_path maskable_merkle_path ) + [%equal: Mask.Attached.Path.t] mask_merkle_path maskable_merkle_path) let%test "mask and parent agree on Merkle root before set" = Test.with_instances (fun maskable mask -> let attached_mask = Maskable.register_mask maskable mask in let mask_merkle_root = Mask.Attached.merkle_root attached_mask in let maskable_merkle_root = Maskable.merkle_root maskable in - Hash.equal mask_merkle_root maskable_merkle_root ) + Hash.equal mask_merkle_root maskable_merkle_root) let%test "mask and parent agree on Merkle root after set" = Test.with_instances (fun maskable mask -> @@ -335,7 +332,7 @@ module Make (Test : Test_intf) = struct (* the order of sets matters here; if we set in the mask first, the set in the maskable notifies the mask, which then removes the account, changing the Merkle root to what it was before the set - *) + *) Maskable.set maskable dummy_location dummy_account ; Mask.Attached.set attached_mask dummy_location dummy_account ; let mask_merkle_root = Mask.Attached.merkle_root attached_mask in @@ -343,7 +340,7 @@ module Make (Test : Test_intf) = struct (* verify root address in mask *) Mask.Attached.For_testing.address_in_mask attached_mask (Mask.Addr.root ()) - && Hash.equal mask_merkle_root maskable_merkle_root ) + && Hash.equal mask_merkle_root maskable_merkle_root) let%test_unit "add and retrieve a block of accounts" = (* see similar test in test_database *) @@ -359,17 +356,17 @@ module Make (Test : Test_intf) = struct let balances = gen_values Balance.gen in let accounts = List.map2_exn account_ids balances ~f:(fun public_key balance -> - Account.create public_key balance ) + Account.create public_key balance) in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; let retrieved_accounts = List.map ~f:snd @@ Mask.Attached.get_all_accounts_rooted_at_exn attached_mask 
(Mask.Addr.root ()) in assert (List.length accounts = List.length retrieved_accounts) ; - assert (List.equal Account.equal accounts retrieved_accounts) ) + assert (List.equal Account.equal accounts retrieved_accounts)) let%test_unit "get_all_accounts should preserve the ordering of accounts by \ location with noncontiguous updates of accounts on the mask" = @@ -385,17 +382,17 @@ module Make (Test : Test_intf) = struct let balances = gen_values Balance.gen num_accounts in let base_accounts = List.map2_exn account_ids balances ~f:(fun public_key balance -> - Account.create public_key balance ) + Account.create public_key balance) in List.iter base_accounts ~f:(fun account -> - ignore @@ create_new_account_exn mask1 account ) ; + ignore @@ create_new_account_exn mask1 account) ; let num_subset = Quickcheck.random_value (Int.gen_incl 3 num_accounts) in let subset_indices, subset_accounts = List.permute - (List.mapi base_accounts ~f:(fun index account -> (index, account) - )) + (List.mapi base_accounts ~f:(fun index account -> + (index, account))) |> (Fn.flip List.take) num_subset |> List.unzip in @@ -403,11 +400,11 @@ module Make (Test : Test_intf) = struct let subset_updated_accounts = List.map2_exn subset_accounts subset_balances ~f:(fun account balance -> - let updated_account = {account with balance} in + let updated_account = { account with balance } in ignore ( create_existing_account_exn mask2 updated_account : Test.Location.t ) ; - updated_account ) + updated_account) in let updated_accounts_map = Int.Map.of_alist_exn @@ -417,7 +414,7 @@ module Make (Test : Test_intf) = struct List.mapi base_accounts ~f:(fun index base_account -> Option.value (Map.find updated_accounts_map index) - ~default:base_account ) + ~default:base_account) in let retrieved_accounts = List.map ~f:snd @@ -428,8 +425,7 @@ module Make (Test : Test_intf) = struct Int.equal (List.length base_accounts) (List.length retrieved_accounts) ) ; - assert (List.equal Account.equal expected_accounts 
retrieved_accounts) - ) + assert (List.equal Account.equal expected_accounts retrieved_accounts)) let%test_unit "removing accounts from mask restores Merkle root" = Test.with_instances (fun maskable mask -> @@ -443,14 +439,14 @@ module Make (Test : Test_intf) = struct let accounts = List.map2_exn account_ids balances ~f:Account.create in let merkle_root0 = Mask.Attached.merkle_root attached_mask in List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; let merkle_root1 = Mask.Attached.merkle_root attached_mask in (* adding accounts should change the Merkle root *) assert (not (Hash.equal merkle_root0 merkle_root1)) ; Mask.Attached.remove_accounts_exn attached_mask account_ids ; (* should see original Merkle root after removing the accounts *) let merkle_root2 = Mask.Attached.merkle_root attached_mask in - assert (Hash.equal merkle_root2 merkle_root0) ) + assert (Hash.equal merkle_root2 merkle_root0)) let%test_unit "removing accounts from parent restores Merkle root" = Test.with_instances (fun maskable mask -> @@ -465,7 +461,7 @@ module Make (Test : Test_intf) = struct let merkle_root0 = Mask.Attached.merkle_root attached_mask in (* add accounts to parent *) List.iter accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ parent_create_new_account_exn maskable account) ; (* observe Merkle root in mask *) let merkle_root1 = Mask.Attached.merkle_root attached_mask in (* adding accounts should change the Merkle root *) @@ -473,7 +469,7 @@ module Make (Test : Test_intf) = struct Mask.Attached.remove_accounts_exn attached_mask account_ids ; (* should see original Merkle root after removing the accounts *) let merkle_root2 = Mask.Attached.merkle_root attached_mask in - assert (Hash.equal merkle_root2 merkle_root0) ) + assert (Hash.equal merkle_root2 merkle_root0)) let%test_unit "removing accounts from parent and mask 
restores Merkle root" = Test.with_instances (fun maskable mask -> @@ -493,10 +489,10 @@ module Make (Test : Test_intf) = struct let merkle_root0 = Mask.Attached.merkle_root attached_mask in (* add accounts to parent *) List.iter parent_accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ parent_create_new_account_exn maskable account) ; (* add accounts to mask *) List.iter mask_accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; (* observe Merkle root in mask *) let merkle_root1 = Mask.Attached.merkle_root attached_mask in (* adding accounts should change the Merkle root *) @@ -505,7 +501,7 @@ module Make (Test : Test_intf) = struct Mask.Attached.remove_accounts_exn attached_mask account_ids ; (* should see original Merkle root after removing the accounts *) let merkle_root2 = Mask.Attached.merkle_root attached_mask in - assert (Hash.equal merkle_root2 merkle_root0) ) + assert (Hash.equal merkle_root2 merkle_root0)) let%test_unit "fold of addition over account balances in parent and mask" = Test.with_instances (fun maskable mask -> @@ -521,24 +517,24 @@ module Make (Test : Test_intf) = struct let accounts = List.map2_exn account_ids balances ~f:Account.create in let total = List.fold balances ~init:0 ~f:(fun accum balance -> - Balance.to_int balance + accum ) + Balance.to_int balance + accum) in let parent_accounts, mask_accounts = List.split_n accounts num_accounts_parent in (* add accounts to parent *) List.iter parent_accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ parent_create_new_account_exn maskable account) ; (* add accounts to mask *) List.iter mask_accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; (* folding over mask also folds over maskable *) let 
retrieved_total = Mask.Attached.foldi attached_mask ~init:0 ~f:(fun _addr total account -> - Balance.to_int (Account.balance account) + total ) + Balance.to_int (Account.balance account) + total) in - assert (Int.equal retrieved_total total) ) + assert (Int.equal retrieved_total total)) let%test_unit "masking in to_list" = Test.with_instances (fun maskable mask -> @@ -554,7 +550,7 @@ module Make (Test : Test_intf) = struct in (* add accounts to parent *) List.iter parent_accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ parent_create_new_account_exn maskable account) ; (* all accounts in parent to_list *) let parent_list = Maskable.to_list maskable in let zero_balance account = @@ -563,7 +559,7 @@ module Make (Test : Test_intf) = struct (* put same accounts in mask, but with zero balance *) let mask_accounts = List.map parent_accounts ~f:zero_balance in List.iter mask_accounts ~f:(fun account -> - ignore @@ create_existing_account_exn attached_mask account ) ; + ignore @@ create_existing_account_exn attached_mask account) ; let mask_list = Mask.Attached.to_list attached_mask in (* same number of accounts after adding them to mask *) assert (Int.equal (List.length parent_list) (List.length mask_list)) ; @@ -573,12 +569,12 @@ module Make (Test : Test_intf) = struct ~f:(fun parent_account mask_account -> Account_id.equal (Account.identifier parent_account) - (Account.identifier mask_account) ) + (Account.identifier mask_account)) in assert is_in_same_order ; assert ( List.for_all mask_list ~f:(fun account -> - Balance.equal (Account.balance account) Balance.zero ) ) ) + Balance.equal (Account.balance account) Balance.zero) )) let%test_unit "masking in foldi" = Test.with_instances (fun maskable mask -> @@ -594,7 +590,7 @@ module Make (Test : Test_intf) = struct in (* add accounts to parent *) List.iter parent_accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ 
parent_create_new_account_exn maskable account) ; let balance_summer _addr accum acct = accum + Balance.to_int (Account.balance acct) in @@ -607,12 +603,12 @@ module Make (Test : Test_intf) = struct (* put same accounts in mask, but with zero balance *) let mask_accounts = List.map parent_accounts ~f:zero_balance in List.iter mask_accounts ~f:(fun account -> - ignore @@ create_existing_account_exn attached_mask account ) ; + ignore @@ create_existing_account_exn attached_mask account) ; let mask_sum = Mask.Attached.foldi attached_mask ~init:0 ~f:balance_summer in (* sum should not include any parent balances *) - assert (Int.equal mask_sum 0) ) + assert (Int.equal mask_sum 0)) let%test_unit "create_empty doesn't modify the hash" = Test.with_instances (fun maskable mask -> @@ -627,7 +623,7 @@ module Make (Test : Test_intf) = struct failwith "create_empty with empty ledger somehow already has that key?" | `Added, _new_loc -> - [%test_eq: Hash.t] start_hash (merkle_root ledger) ) + [%test_eq: Hash.t] start_hash (merkle_root ledger)) let%test_unit "reuse of locations for removed accounts" = Test.with_instances (fun maskable mask -> @@ -644,7 +640,7 @@ module Make (Test : Test_intf) = struct (Mask.Attached.For_testing.current_location attached_mask) ) ; (* add accounts to mask *) List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; assert ( Option.is_some (Mask.Attached.For_testing.current_location attached_mask) ) ; @@ -652,7 +648,7 @@ module Make (Test : Test_intf) = struct Mask.Attached.remove_accounts_exn attached_mask account_ids ; assert ( Option.is_none - (Mask.Attached.For_testing.current_location attached_mask) ) ) + (Mask.Attached.For_testing.current_location attached_mask) )) let%test_unit "num_accounts for unique keys in mask and parent" = Test.with_instances (fun maskable mask -> @@ -666,13 +662,13 @@ module Make (Test : Test_intf) = struct let accounts 
= List.map2_exn account_ids balances ~f:Account.create in (* add accounts to mask *) List.iter accounts ~f:(fun account -> - ignore @@ create_new_account_exn attached_mask account ) ; + ignore @@ create_new_account_exn attached_mask account) ; let mask_num_accounts_before = Mask.Attached.num_accounts attached_mask in (* add same accounts to parent *) List.iter accounts ~f:(fun account -> - ignore @@ parent_create_new_account_exn maskable account ) ; + ignore @@ parent_create_new_account_exn maskable account) ; let parent_num_accounts = Maskable.num_accounts maskable in (* should not change number of accounts in mask, since they have the same keys *) let mask_num_accounts_after = @@ -682,7 +678,7 @@ module Make (Test : Test_intf) = struct assert ( Int.equal parent_num_accounts (List.length accounts) && Int.equal parent_num_accounts mask_num_accounts_before - && Int.equal parent_num_accounts mask_num_accounts_after ) ) + && Int.equal parent_num_accounts mask_num_accounts_after )) let%test_unit "Mask reparenting works" = Test.with_chain (fun base ~mask:m1 ~mask_as_base ~mask2:m2 -> @@ -694,16 +690,16 @@ module Make (Test : Test_intf) = struct in let accounts = List.map2_exn account_ids balances ~f:Account.create in match accounts with - | [a1; a2; a3] -> + | [ a1; a2; a3 ] -> let loc1 = parent_create_new_account_exn base a1 in let loc2 = create_new_account_exn m1 a2 in let loc3 = create_new_account_exn m2 a3 in - let locs = [(loc1, a1); (loc2, a2); (loc3, a3)] in + let locs = [ (loc1, a1); (loc2, a2); (loc3, a3) ] in (* all accounts are here *) List.iter locs ~f:(fun (loc, a) -> [%test_result: Account.t option] ~message:"All accounts are accessible from m2" - ~expect:(Some a) (Mask.Attached.get m2 loc) ) ; + ~expect:(Some a) (Mask.Attached.get m2 loc)) ; [%test_result: Account.t option] ~message:"a1 is in base" ~expect:(Some a1) (Test.Base.get base loc1) ; Mask.Attached.commit m1 ; @@ -718,9 +714,9 @@ module Make (Test : Test_intf) = struct List.iter locs ~f:(fun 
(loc, a) -> [%test_result: Account.t option] ~message:"All accounts are accessible from m2" - ~expect:(Some a) (Mask.Attached.get m2 loc) ) + ~expect:(Some a) (Mask.Attached.get m2 loc)) | _ -> - failwith "unexpected" ) + failwith "unexpected") let%test_unit "setting an account in the parent doesn't remove the masked \ copy if the mask is still dirty for that account" = @@ -734,9 +730,9 @@ module Make (Test : Test_intf) = struct in let acct2 = Account.create k (Balance.of_int 5) in Maskable.set maskable loc acct2 ; - [%test_result: Account.t] - ~message:"account in mask should be unchanged" ~expect:acct1 - (Mask.Attached.get attached_mask loc |> Option.value_exn) ) + [%test_result: Account.t] ~message:"account in mask should be unchanged" + ~expect:acct1 + (Mask.Attached.get attached_mask loc |> Option.value_exn)) end module type Depth_S = sig @@ -763,8 +759,7 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct | Hash of Location.Addr.t [@@deriving hash, sexp, compare] - include Hashable.Make_binable (Arg) [@@deriving - sexp, compare, hash, yojson] + include Hashable.Make_binable (Arg) [@@deriving sexp, compare, hash, yojson] end module Inputs = struct @@ -778,16 +773,16 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct (* underlying Merkle tree *) module Base_db : Merkle_ledger.Database_intf.S - with module Location = Location - and module Addr = Location.Addr - and type account := Account.t - and type root_hash := Hash.t - and type hash := Hash.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t = + with module Location = Location + and module Addr = Location.Addr + and type account := Account.t + and type root_hash := Hash.t + and type hash := Hash.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and 
type account_id_set := Account_id.Set.t = Database.Make (Inputs) module Any_base = Merkle_ledger.Any_ledger.Make_base (Inputs) @@ -796,17 +791,17 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct (* the mask tree *) module Mask : Merkle_mask.Masking_merkle_tree_intf.S - with module Location = Location - and module Attached.Addr = Location.Addr - with type account := Account.t - and type location := Location.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type parent := Base.t = Merkle_mask.Masking_merkle_tree.Make (struct + with module Location = Location + and module Attached.Addr = Location.Addr + with type account := Account.t + and type location := Location.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type parent := Base.t = Merkle_mask.Masking_merkle_tree.Make (struct include Inputs module Base = Base end) @@ -814,19 +809,19 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct (* tree that can register masks *) module Maskable : Merkle_mask.Maskable_merkle_tree_intf.S - with module Addr = Location.Addr - and module Location = Location - with type account := Account.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type root_hash := Hash.t - and type hash := Hash.t - and type unattached_mask := Mask.t - and type attached_mask := Mask.Attached.t - and type t := Base.t = Merkle_mask.Maskable_merkle_tree.Make (struct + with module Addr = Location.Addr + and module Location = Location + with type account := Account.t + and type key := Key.t + and type 
token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type root_hash := Hash.t + and type hash := Hash.t + and type unattached_mask := Mask.t + and type attached_mask := Mask.Attached.t + and type t := Base.t = Merkle_mask.Maskable_merkle_tree.Make (struct include Inputs module Base = Base module Mask = Mask @@ -852,7 +847,7 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct let mask2 = Mask.create ~depth:Depth.depth () in let attached2 = Maskable.register_mask attached1_as_base mask2 in f maskable ~mask:attached1 ~mask_as_base:attached1_as_base - ~mask2:attached2 ) + ~mask2:attached2) end module Make_maskable_and_mask (Depth : Depth_S) = diff --git a/src/lib/merkle_ledger_tests/test_stubs.ml b/src/lib/merkle_ledger_tests/test_stubs.ml index f30a6e572da..4a42100efda 100644 --- a/src/lib/merkle_ledger_tests/test_stubs.ml +++ b/src/lib/merkle_ledger_tests/test_stubs.ml @@ -22,13 +22,13 @@ module Account = struct let create = Mina_base.Account.create - let balance Mina_base.Account.Poly.{balance; _} = balance + let balance Mina_base.Account.Poly.{ balance; _ } = balance - let update_balance t bal = {t with Mina_base.Account.Poly.balance= bal} + let update_balance t bal = { t with Mina_base.Account.Poly.balance = bal } - let token Mina_base.Account.Poly.{token_id; _} = token_id + let token Mina_base.Account.Poly.{ token_id; _ } = token_id - let token_owner Mina_base.Account.Poly.{token_permissions; _} = + let token_owner Mina_base.Account.Poly.{ token_permissions; _ } = match token_permissions with | Mina_base.Token_permissions.Token_owned _ -> true @@ -81,8 +81,8 @@ struct include Bigstring.Stable.V1 (* we're not mutating Bigstrings, which would invalidate hashes - OK to use these hash functions - *) + OK to use these hash functions + *) let hash = hash_t_frozen let hash_fold_t = hash_fold_t_frozen @@ -93,7 +93,9 @@ struct end type t = - {uuid: 
Uuid.Stable.V1.t; table: Bigstring_frozen.t Bigstring_frozen.Table.t} + { uuid : Uuid.Stable.V1.t + ; table : Bigstring_frozen.t Bigstring_frozen.Table.t + } [@@deriving sexp] let to_alist t = @@ -106,13 +108,14 @@ struct let get_uuid t = t.uuid let create _ = - {uuid= Uuid_unix.create (); table= Bigstring_frozen.Table.create ()} + { uuid = Uuid_unix.create (); table = Bigstring_frozen.Table.create () } let create_checkpoint t _ = - { uuid= Uuid_unix.create () - ; table= + { uuid = Uuid_unix.create () + ; table = Bigstring_frozen.Table.of_alist_exn - @@ Bigstring_frozen.Table.to_alist t.table } + @@ Bigstring_frozen.Table.to_alist t.table + } let close _ = () @@ -123,7 +126,7 @@ struct let set_batch t ?(remove_keys = []) ~key_data_pairs = List.iter key_data_pairs ~f:(fun (key, data) -> set t ~key ~data) ; List.iter remove_keys ~f:(fun key -> - Bigstring_frozen.Table.remove t.table key ) + Bigstring_frozen.Table.remove t.table key) let remove t ~key = Bigstring_frozen.Table.remove t.table key end @@ -151,8 +154,7 @@ module Key = struct let empty : t = Account.empty.public_key let gen_keys num_keys = - Quickcheck.random_value - (Quickcheck.Generator.list_with_length num_keys gen) + Quickcheck.random_value (Quickcheck.Generator.list_with_length num_keys gen) include Hashable.Make_binable (Stable.Latest) include Comparable.Make (Stable.Latest) diff --git a/src/lib/merkle_list_prover/merkle_list_prover.ml b/src/lib/merkle_list_prover/merkle_list_prover.ml index 4305cd6870f..6acf7ed7608 100644 --- a/src/lib/merkle_list_prover/merkle_list_prover.ml +++ b/src/lib/merkle_list_prover/merkle_list_prover.ml @@ -14,8 +14,7 @@ module type Inputs_intf = sig val get_previous : context:context -> value -> value option M.t end -module Make_intf (M : Monad.S) (Input : Inputs_intf with module M := M) = -struct +module Make_intf (M : Monad.S) (Input : Inputs_intf with module M := M) = struct module type S = sig val prove : ?length:int diff --git 
a/src/lib/merkle_list_verifier/merkle_list_verifier.ml b/src/lib/merkle_list_verifier/merkle_list_verifier.ml index e9e689f3ec2..d7f2b3c2320 100644 --- a/src/lib/merkle_list_verifier/merkle_list_verifier.ml +++ b/src/lib/merkle_list_verifier/merkle_list_verifier.ml @@ -25,8 +25,7 @@ module Make (Input : Inputs_intf) : Make_intf(Input).S = struct let hashes = List.fold merkle_list ~init:(Non_empty_list.singleton init) ~f:(fun acc proof_elem -> - Non_empty_list.cons (hash (Non_empty_list.head acc) proof_elem) acc - ) + Non_empty_list.cons (hash (Non_empty_list.head acc) proof_elem) acc) in if equal_hash target_hash (Non_empty_list.head hashes) then Some hashes else None diff --git a/src/lib/merkle_mask/inputs_intf.ml b/src/lib/merkle_mask/inputs_intf.ml index 45bca0e54ff..761b5768ddb 100644 --- a/src/lib/merkle_mask/inputs_intf.ml +++ b/src/lib/merkle_mask/inputs_intf.ml @@ -5,16 +5,16 @@ module type S = sig module Account_id : Merkle_ledger.Intf.Account_id - with type key := Key.t - and type token_id := Token_id.t + with type key := Key.t + and type token_id := Token_id.t module Balance : Merkle_ledger.Intf.Balance module Account : Merkle_ledger.Intf.Account - with type token_id := Token_id.t - and type account_id := Account_id.t - and type balance := Balance.t + with type token_id := Token_id.t + and type account_id := Account_id.t + and type balance := Balance.t module Hash : Merkle_ledger.Intf.Hash with type account := Account.t @@ -25,14 +25,14 @@ module type S = sig module Base : Base_merkle_tree_intf.S - with module Addr = Location.Addr - and module Location = Location - and type account := Account.t - and type root_hash := Hash.t - and type hash := Hash.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t + with module Addr = Location.Addr + and module Location = Location + and type account := Account.t + and type root_hash := 
Hash.t + and type hash := Hash.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t end diff --git a/src/lib/merkle_mask/maskable_merkle_tree.ml b/src/lib/merkle_mask/maskable_merkle_tree.ml index 9a8375a37ad..040e879dce8 100644 --- a/src/lib/merkle_mask/maskable_merkle_tree.ml +++ b/src/lib/merkle_mask/maskable_merkle_tree.ml @@ -7,16 +7,16 @@ module type Inputs_intf = sig module Mask : Masking_merkle_tree_intf.S - with module Location = Location - and type account := Account.t - and type location := Location.t - and type hash := Hash.t - and type key := Key.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type parent := Base.t + with module Location = Location + and type account := Account.t + and type location := Location.t + and type hash := Hash.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type parent := Base.t val mask_to_base : Mask.Attached.t -> Base.t end @@ -32,14 +32,13 @@ module Make (Inputs : Inputs_intf) = struct module Node = struct type t = Mask.Attached.t - type attached = - {hash: string; uuid: string; total_currency: int; num_accounts: int} + { hash : string; uuid : string; total_currency : int; num_accounts : int } [@@deriving yojson] - type dangling = {uuid: string; nulled_at: string} [@@deriving yojson] + type dangling = { uuid : string; nulled_at : string } [@@deriving yojson] - type display = [`Attached of attached | `Dangling_parent of dangling] + type display = [ `Attached of attached | `Dangling_parent of dangling ] [@@deriving yojson] let format_uuid mask = @@ -56,18 +55,20 @@ module Make (Inputs : Inputs_intf) = struct (* only default token 
matters for total currency *) if Token_id.equal (Account.token account) Token_id.default then total_currency + (Balance.to_int @@ Account.balance account) - else total_currency ) + else total_currency) in let uuid = format_uuid mask in - { hash= Visualization.display_prefix_of_string @@ Hash.to_string root_hash + { hash = + Visualization.display_prefix_of_string @@ Hash.to_string root_hash ; num_accounts ; total_currency - ; uuid } + ; uuid + } let display mask = try `Attached (display_attached_mask mask) with Mask.Attached.Dangling_parent_reference (_, nulled_at) -> - `Dangling_parent {uuid= format_uuid mask; nulled_at} + `Dangling_parent { uuid = format_uuid mask; nulled_at } let equal mask1 mask2 = let open Mask.Attached in @@ -96,18 +97,18 @@ module Make (Inputs : Inputs_intf) = struct |> Option.value_map ~default:graph_with_mask ~f:(fun children_masks -> List.fold ~init:graph_with_mask children_masks ~f:(fun graph_with_mask_and_child -> - add_edge graph_with_mask_and_child mask ) ) ) + add_edge graph_with_mask_and_child mask))) module Debug = struct let visualize ~filename = Out_channel.with_file filename ~f:(fun output_channel -> let graph = to_graph () in - Graphviz.output_graph output_channel graph ) + Graphviz.output_graph output_channel graph) end module Visualize = struct module Summary = struct - type t = [`Uuid of Uuid.t] * [`Hash of Hash.t] [@@deriving sexp_of] + type t = [ `Uuid of Uuid.t ] * [ `Hash of Hash.t ] [@@deriving sexp_of] end type t = Leaf of Summary.t | Node of Summary.t * t list @@ -147,7 +148,7 @@ module Make (Inputs : Inputs_intf) = struct ~message: "We've already registered a mask with this UUID; you have a bug" ~expect:false - (Uuid.equal (Mask.Attached.get_uuid m) (Mask.get_uuid mask)) ) ) ; + (Uuid.equal (Mask.Attached.get_uuid m) (Mask.get_uuid mask)))) ; (* handles cases where no entries for t, or where there are existing entries *) Uuid.Table.add_multi registered_masks ~key:(get_uuid t) ~data:attached_mask ; attached_mask @@ 
-170,15 +171,15 @@ module Make (Inputs : Inputs_intf) = struct in ( match grandchildren with | `Check -> ( - match Hashtbl.find registered_masks (Mask.Attached.get_uuid mask) with - | Some children -> - failwith @@ error_msg - @@ sprintf - !"mask has children that must be unregistered first: %{sexp: \ - Uuid.t list}" - (List.map ~f:Mask.Attached.get_uuid children) - | None -> - () ) + match Hashtbl.find registered_masks (Mask.Attached.get_uuid mask) with + | Some children -> + failwith @@ error_msg + @@ sprintf + !"mask has children that must be unregistered first: %{sexp: \ + Uuid.t list}" + (List.map ~f:Mask.Attached.get_uuid children) + | None -> + () ) | `I_promise_I_am_reparenting_this_mask -> () | `Recursive -> @@ -190,8 +191,7 @@ module Make (Inputs : Inputs_intf) = struct |> Option.value ~default:[] ) ~f:(fun child_mask -> ignore - @@ unregister_mask_exn ~loc ~grandchildren:`Recursive child_mask ) - ) ; + @@ unregister_mask_exn ~loc ~grandchildren:`Recursive child_mask) ) ; match Uuid.Table.find registered_masks parent_uuid with | None -> failwith @@ error_msg "parent not in registered_masks" @@ -222,7 +222,7 @@ module Make (Inputs : Inputs_intf) = struct () | Some masks -> List.iter masks ~f:(fun mask -> - Mask.Attached.parent_set_notify mask account ) + Mask.Attached.parent_set_notify mask account) let remove_and_reparent_exn t t_as_mask = let parent = Mask.Attached.get_parent t_as_mask in @@ -235,11 +235,11 @@ module Make (Inputs : Inputs_intf) = struct let dangling_masks = List.map children ~f:(fun c -> unregister_mask_exn ~loc:__LOC__ - ~grandchildren:`I_promise_I_am_reparenting_this_mask c ) + ~grandchildren:`I_promise_I_am_reparenting_this_mask c) in ignore (unregister_mask_exn ~loc:__LOC__ t_as_mask : Mask.unattached) ; List.iter dangling_masks ~f:(fun m -> - ignore (register_mask parent m : Mask.Attached.t) ) + ignore (register_mask parent m : Mask.Attached.t)) let batch_notify_mask_children t accounts = match Uuid.Table.find registered_masks 
(get_uuid t) with @@ -248,7 +248,7 @@ module Make (Inputs : Inputs_intf) = struct | Some masks -> List.iter masks ~f:(fun mask -> List.iter accounts ~f:(fun account -> - Mask.Attached.parent_set_notify mask account ) ) + Mask.Attached.parent_set_notify mask account)) let set_batch t locations_and_accounts = Base.set_batch t locations_and_accounts ; diff --git a/src/lib/merkle_mask/maskable_merkle_tree_intf.ml b/src/lib/merkle_mask/maskable_merkle_tree_intf.ml index d5a63687fd0..b73974bbd5a 100644 --- a/src/lib/merkle_mask/maskable_merkle_tree_intf.ml +++ b/src/lib/merkle_mask/maskable_merkle_tree_intf.ml @@ -17,9 +17,8 @@ module type S = sig (** raises an exception if mask is not registered *) val unregister_mask_exn : - ?grandchildren:[ `Check - | `Recursive - | `I_promise_I_am_reparenting_this_mask ] + ?grandchildren: + [ `Check | `Recursive | `I_promise_I_am_reparenting_this_mask ] -> loc:string -> attached_mask -> unattached_mask diff --git a/src/lib/merkle_mask/masking_merkle_tree.ml b/src/lib/merkle_mask/masking_merkle_tree.ml index 0a58562e115..f2d80b65ba8 100644 --- a/src/lib/merkle_mask/masking_merkle_tree.ml +++ b/src/lib/merkle_mask/masking_merkle_tree.ml @@ -38,33 +38,35 @@ module Make (Inputs : Inputs_intf.S) = struct end type t = - { uuid: Uuid.Stable.V1.t - ; account_tbl: Account.t Location_binable.Table.t - ; token_owners: Key.Stable.Latest.t Token_id.Table.t - ; mutable next_available_token: Token_id.t option - ; mutable parent: Parent.t - ; detached_parent_signal: Detached_parent_signal.t - ; hash_tbl: Hash.t Addr.Table.t - ; location_tbl: Location.t Account_id.Table.t - ; mutable current_location: Location.t option - ; depth: int } + { uuid : Uuid.Stable.V1.t + ; account_tbl : Account.t Location_binable.Table.t + ; token_owners : Key.Stable.Latest.t Token_id.Table.t + ; mutable next_available_token : Token_id.t option + ; mutable parent : Parent.t + ; detached_parent_signal : Detached_parent_signal.t + ; hash_tbl : Hash.t Addr.Table.t + ; 
location_tbl : Location.t Account_id.Table.t + ; mutable current_location : Location.t option + ; depth : int + } [@@deriving sexp] type unattached = t [@@deriving sexp] let create ~depth () = - { uuid= Uuid_unix.create () - ; parent= Error __LOC__ - ; detached_parent_signal= Async.Ivar.create () - ; account_tbl= Location_binable.Table.create () - ; token_owners= Token_id.Table.create () - ; next_available_token= None - ; hash_tbl= Addr.Table.create () - ; location_tbl= Account_id.Table.create () - ; current_location= None - ; depth } - - let get_uuid {uuid; _} = uuid + { uuid = Uuid_unix.create () + ; parent = Error __LOC__ + ; detached_parent_signal = Async.Ivar.create () + ; account_tbl = Location_binable.Table.create () + ; token_owners = Token_id.Table.create () + ; next_available_token = None + ; hash_tbl = Addr.Table.create () + ; location_tbl = Account_id.Table.create () + ; current_location = None + ; depth + } + + let get_uuid { uuid; _ } = uuid let depth t = t.depth @@ -95,8 +97,8 @@ module Make (Inputs : Inputs_intf.S) = struct let create () = failwith - "Mask.Attached.create: cannot create an attached mask; use \ - Mask.create and Mask.set_parent" + "Mask.Attached.create: cannot create an attached mask; use Mask.create \ + and Mask.set_parent" let with_ledger ~f:_ = failwith @@ -119,7 +121,7 @@ module Make (Inputs : Inputs_intf.S) = struct let detached_signal t = Async.Ivar.read t.detached_parent_signal - let get_parent ({parent= opt; _} as t) = + let get_parent ({ parent = opt; _ } as t) = assert_is_attached t ; Result.ok_or_failwith opt let get_uuid t = assert_is_attached t ; t.uuid @@ -188,9 +190,7 @@ module Make (Inputs : Inputs_intf.S) = struct let curr_element = List.hd_exn path in let merkle_node_address = Addr.sibling address in let mask_hash = self_find_hash t merkle_node_address in - let parent_hash = - match curr_element with `Left h | `Right h -> h - in + let parent_hash = match curr_element with `Left h | `Right h -> h in let new_hash = 
Option.value mask_hash ~default:parent_hash in let new_element = match curr_element with @@ -246,7 +246,7 @@ module Make (Inputs : Inputs_intf.S) = struct (next_address, next_hash) :: accum in List.foldi merkle_path - ~init:[(starting_address, starting_hash)] + ~init:[ (starting_address, starting_hash) ] ~f:get_addresses_hashes (* use mask Merkle root, if it exists, else get from parent *) @@ -290,7 +290,7 @@ module Make (Inputs : Inputs_intf.S) = struct | Some prev_loc -> t.current_location <- Some prev_loc | None -> - t.current_location <- None ) ; + t.current_location <- None) ; (* update hashes *) let account_address = Location.to_path_exn location in let account_hash = Hash.empty_account in @@ -300,7 +300,7 @@ module Make (Inputs : Inputs_intf.S) = struct account_hash in List.iter addresses_and_hashes ~f:(fun (addr, hash) -> - self_set_hash t addr hash ) + self_set_hash t addr hash) (* a write writes only to the mask, parent is not involved need to update both account and hash pieces of the mask *) @@ -323,7 +323,7 @@ module Make (Inputs : Inputs_intf.S) = struct account_hash in List.iter addresses_and_hashes ~f:(fun (addr, hash) -> - self_set_hash t addr hash ) + self_set_hash t addr hash) (* if the mask's parent sets an account, we can prune an entry in the mask if the account in the parent is the same in the mask *) @@ -333,12 +333,12 @@ module Make (Inputs : Inputs_intf.S) = struct | None -> () | Some location -> ( - match self_find_account t location with - | Some existing_account -> - if Account.equal account existing_account then - remove_account_and_update_hashes t location - | None -> - () ) + match self_find_account t location with + | Some existing_account -> + if Account.equal account existing_account then + remove_account_and_update_hashes t location + | None -> + () ) (* as for accounts, we see if we have it in the mask, else delegate to parent *) @@ -348,10 +348,10 @@ module Make (Inputs : Inputs_intf.S) = struct | Some hash -> Some hash | None 
-> ( - try - let hash = Base.get_inner_hash_at_addr_exn (get_parent t) addr in - Some hash - with _ -> None ) + try + let hash = Base.get_inner_hash_at_addr_exn (get_parent t) addr in + Some hash + with _ -> None ) (* batch operations TODO: rely on availability of batch operations in Base for speed *) @@ -368,8 +368,8 @@ module Make (Inputs : Inputs_intf.S) = struct | Some account -> Some account | None -> ( - try Some (Base.get_inner_hash_at_addr_exn (get_parent t) addr) - with _ -> None ) ) + try Some (Base.get_inner_hash_at_addr_exn (get_parent t) addr) + with _ -> None )) (* transfer state from mask to parent; flush local state *) let commit t = @@ -380,7 +380,7 @@ module Make (Inputs : Inputs_intf.S) = struct Option.iter t.next_available_token ~f:(fun tid -> if Token_id.(tid > Base.next_available_token (get_parent t)) then Base.set_next_available_token (get_parent t) tid ; - t.next_available_token <- None ) ; + t.next_available_token <- None) ; Location_binable.Table.clear t.account_tbl ; Addr.Table.clear t.hash_tbl ; Debug_assert.debug_assert (fun () -> @@ -391,23 +391,23 @@ module Make (Inputs : Inputs_intf.S) = struct ~expect:old_root_hash (Base.merkle_root (get_parent t)) ; [%test_result: Hash.t] - ~message: - "Merkle root of the mask should delegate to the parent now" + ~message:"Merkle root of the mask should delegate to the parent now" ~expect:(merkle_root t) - (Base.merkle_root (get_parent t)) ) + (Base.merkle_root (get_parent t))) (* copy tables in t; use same parent *) let copy t = - { uuid= Uuid_unix.create () - ; parent= Ok (get_parent t) - ; detached_parent_signal= Async.Ivar.create () - ; account_tbl= Location_binable.Table.copy t.account_tbl - ; token_owners= Token_id.Table.copy t.token_owners - ; next_available_token= t.next_available_token - ; location_tbl= Account_id.Table.copy t.location_tbl - ; hash_tbl= Addr.Table.copy t.hash_tbl - ; current_location= t.current_location - ; depth= t.depth } + { uuid = Uuid_unix.create () + ; parent = Ok 
(get_parent t) + ; detached_parent_signal = Async.Ivar.create () + ; account_tbl = Location_binable.Table.copy t.account_tbl + ; token_owners = Token_id.Table.copy t.token_owners + ; next_available_token = t.next_available_token + ; location_tbl = Account_id.Table.copy t.location_tbl + ; hash_tbl = Addr.Table.copy t.hash_tbl + ; current_location = t.current_location + ; depth = t.depth + } let last_filled t = assert_is_attached t ; @@ -419,18 +419,18 @@ module Make (Inputs : Inputs_intf.S) = struct | None -> Some parent_loc | Some our_loc -> ( - match (parent_loc, our_loc) with - | Account parent_addr, Account our_addr -> - (* Addr.compare is Bitstring.compare, essentially String.compare *) - let loc = - if Addr.compare parent_addr our_addr >= 0 then parent_loc - else our_loc - in - Some loc - | _ -> - failwith - "last_filled: expected account locations for the parent and \ - mask" ) ) + match (parent_loc, our_loc) with + | Account parent_addr, Account our_addr -> + (* Addr.compare is Bitstring.compare, essentially String.compare *) + let loc = + if Addr.compare parent_addr our_addr >= 0 then parent_loc + else our_loc + in + Some loc + | _ -> + failwith + "last_filled: expected account locations for the parent \ + and mask" )) include Merkle_ledger.Util.Make (struct module Location = Location @@ -461,12 +461,12 @@ module Make (Inputs : Inputs_intf.S) = struct let set_raw_hash_batch t locations_and_hashes = List.iter locations_and_hashes ~f:(fun (location, hash) -> - self_set_hash t (Location.to_path_exn location) hash ) + self_set_hash t (Location.to_path_exn location) hash) let set_location_batch ~last_location t account_to_location_list = t.current_location <- Some last_location ; Non_empty_list.iter account_to_location_list ~f:(fun (key, data) -> - Account_id.Table.set t.location_tbl ~key ~data ) + Account_id.Table.set t.location_tbl ~key ~data) let set_raw_account_batch t locations_and_accounts = let next_available_token = next_available_token t in @@ -478,8 
+478,7 @@ module Make (Inputs : Inputs_intf.S) = struct Token_id.Table.set t.token_owners ~key:account_token ~data:(Account_id.public_key (Account.identifier account)) ; self_set_account t location account ; - Token_id.max next_available_token (Token_id.next account_token) - ) + Token_id.max next_available_token (Token_id.next account_token)) in if Token_id.(next_available_token < new_next_available_token) then set_next_available_token t new_next_available_token @@ -529,7 +528,7 @@ module Make (Inputs : Inputs_intf.S) = struct |> List.filter_map ~f:(fun aid -> if Key.equal pk (Account_id.public_key aid) then Some (Account_id.token_id aid) - else None ) + else None) |> Token_id.Set.of_list in Set.union mask_tokens (Base.tokens (get_parent t) pk) @@ -564,11 +563,11 @@ module Make (Inputs : Inputs_intf.S) = struct | [] -> (parent_keys, mask_locations) | key :: rest -> ( - match self_find_location t key with - | None -> - loop rest (key :: parent_keys) mask_locations - | Some loc -> - loop rest parent_keys (loc :: mask_locations) ) + match self_find_location t key with + | None -> + loop rest (key :: parent_keys) mask_locations + | Some loc -> + loop rest parent_keys (loc :: mask_locations) ) in (* parent_keys not in mask, may be in parent mask_locations definitely in mask *) @@ -584,7 +583,7 @@ module Make (Inputs : Inputs_intf.S) = struct List.sort mask_locations ~compare:(fun loc1 loc2 -> let loc1 = Location.to_path_exn loc1 in let loc2 = Location.to_path_exn loc2 in - Location.Addr.compare loc2 loc1 ) + Location.Addr.compare loc2 loc1) in List.iter rev_sorted_mask_locations ~f:(remove_account_and_update_hashes t) @@ -624,9 +623,9 @@ module Make (Inputs : Inputs_intf.S) = struct | Account addr -> (Addr.to_int addr, get t location |> Option.value_exn) | location -> - raise (Location_is_not_account location) ) + raise (Location_is_not_account location)) |> List.sort ~compare:(fun (addr1, _) (addr2, _) -> - Int.compare addr1 addr2 ) + Int.compare addr1 addr2) |> 
List.map ~f:(fun (_, account) -> account) let iteri t ~f = @@ -651,15 +650,15 @@ module Make (Inputs : Inputs_intf.S) = struct %{sexp: Uuid.t} account id: %{sexp: Account_id.t}" (get_uuid t) acct_id) | Some loc -> ( - match get t loc with - | None -> - failwith - (sprintf - !"iter: get returned None, mask uuid: %{sexp: Uuid.t} \ - account id: %{sexp: Account_id.t}" - (get_uuid t) acct_id) - | Some acct -> - (idx, acct) ) ) + match get t loc with + | None -> + failwith + (sprintf + !"iter: get returned None, mask uuid: %{sexp: Uuid.t} \ + account id: %{sexp: Account_id.t}" + (get_uuid t) acct_id) + | Some acct -> + (idx, acct) )) in (* in case iteration order matters *) let idx_account_pairs = @@ -676,7 +675,7 @@ module Make (Inputs : Inputs_intf.S) = struct (* parent should ignore accounts in this mask *) let mask_accounts = List.map locations_and_accounts ~f:(fun (_loc, acct) -> - Account.identifier acct ) + Account.identifier acct) in let mask_ignored_accounts = Account_id.Set.of_list mask_accounts in let all_ignored_accounts = @@ -689,8 +688,8 @@ module Make (Inputs : Inputs_intf.S) = struct in let f' accum (location, account) = (* for mask, ignore just passed-in ignored accounts *) - if Account_id.Set.mem ignored_accounts (Account.identifier account) - then accum + if Account_id.Set.mem ignored_accounts (Account.identifier account) then + accum else let address = Location.to_path_exn location in f address accum account @@ -739,27 +738,27 @@ module Make (Inputs : Inputs_intf.S) = struct assert_is_attached t ; match self_find_location t account_id with | None -> ( - (* not in mask, maybe in parent *) - match Base.location_of_account (get_parent t) account_id with - | Some location -> - Ok (`Existed, location) - | None -> ( - (* not in parent, create new location *) - let maybe_location = - match last_filled t with + (* not in mask, maybe in parent *) + match Base.location_of_account (get_parent t) account_id with + | Some location -> + Ok (`Existed, location) + | 
None -> ( + (* not in parent, create new location *) + let maybe_location = + match last_filled t with + | None -> + Some (first_location ~ledger_depth:t.depth) + | Some loc -> + Location.next loc + in + match maybe_location with | None -> - Some (first_location ~ledger_depth:t.depth) - | Some loc -> - Location.next loc - in - match maybe_location with - | None -> - Or_error.error_string "Db_error.Out_of_leaves" - | Some location -> - set t location account ; - self_set_location t account_id location ; - t.current_location <- Some location ; - Ok (`Added, location) ) ) + Or_error.error_string "Db_error.Out_of_leaves" + | Some location -> + set t location account ; + self_set_location t account_id location ; + t.current_location <- Some location ; + Ok (`Added, location) ) ) | Some location -> Ok (`Existed, location) diff --git a/src/lib/merkle_mask/masking_merkle_tree_intf.ml b/src/lib/merkle_mask/masking_merkle_tree_intf.ml index b4d1b81d512..84bc6badfd5 100644 --- a/src/lib/merkle_mask/masking_merkle_tree_intf.ml +++ b/src/lib/merkle_mask/masking_merkle_tree_intf.ml @@ -35,16 +35,16 @@ module type S = sig module Attached : sig include Base_merkle_tree_intf.S - with module Addr = Addr - with module Location = Location - with type account := account - and type root_hash := hash - and type hash := hash - and type key := key - and type token_id := token_id - and type token_id_set := token_id_set - and type account_id := account_id - and type account_id_set := account_id_set + with module Addr = Addr + with module Location = Location + with type account := account + and type root_hash := hash + and type hash := hash + and type key := key + and type token_id := token_id + and type token_id_set := token_id_set + and type account_id := account_id + and type account_id_set := account_id_set exception Dangling_parent_reference of diff --git a/src/lib/mina_base/account.ml b/src/lib/mina_base/account.ml index b964c1b6b04..f0e1f2e8ef9 100644 --- a/src/lib/mina_base/account.ml 
+++ b/src/lib/mina_base/account.ml @@ -1,12 +1,10 @@ (* account.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params open Tick @@ -54,23 +52,21 @@ module Index = struct let to_bits ~ledger_depth t = List.init ledger_depth ~f:(Vector.get t) - let of_bits = - List.foldi ~init:Vector.empty ~f:(fun i t b -> Vector.set t i b) + let of_bits = List.foldi ~init:Vector.empty ~f:(fun i t b -> Vector.set t i b) let fold_bits ~ledger_depth t = - { Fold.fold= + { Fold.fold = (fun ~init ~f -> let rec go acc i = - if i = ledger_depth then acc - else go (f acc (Vector.get t i)) (i + 1) + if i = ledger_depth then acc else go (f acc (Vector.get t i)) (i + 1) in - go init 0 ) } + go init 0) + } let fold ~ledger_depth t = Fold.group3 ~default:false (fold_bits ~ledger_depth t) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Unpacked = struct type var = Tick.Boolean.var list @@ -104,17 +100,18 @@ module Poly = struct , 'permissions , 'snapp_opt ) t = - { public_key: 'pk - ; token_id: 'tid - ; token_permissions: 'token_permissions - ; balance: 'amount - ; nonce: 'nonce - ; receipt_chain_hash: 'receipt_chain_hash - ; delegate: 'delegate - ; voting_for: 'state_hash - ; timing: 'timing - ; permissions: 'permissions - ; snapp: 'snapp_opt } + { public_key : 'pk + ; token_id : 'tid + ; token_permissions : 'token_permissions + ; balance : 'amount + ; nonce : 'nonce + ; receipt_chain_hash : 'receipt_chain_hash + ; delegate : 'delegate + ; voting_for : 'state_hash + ; timing : 'timing + ; permissions : 'permissions + ; snapp : 'snapp_opt + } [@@deriving sexp, equal, compare, hash, yojson, fields, hlist] end end] @@ -166,8 +163,7 @@ end let check = Fn.id -[%%if -not feature_snapps] +[%%if not feature_snapps] let check (t : Binable_arg.t) = let t = check t in @@ -179,8 +175,7 @@ let check (t : Binable_arg.t) = [%%endif] -[%%if -not feature_tokens] +[%%if not 
feature_tokens] let check (t : Binable_arg.t) = let t = check t in @@ -211,12 +206,11 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(public_key)] +[%%define_locally Stable.Latest.(public_key)] -let token {Poly.token_id; _} = token_id +let token { Poly.token_id; _ } = token_id -let identifier ({public_key; token_id; _} : t) = +let identifier ({ public_key; token_id; _ } : t) = Account_id.create public_key token_id type value = @@ -245,15 +239,16 @@ let initialize account_id : t = in { public_key ; token_id - ; token_permissions= Token_permissions.default - ; balance= Balance.zero - ; nonce= Nonce.zero - ; receipt_chain_hash= Receipt.Chain_hash.empty + ; token_permissions = Token_permissions.default + ; balance = Balance.zero + ; nonce = Nonce.zero + ; receipt_chain_hash = Receipt.Chain_hash.empty ; delegate - ; voting_for= State_hash.dummy - ; timing= Timing.Untimed - ; permissions= Permissions.user_default - ; snapp= None } + ; voting_for = State_hash.dummy + ; timing = Timing.Untimed + ; permissions = Permissions.user_default + ; snapp = None + } let hash_snapp_account_opt = function | None -> @@ -285,8 +280,7 @@ let crypto_hash t = Random_oracle.hash ~init:crypto_hash_prefix (Random_oracle.pack_input (to_input t)) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = ( Public_key.Compressed.var @@ -304,7 +298,7 @@ type var = ) Poly.t -let identifier_of_var ({public_key; token_id; _} : var) = +let identifier_of_var ({ public_key; token_id; _ } : var) = Account_id.Checked.create public_key token_id let typ : (var, value) Typ.t = @@ -313,8 +307,7 @@ let typ : (var, value) Typ.t = , Snapp_account.t option ) Typ.t = let account : - (Snapp_account.t option As_prover.Ref.t, Snapp_account.t option) Typ.t - = + (Snapp_account.t option As_prover.Ref.t, Snapp_account.t option) Typ.t = Typ.Internal.ref () in let alloc = @@ -330,7 +323,7 @@ let typ : (var, value) Typ.t = (x, y) in let check (x, _) = Typ.field.check x in - {alloc; 
read; store; check} + { alloc; read; store; check } in let spec = Data_spec. @@ -343,11 +336,12 @@ let typ : (var, value) Typ.t = ; Typ.transport Public_key.Compressed.typ ~there:delegate_opt ~back:(fun delegate -> if Public_key.Compressed.(equal empty) delegate then None - else Some delegate ) + else Some delegate) ; State_hash.typ ; Timing.typ ; Permissions.typ - ; snapp ] + ; snapp + ] in Typ.of_hlistable spec ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist @@ -363,19 +357,21 @@ let var_of_t ; voting_for ; timing ; permissions - ; snapp } : + ; snapp + } : value) = - { Poly.public_key= Public_key.Compressed.var_of_t public_key - ; token_id= Token_id.var_of_t token_id - ; token_permissions= Token_permissions.var_of_t token_permissions - ; balance= Balance.var_of_t balance - ; nonce= Nonce.Checked.constant nonce - ; receipt_chain_hash= Receipt.Chain_hash.var_of_t receipt_chain_hash - ; delegate= Public_key.Compressed.var_of_t (delegate_opt delegate) - ; voting_for= State_hash.var_of_t voting_for - ; timing= Timing.var_of_t timing - ; permissions= Permissions.Checked.constant permissions - ; snapp= Field.Var.constant (hash_snapp_account_opt snapp) } + { Poly.public_key = Public_key.Compressed.var_of_t public_key + ; token_id = Token_id.var_of_t token_id + ; token_permissions = Token_permissions.var_of_t token_permissions + ; balance = Balance.var_of_t balance + ; nonce = Nonce.Checked.constant nonce + ; receipt_chain_hash = Receipt.Chain_hash.var_of_t receipt_chain_hash + ; delegate = Public_key.Compressed.var_of_t (delegate_opt delegate) + ; voting_for = State_hash.var_of_t voting_for + ; timing = Timing.var_of_t timing + ; permissions = Permissions.Checked.constant permissions + ; snapp = Field.Var.constant (hash_snapp_account_opt snapp) + } module Checked = struct module Unhashed = struct @@ -400,7 +396,7 @@ module Checked = struct let open Random_oracle.Input in let bits conv = f (fun x -> - 
bitstring (Bitstring_lib.Bitstring.Lsb_first.to_list (conv x)) ) + bitstring (Bitstring_lib.Bitstring.Lsb_first.to_list (conv x))) in make_checked (fun () -> List.reduce_exn ~f:append @@ -419,16 +415,16 @@ module Checked = struct ~receipt_chain_hash:(f Receipt.Chain_hash.var_to_input) ~delegate:(f Public_key.Compressed.Checked.to_input) ~voting_for:(f State_hash.var_to_input) - ~timing:(bits Timing.var_to_bits)) ) + ~timing:(bits Timing.var_to_bits))) let digest t = make_checked (fun () -> Random_oracle.Checked.( hash ~init:crypto_hash_prefix - (pack_input (Run.run_checked (to_input t)))) ) + (pack_input (Run.run_checked (to_input t))))) - let min_balance_at_slot ~global_slot ~cliff_time ~cliff_amount - ~vesting_period ~vesting_increment ~initial_minimum_balance = + let min_balance_at_slot ~global_slot ~cliff_time ~cliff_amount ~vesting_period + ~vesting_increment ~initial_minimum_balance = let%bind before_cliff = Global_slot.Checked.(global_slot < cliff_time) in let balance_to_int balance = Snarky_integer.Integer.of_bits ~m @@ Balance.var_to_bits balance @@ -438,9 +434,7 @@ module Checked = struct make_checked (fun () -> if_ ~m before_cliff ~then_:initial_minimum_balance_int ~else_: - (let global_slot_int = - Global_slot.Checked.to_integer global_slot - in + (let global_slot_int = Global_slot.Checked.to_integer global_slot in let cliff_time_int = Global_slot.Checked.to_integer cliff_time in let _, slot_diff = subtract_unpacking_or_zero ~m global_slot_int cliff_time_int @@ -459,23 +453,22 @@ module Checked = struct let vesting_increment_int = Amount.var_to_bits vesting_increment |> of_bits ~m in - let vesting_decrement = - mul ~m num_periods vesting_increment_int - in + let vesting_decrement = mul ~m num_periods vesting_increment_int in let _, min_balance_less_cliff_and_vesting_decrements = subtract_unpacking_or_zero ~m min_balance_less_cliff_decrement vesting_decrement in - min_balance_less_cliff_and_vesting_decrements) ) + 
min_balance_less_cliff_and_vesting_decrements)) let has_locked_tokens ~global_slot (t : var) = let open Timing.As_record in - let { is_timed= _ + let { is_timed = _ ; initial_minimum_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = t.timing in let%bind cur_min_balance = @@ -488,7 +481,7 @@ module Checked = struct (Bigint.of_field Field.zero |> Bigint.to_bignum_bigint) in make_checked (fun () -> - Snarky_integer.Integer.equal ~m cur_min_balance zero_int ) + Snarky_integer.Integer.equal ~m cur_min_balance zero_int) in (*Note: Untimed accounts will always have zero min balance*) Boolean.not zero_min_balance @@ -499,19 +492,20 @@ end let digest = crypto_hash let empty = - { Poly.public_key= Public_key.Compressed.empty - ; token_id= Token_id.default - ; token_permissions= Token_permissions.default - ; balance= Balance.zero - ; nonce= Nonce.zero - ; receipt_chain_hash= Receipt.Chain_hash.empty - ; delegate= None - ; voting_for= State_hash.dummy - ; timing= Timing.Untimed - ; permissions= + { Poly.public_key = Public_key.Compressed.empty + ; token_id = Token_id.default + ; token_permissions = Token_permissions.default + ; balance = Balance.zero + ; nonce = Nonce.zero + ; receipt_chain_hash = Receipt.Chain_hash.empty + ; delegate = None + ; voting_for = State_hash.dummy + ; timing = Timing.Untimed + ; permissions = Permissions.user_default (* TODO: This should maybe be Permissions.empty *) - ; snapp= None } + ; snapp = None + } let empty_digest = digest empty @@ -524,15 +518,16 @@ let create account_id balance = in { Poly.public_key ; token_id - ; token_permissions= Token_permissions.default + ; token_permissions = Token_permissions.default ; balance - ; nonce= Nonce.zero - ; receipt_chain_hash= Receipt.Chain_hash.empty + ; nonce = Nonce.zero + ; receipt_chain_hash = Receipt.Chain_hash.empty ; delegate - ; voting_for= State_hash.dummy - ; timing= Timing.Untimed - ; permissions= Permissions.user_default - ; snapp= 
None } + ; voting_for = State_hash.dummy + ; timing = Timing.Untimed + ; permissions = Permissions.user_default + ; snapp = None + } let create_timed account_id balance ~initial_minimum_balance ~cliff_time ~cliff_amount ~vesting_period ~vesting_increment = @@ -551,25 +546,26 @@ let create_timed account_id balance ~initial_minimum_balance ~cliff_time Or_error.return { Poly.public_key ; token_id - ; token_permissions= Token_permissions.default + ; token_permissions = Token_permissions.default ; balance - ; nonce= Nonce.zero - ; receipt_chain_hash= Receipt.Chain_hash.empty + ; nonce = Nonce.zero + ; receipt_chain_hash = Receipt.Chain_hash.empty ; delegate - ; voting_for= State_hash.dummy - ; snapp= None - ; permissions= Permissions.user_default - ; timing= + ; voting_for = State_hash.dummy + ; snapp = None + ; permissions = Permissions.user_default + ; timing = Timing.Timed { initial_minimum_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } } + ; vesting_increment + } + } (* no vesting after cliff time + 1 slot *) -let create_time_locked public_key balance ~initial_minimum_balance ~cliff_time - = +let create_time_locked public_key balance ~initial_minimum_balance ~cliff_time = create_timed public_key balance ~initial_minimum_balance ~cliff_time ~vesting_period:Global_slot.(succ zero) ~vesting_increment:initial_minimum_balance @@ -626,7 +622,8 @@ let has_locked_tokens ~global_slot (account : t) = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } -> + ; vesting_increment + } -> let curr_min_balance = min_balance_at_slot ~global_slot ~cliff_time ~cliff_amount ~vesting_period ~vesting_increment ~initial_minimum_balance diff --git a/src/lib/mina_base/account_id.ml b/src/lib/mina_base/account_id.ml index f2137d9d691..d6a682ad37f 100644 --- a/src/lib/mina_base/account_id.ml +++ b/src/lib/mina_base/account_id.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Import @@ -35,8 +34,7 
@@ let gen = include Comparable.Make_binable (Stable.Latest) include Hashable.Make_binable (Stable.Latest) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = Public_key.Compressed.var * Token_id.var diff --git a/src/lib/mina_base/account_timing.ml b/src/lib/mina_base/account_timing.ml index efc650f7407..13322af5242 100644 --- a/src/lib/mina_base/account_timing.ml +++ b/src/lib/mina_base/account_timing.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params open Tick @@ -29,11 +27,12 @@ module Poly = struct type ('slot, 'balance, 'amount) t = | Untimed | Timed of - { initial_minimum_balance: 'balance - ; cliff_time: 'slot - ; cliff_amount: 'amount - ; vesting_period: 'slot - ; vesting_increment: 'amount } + { initial_minimum_balance : 'balance + ; cliff_time : 'slot + ; cliff_amount : 'amount + ; vesting_period : 'slot + ; vesting_increment : 'amount + } [@@deriving sexp, equal, hash, compare, yojson] end end] @@ -56,21 +55,23 @@ end] type ('slot, 'balance, 'amount) tt = ('slot, 'balance, 'amount) Poly.t = | Untimed | Timed of - { initial_minimum_balance: 'balance - ; cliff_time: 'slot - ; cliff_amount: 'amount - ; vesting_period: 'slot - ; vesting_increment: 'amount } + { initial_minimum_balance : 'balance + ; cliff_time : 'slot + ; cliff_amount : 'amount + ; vesting_period : 'slot + ; vesting_increment : 'amount + } [@@deriving sexp, equal, hash, compare, yojson] module As_record = struct type ('bool, 'slot, 'balance, 'amount) t = - { is_timed: 'bool - ; initial_minimum_balance: 'balance - ; cliff_time: 'slot - ; cliff_amount: 'amount - ; vesting_period: 'slot - ; vesting_increment: 'amount } + { is_timed : 'bool + ; initial_minimum_balance : 'balance + ; cliff_time : 'slot + ; cliff_amount : 'amount + ; vesting_period : 'slot + ; vesting_increment : 'amount + } [@@deriving hlist] end @@ -83,25 +84,28 @@ let to_record t 
= let balance_unused = Balance.zero in let amount_unused = Amount.zero in As_record. - { is_timed= false - ; initial_minimum_balance= balance_unused - ; cliff_time= slot_unused - ; cliff_amount= amount_unused - ; vesting_period= slot_one (* avoid division by zero *) - ; vesting_increment= amount_unused } + { is_timed = false + ; initial_minimum_balance = balance_unused + ; cliff_time = slot_unused + ; cliff_amount = amount_unused + ; vesting_period = slot_one (* avoid division by zero *) + ; vesting_increment = amount_unused + } | Timed { initial_minimum_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } -> + ; vesting_increment + } -> As_record. - { is_timed= true + { is_timed = true ; initial_minimum_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } + ; vesting_increment + } let to_bits t = let As_record. @@ -110,7 +114,8 @@ let to_bits t = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = to_record t in is_timed @@ -120,8 +125,7 @@ let to_bits t = @ Global_slot.to_bits vesting_period @ Amount.to_bits vesting_increment ) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = (Boolean.var, Global_slot.Checked.var, Balance.var, Amount.var) As_record.t @@ -133,7 +137,8 @@ let var_to_bits ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = let open Bitstring_lib.Bitstring.Lsb_first in let initial_minimum_balance = to_list @@ Balance.var_to_bits initial_minimum_balance @@ -154,16 +159,18 @@ let var_of_t (t : t) : var = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = to_record t in As_record. 
- { is_timed= Boolean.var_of_value is_timed - ; initial_minimum_balance= Balance.var_of_t initial_minimum_balance - ; cliff_time= Global_slot.Checked.constant cliff_time - ; cliff_amount= Amount.var_of_t cliff_amount - ; vesting_period= Global_slot.Checked.constant vesting_period - ; vesting_increment= Amount.var_of_t vesting_increment } + { is_timed = Boolean.var_of_value is_timed + ; initial_minimum_balance = Balance.var_of_t initial_minimum_balance + ; cliff_time = Global_slot.Checked.constant cliff_time + ; cliff_amount = Amount.var_of_t cliff_amount + ; vesting_period = Global_slot.Checked.constant vesting_period + ; vesting_increment = Amount.var_of_t vesting_increment + } let untimed_var = var_of_t Untimed @@ -175,7 +182,8 @@ let typ : (var, t) Typ.t = ; Global_slot.typ ; Amount.typ ; Global_slot.typ - ; Amount.typ ] + ; Amount.typ + ] in (* because we represent the types t (a sum type) and var (a record) differently, we can't use the trick, used elsewhere, of polymorphic to_hlist and of_hlist @@ -198,14 +206,16 @@ let typ : (var, t) Typ.t = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment ] -> + ; vesting_increment + ] -> if is_timed then Timed { initial_minimum_balance ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } + ; vesting_increment + } else Untimed in let value_to_hlist (t : t) = @@ -215,7 +225,8 @@ let typ : (var, t) Typ.t = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = to_record t in H_list. 
@@ -224,7 +235,8 @@ let typ : (var, t) Typ.t = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment ] + ; vesting_increment + ] in let var_of_hlist = As_record.of_hlist in let var_to_hlist = As_record.to_hlist in @@ -262,6 +274,7 @@ let if_ b ~(then_ : var) ~(else_ : var) = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } + ; vesting_increment + } [%%endif] diff --git a/src/lib/mina_base/coinbase.ml b/src/lib/mina_base/coinbase.ml index 15e5cb01048..f8b8f4a8d51 100644 --- a/src/lib/mina_base/coinbase.ml +++ b/src/lib/mina_base/coinbase.ml @@ -6,9 +6,10 @@ module Fee_transfer = Coinbase_fee_transfer module Stable = struct module V1 = struct type t = - { receiver: Public_key.Compressed.Stable.V1.t - ; amount: Currency.Amount.Stable.V1.t - ; fee_transfer: Fee_transfer.Stable.V1.t option } + { receiver : Public_key.Compressed.Stable.V1.t + ; amount : Currency.Amount.Stable.V1.t + ; fee_transfer : Fee_transfer.Stable.V1.t option + } [@@deriving sexp, compare, equal, hash, yojson] let to_latest = Fn.id @@ -24,8 +25,7 @@ module Base58_check = Codable.Make_base58_check (Stable.Latest) [%%define_locally Base58_check.(to_base58_check, of_base58_check, of_base58_check_exn)] -[%%define_locally -Base58_check.String_ops.(to_string, of_string)] +[%%define_locally Base58_check.String_ops.(to_string, of_string)] let receiver_pk t = t.receiver @@ -45,15 +45,15 @@ let accounts_accessed t = receiver t :: List.map ~f:Fee_transfer.receiver (Option.to_list t.fee_transfer) -let is_valid {amount; fee_transfer; _} = +let is_valid { amount; fee_transfer; _ } = match fee_transfer with | None -> true - | Some {fee; _} -> + | Some { fee; _ } -> Currency.Amount.(of_fee fee <= amount) let create ~amount ~receiver ~fee_transfer = - let t = {receiver; amount; fee_transfer} in + let t = { receiver; amount; fee_transfer } in if is_valid t then let adjusted_fee_transfer = Option.bind fee_transfer ~f:(fun fee_transfer -> @@ -61,16 +61,16 @@ let create ~amount ~receiver 
~fee_transfer = (not (Public_key.Compressed.equal receiver (Fee_transfer.receiver_pk fee_transfer))) - fee_transfer ) + fee_transfer) in - Ok {t with fee_transfer= adjusted_fee_transfer} + Ok { t with fee_transfer = adjusted_fee_transfer } else Or_error.error_string "Coinbase.create: invalid coinbase" -let supply_increase {receiver= _; amount; fee_transfer} = +let supply_increase { receiver = _; amount; fee_transfer } = match fee_transfer with | None -> Ok amount - | Some {fee; _} -> + | Some { fee; _ } -> Currency.Amount.sub amount (Currency.Amount.of_fee fee) |> Option.value_map ~f:(fun _ -> Ok amount) @@ -112,14 +112,14 @@ module Gen = struct in let fee_transfer = match fee_transfer with - | Some {Fee_transfer.receiver_pk; _} + | Some { Fee_transfer.receiver_pk; _ } when Public_key.Compressed.equal receiver receiver_pk -> (* Erase fee transfer, to mirror [create]. *) None | _ -> fee_transfer in - ( {receiver; amount; fee_transfer} + ( { receiver; amount; fee_transfer } , `Supercharged_coinbase supercharged_coinbase ) let with_random_receivers ~keys ~min_amount ~max_amount ~fee_transfer = @@ -136,12 +136,12 @@ module Gen = struct in let fee_transfer = match fee_transfer with - | Some {Fee_transfer.receiver_pk; _} + | Some { Fee_transfer.receiver_pk; _ } when Public_key.Compressed.equal receiver receiver_pk -> (* Erase fee transfer, to mirror [create]. 
*) None | _ -> fee_transfer in - {receiver; amount; fee_transfer} + { receiver; amount; fee_transfer } end diff --git a/src/lib/mina_base/coinbase.mli b/src/lib/mina_base/coinbase.mli index ebd8a5d5a3f..250d06c23ff 100644 --- a/src/lib/mina_base/coinbase.mli +++ b/src/lib/mina_base/coinbase.mli @@ -5,9 +5,10 @@ module Fee_transfer = Coinbase_fee_transfer module Stable : sig module V1 : sig type t = private - { receiver: Public_key.Compressed.Stable.V1.t - ; amount: Currency.Amount.Stable.V1.t - ; fee_transfer: Fee_transfer.Stable.V1.t option } + { receiver : Public_key.Compressed.Stable.V1.t + ; amount : Currency.Amount.Stable.V1.t + ; fee_transfer : Fee_transfer.Stable.V1.t option + } [@@deriving sexp, bin_io, compare, equal, version, hash, yojson] end @@ -16,9 +17,10 @@ end (* bin_io intentionally omitted in deriving list *) type t = Stable.Latest.t = private - { receiver: Public_key.Compressed.t - ; amount: Currency.Amount.t - ; fee_transfer: Fee_transfer.t option } + { receiver : Public_key.Compressed.t + ; amount : Currency.Amount.t + ; fee_transfer : Fee_transfer.t option + } [@@deriving sexp, compare, equal, hash, yojson] include Codable.Base58_check_intf with type t := t @@ -48,14 +50,15 @@ val fee_excess : t -> Fee_excess.t Or_error.t module Gen : sig val gen : constraint_constants:Genesis_constants.Constraint_constants.t - -> (t * [`Supercharged_coinbase of bool]) Quickcheck.Generator.t + -> (t * [ `Supercharged_coinbase of bool ]) Quickcheck.Generator.t (** Creates coinbase with reward between [min_amount] and [max_amount]. The generated amount[coinbase_amount] is then used as the upper bound for the fee transfer. 
*) val with_random_receivers : keys:Signature_keypair.t array -> min_amount:int -> max_amount:int - -> fee_transfer:( coinbase_amount:Currency.Amount.t - -> Fee_transfer.t Quickcheck.Generator.t) + -> fee_transfer: + ( coinbase_amount:Currency.Amount.t + -> Fee_transfer.t Quickcheck.Generator.t) -> t Quickcheck.Generator.t end diff --git a/src/lib/mina_base/coinbase_fee_transfer.ml b/src/lib/mina_base/coinbase_fee_transfer.ml index a46d756452f..1dbdfec5fbc 100644 --- a/src/lib/mina_base/coinbase_fee_transfer.ml +++ b/src/lib/mina_base/coinbase_fee_transfer.ml @@ -5,8 +5,9 @@ open Import module Stable = struct module V1 = struct type t = - { receiver_pk: Public_key.Compressed.Stable.V1.t - ; fee: Currency.Fee.Stable.V1.t } + { receiver_pk : Public_key.Compressed.Stable.V1.t + ; fee : Currency.Fee.Stable.V1.t + } [@@deriving sexp, compare, equal, yojson, hash] let to_latest = Fn.id @@ -17,7 +18,7 @@ module Stable = struct end end] -let create ~receiver_pk ~fee = {receiver_pk; fee} +let create ~receiver_pk ~fee = { receiver_pk; fee } include Comparable.Make (Stable.Latest) module Base58_check = Codable.Make_base58_check (Stable.Latest) @@ -25,16 +26,15 @@ module Base58_check = Codable.Make_base58_check (Stable.Latest) [%%define_locally Base58_check.(to_base58_check, of_base58_check, of_base58_check_exn)] -[%%define_locally -Base58_check.String_ops.(to_string, of_string)] +[%%define_locally Base58_check.String_ops.(to_string, of_string)] -let receiver_pk {receiver_pk; _} = receiver_pk +let receiver_pk { receiver_pk; _ } = receiver_pk -let receiver {receiver_pk; _} = Account_id.create receiver_pk Token_id.default +let receiver { receiver_pk; _ } = Account_id.create receiver_pk Token_id.default -let fee {fee; _} = fee +let fee { fee; _ } = fee -let to_fee_transfer {receiver_pk; fee} = +let to_fee_transfer { receiver_pk; fee } = Fee_transfer.Single.create ~receiver_pk ~fee ~fee_token:Token_id.default module Gen = struct @@ -42,7 +42,7 @@ module Gen = struct let open 
Quickcheck.Generator.Let_syntax in let%bind receiver_pk = Public_key.Compressed.gen in let%map fee = Currency.Fee.gen_incl min_fee max_fee in - {receiver_pk; fee} + { receiver_pk; fee } let with_random_receivers ~keys ?(min_fee = Currency.Fee.zero) ~coinbase_amount : t Quickcheck.Generator.t = @@ -53,5 +53,5 @@ module Gen = struct Quickcheck_lib.of_array keys >>| fun keypair -> Public_key.compress keypair.Keypair.public_key and fee = Currency.Fee.gen_incl min_fee max_fee in - {receiver_pk; fee} + { receiver_pk; fee } end diff --git a/src/lib/mina_base/coinbase_fee_transfer.mli b/src/lib/mina_base/coinbase_fee_transfer.mli index 4d71003d5af..2ff1a30198e 100644 --- a/src/lib/mina_base/coinbase_fee_transfer.mli +++ b/src/lib/mina_base/coinbase_fee_transfer.mli @@ -5,8 +5,9 @@ open Import module Stable : sig module V1 : sig type t = private - { receiver_pk: Public_key.Compressed.Stable.V1.t - ; fee: Currency.Fee.Stable.V1.t } + { receiver_pk : Public_key.Compressed.Stable.V1.t + ; fee : Currency.Fee.Stable.V1.t + } [@@deriving sexp, compare, equal, yojson, hash] end end] diff --git a/src/lib/mina_base/control.ml b/src/lib/mina_base/control.ml index 84844e7d746..5b93c4b31ac 100644 --- a/src/lib/mina_base/control.ml +++ b/src/lib/mina_base/control.ml @@ -1,11 +1,9 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel (* TODO: temporary hack *) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] [%%versioned module Stable = struct @@ -14,8 +12,9 @@ module Stable = struct | Proof of Pickles.Side_loaded.Proof.Stable.V1.t | Signature of Signature.Stable.V1.t | Both of - { signature: Signature.Stable.V1.t - ; proof: Pickles.Side_loaded.Proof.Stable.V1.t } + { signature : Signature.Stable.V1.t + ; proof : Pickles.Side_loaded.Proof.Stable.V1.t + } | None_given [@@deriving sexp, equal, yojson, hash, compare] @@ -31,7 +30,7 @@ module Stable = struct type t = | Proof of unit | Signature of Signature.Stable.V1.t - | Both of {signature: 
Signature.Stable.V1.t; proof: unit} + | Both of { signature : Signature.Stable.V1.t; proof : unit } | None_given [@@deriving sexp, equal, yojson, hash, compare] diff --git a/src/lib/mina_base/data_hash.ml b/src/lib/mina_base/data_hash.ml index 113c80372f8..cbea18b6fb6 100644 --- a/src/lib/mina_base/data_hash.ml +++ b/src/lib/mina_base/data_hash.ml @@ -1,10 +1,8 @@ (* data_hash.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] include Data_hash_lib.Data_hash diff --git a/src/lib/mina_base/epoch_data.ml b/src/lib/mina_base/epoch_data.ml index b78d86c23cc..4d5ec343ebd 100644 --- a/src/lib/mina_base/epoch_data.ml +++ b/src/lib/mina_base/epoch_data.ml @@ -11,13 +11,14 @@ module Poly = struct , 'lock_checkpoint , 'length ) t = - { ledger: 'epoch_ledger - ; seed: 'epoch_seed - ; start_checkpoint: 'start_checkpoint + { ledger : 'epoch_ledger + ; seed : 'epoch_seed + ; start_checkpoint : 'start_checkpoint (* The lock checkpoint is the hash of the latest state in the seed update range, not including the current state. 
*) - ; lock_checkpoint: 'lock_checkpoint - ; epoch_length: 'length } + ; lock_checkpoint : 'lock_checkpoint + ; epoch_length : 'length + } [@@deriving hlist, sexp, equal, compare, hash, yojson, fields] end end] @@ -33,8 +34,7 @@ type var = let if_ cond ~(then_ : var) ~(else_ : var) = let open Snark_params.Tick.Checked.Let_syntax in - let%map ledger = - Epoch_ledger.if_ cond ~then_:then_.ledger ~else_:else_.ledger + let%map ledger = Epoch_ledger.if_ cond ~then_:then_.ledger ~else_:else_.ledger and seed = Epoch_seed.if_ cond ~then_:then_.seed ~else_:else_.seed and start_checkpoint = State_hash.if_ cond ~then_:then_.start_checkpoint @@ -45,7 +45,7 @@ let if_ cond ~(then_ : var) ~(else_ : var) = and epoch_length = Length.Checked.if_ cond ~then_:then_.epoch_length ~else_:else_.epoch_length in - {Poly.ledger; seed; start_checkpoint; lock_checkpoint; epoch_length} + { Poly.ledger; seed; start_checkpoint; lock_checkpoint; epoch_length } module Value = struct [%%versioned diff --git a/src/lib/mina_base/epoch_ledger.ml b/src/lib/mina_base/epoch_ledger.ml index 77553e62ec9..9fa3673e9cd 100644 --- a/src/lib/mina_base/epoch_ledger.ml +++ b/src/lib/mina_base/epoch_ledger.ml @@ -8,7 +8,7 @@ module Poly = struct module Stable = struct module V1 = struct type ('ledger_hash, 'amount) t = - {hash: 'ledger_hash; total_currency: 'amount} + { hash : 'ledger_hash; total_currency : 'amount } [@@deriving sexp, equal, compare, hash, yojson, hlist] end end] @@ -27,25 +27,27 @@ module Value = struct end] end -let to_input ({hash; total_currency} : Value.t) = +let to_input ({ hash; total_currency } : Value.t) = let open Snark_params.Tick in - { Random_oracle.Input.field_elements= [|(hash :> Field.t)|] - ; bitstrings= [|Amount.to_bits total_currency|] } + { Random_oracle.Input.field_elements = [| (hash :> Field.t) |] + ; bitstrings = [| Amount.to_bits total_currency |] + } type var = (Frozen_ledger_hash0.var, Amount.var) Poly.t -let data_spec = Data_spec.[Frozen_ledger_hash0.typ; Amount.typ] 
+let data_spec = Data_spec.[ Frozen_ledger_hash0.typ; Amount.typ ] let typ : (var, Value.t) Typ.t = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist -let var_to_input ({Poly.hash; total_currency} : var) = - { Random_oracle.Input.field_elements= - [|Frozen_ledger_hash0.var_to_hash_packed hash|] - ; bitstrings= - [|Bitstring.Lsb_first.to_list (Amount.var_to_bits total_currency)|] } +let var_to_input ({ Poly.hash; total_currency } : var) = + { Random_oracle.Input.field_elements = + [| Frozen_ledger_hash0.var_to_hash_packed hash |] + ; bitstrings = + [| Bitstring.Lsb_first.to_list (Amount.var_to_bits total_currency) |] + } let if_ cond ~(then_ : (Frozen_ledger_hash0.var, Amount.var) Poly.t) ~(else_ : (Frozen_ledger_hash0.var, Amount.var) Poly.t) = @@ -56,4 +58,4 @@ let if_ cond ~(then_ : (Frozen_ledger_hash0.var, Amount.var) Poly.t) Amount.Checked.if_ cond ~then_:then_.total_currency ~else_:else_.total_currency in - {Poly.hash; total_currency} + { Poly.hash; total_currency } diff --git a/src/lib/mina_base/epoch_seed.ml b/src/lib/mina_base/epoch_seed.ml index 982c96e4a50..f94e6673a0f 100644 --- a/src/lib/mina_base/epoch_seed.ml +++ b/src/lib/mina_base/epoch_seed.ml @@ -15,15 +15,14 @@ module Stable = struct module V1 = struct module T = struct type t = Snark_params.Tick.Field.t - [@@deriving sexp, compare, hash, version {asserted}] + [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Core.Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) diff --git a/src/lib/mina_base/fee_excess.ml b/src/lib/mina_base/fee_excess.ml index 283794ae8af..1376920e78a 100644 --- a/src/lib/mina_base/fee_excess.ml +++ b/src/lib/mina_base/fee_excess.ml @@ -28,13 +28,11 @@ implementation. 
*) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] open Import @@ -42,8 +40,7 @@ open Import open Currency -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params open Tick @@ -55,48 +52,51 @@ module Poly = struct module Stable = struct module V1 = struct type ('token, 'fee) t = - { fee_token_l: 'token - ; fee_excess_l: 'fee - ; fee_token_r: 'token - ; fee_excess_r: 'fee } + { fee_token_l : 'token + ; fee_excess_l : 'fee + ; fee_token_r : 'token + ; fee_excess_r : 'fee + } [@@deriving compare, equal, hash, sexp, hlist] let to_yojson token_to_yojson fee_to_yojson - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = `List [ `Assoc [ ("token", token_to_yojson fee_token_l) - ; ("amount", fee_to_yojson fee_excess_l) ] + ; ("amount", fee_to_yojson fee_excess_l) + ] ; `Assoc [ ("token", token_to_yojson fee_token_r) - ; ("amount", fee_to_yojson fee_excess_r) ] ] + ; ("amount", fee_to_yojson fee_excess_r) + ] + ] let of_yojson token_of_yojson fee_of_yojson = function | `List - [ `Assoc [("token", fee_token_l); ("amount", fee_excess_l)] - ; `Assoc [("token", fee_token_r); ("amount", fee_excess_r)] ] -> + [ `Assoc [ ("token", fee_token_l); ("amount", fee_excess_l) ] + ; `Assoc [ ("token", fee_token_r); ("amount", fee_excess_r) ] + ] -> let open Result.Let_syntax in let%map fee_token_l = token_of_yojson fee_token_l and fee_excess_l = fee_of_yojson fee_excess_l and fee_token_r = token_of_yojson fee_token_r and fee_excess_r = fee_of_yojson fee_excess_r in - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } | _ -> Error "Fee_excess.Poly.Stable.V1.t" end end] - [%%define_locally - Stable.Latest.(to_yojson, of_yojson)] + [%%define_locally Stable.Latest.(to_yojson, of_yojson)] - [%%ifdef - consensus_mechanism] + [%%ifdef 
consensus_mechanism] let typ (token_typ : ('token_var, 'token) Typ.t) (fee_typ : ('fee_var, 'fee) Typ.t) : (('token_var, 'fee_var) t, ('token, 'fee) t) Typ.t = Typ.of_hlistable - [token_typ; fee_typ; token_typ; fee_typ] + [ token_typ; fee_typ; token_typ; fee_typ ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -117,66 +117,69 @@ module Stable = struct end] type ('token, 'fee) poly = ('token, 'fee) Poly.t = - { fee_token_l: 'token - ; fee_excess_l: 'fee - ; fee_token_r: 'token - ; fee_excess_r: 'fee } + { fee_token_l : 'token + ; fee_excess_l : 'fee + ; fee_token_r : 'token + ; fee_excess_r : 'fee + } [@@deriving compare, equal, hash, sexp] let poly_to_yojson = Poly.to_yojson let poly_of_yojson = Poly.of_yojson -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = (Token_id.var, Fee.Signed.var) poly let typ : (var, t) Typ.t = Poly.typ Token_id.typ Fee.Signed.typ -let var_of_t ({fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} : t) : var - = - { fee_token_l= Token_id.var_of_t fee_token_l - ; fee_excess_l= Fee.Signed.Checked.constant fee_excess_l - ; fee_token_r= Token_id.var_of_t fee_token_r - ; fee_excess_r= Fee.Signed.Checked.constant fee_excess_r } +let var_of_t ({ fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } : t) : + var = + { fee_token_l = Token_id.var_of_t fee_token_l + ; fee_excess_l = Fee.Signed.Checked.constant fee_excess_l + ; fee_token_r = Token_id.var_of_t fee_token_r + ; fee_excess_r = Fee.Signed.Checked.constant fee_excess_r + } [%%endif] -let to_input {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = +let to_input { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = let open Random_oracle.Input in List.reduce_exn ~f:append [ Token_id.to_input fee_token_l ; Fee.Signed.to_input fee_excess_l ; Token_id.to_input fee_token_r - ; Fee.Signed.to_input fee_excess_r ] + ; Fee.Signed.to_input fee_excess_r + ] -[%%ifdef -consensus_mechanism] +[%%ifdef 
consensus_mechanism] -let to_input_checked {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = +let to_input_checked { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = let%map fee_token_l = Token_id.Checked.to_input fee_token_l and fee_token_r = Token_id.Checked.to_input fee_token_r in List.reduce_exn ~f:Random_oracle.Input.append [ fee_token_l ; Fee.Signed.Checked.to_input fee_excess_l ; fee_token_r - ; Fee.Signed.Checked.to_input fee_excess_r ] + ; Fee.Signed.Checked.to_input fee_excess_r + ] let assert_equal_checked (t1 : var) (t2 : var) = Checked.all_unit [ Token_id.Checked.Assert.equal t1.fee_token_l t2.fee_token_l ; Fee.Signed.Checked.assert_equal t1.fee_excess_l t2.fee_excess_l ; Token_id.Checked.Assert.equal t1.fee_token_r t2.fee_token_r - ; Fee.Signed.Checked.assert_equal t1.fee_excess_r t2.fee_excess_r ] + ; Fee.Signed.Checked.assert_equal t1.fee_excess_r t2.fee_excess_r + ] [%%endif] (** Eliminate a fee excess, either by combining it with one to the left/right, or by checking that it is zero. 
*) -let eliminate_fee_excess (fee_token_l, fee_excess_l) - (fee_token_m, fee_excess_m) (fee_token_r, fee_excess_r) = +let eliminate_fee_excess (fee_token_l, fee_excess_l) (fee_token_m, fee_excess_m) + (fee_token_r, fee_excess_r) = let add_err x y = match Fee.Signed.add x y with | Some z -> @@ -205,8 +208,7 @@ let eliminate_fee_excess (fee_token_l, fee_excess_l) %{sexp: Fee.Signed.t} was nonzero" fee_token_m fee_excess_m -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] (* We use field elements instead of a currency type here, under the following assumptions: @@ -273,12 +275,11 @@ let%snarkydef eliminate_fee_excess_checked (fee_token_l, fee_excess_l) - any zero fee excess has the default token - if the fee tokens are the same, the excesses are combined *) -let rebalance ({fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} : t) = +let rebalance ({ fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } : t) = let open Or_error.Let_syntax in (* Use the same token for both if [fee_excess_l] is zero. *) let fee_token_l = - if Fee.(equal zero) fee_excess_l.magnitude then fee_token_r - else fee_token_l + if Fee.(equal zero) fee_excess_l.magnitude then fee_token_r else fee_token_l in (* Rebalancing. *) let%map fee_excess_l, fee_excess_r = @@ -302,12 +303,11 @@ let rebalance ({fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} : t) = if Fee.(equal zero) fee_excess_r.magnitude then Token_id.default else fee_token_r in - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] -let rebalance_checked {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = +let rebalance_checked { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = let open Checked.Let_syntax in (* Use the same token for both if [fee_excess_l] is zero. 
*) let%bind fee_token_l = @@ -343,20 +343,22 @@ let rebalance_checked {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = ~then_:Token_id.(var_of_t default) ~else_:fee_token_r in - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } [%%endif] (** Combine the fee excesses from two transitions. *) let combine - { fee_token_l= fee_token1_l - ; fee_excess_l= fee_excess1_l - ; fee_token_r= fee_token1_r - ; fee_excess_r= fee_excess1_r } - { fee_token_l= fee_token2_l - ; fee_excess_l= fee_excess2_l - ; fee_token_r= fee_token2_r - ; fee_excess_r= fee_excess2_r } = + { fee_token_l = fee_token1_l + ; fee_excess_l = fee_excess1_l + ; fee_token_r = fee_token1_r + ; fee_excess_r = fee_excess1_r + } + { fee_token_l = fee_token2_l + ; fee_excess_l = fee_excess2_l + ; fee_token_r = fee_token2_r + ; fee_excess_r = fee_excess2_r + } = let open Or_error.Let_syntax in (* Eliminate fee_excess1_r. *) let%bind (fee_token1_l, fee_excess1_l), (fee_token2_l, fee_excess2_l) = @@ -375,23 +377,25 @@ let combine (fee_token2_r, fee_excess2_r) in rebalance - { fee_token_l= fee_token1_l - ; fee_excess_l= fee_excess1_l - ; fee_token_r= fee_token2_r - ; fee_excess_r= fee_excess2_r } + { fee_token_l = fee_token1_l + ; fee_excess_l = fee_excess1_l + ; fee_token_r = fee_token2_r + ; fee_excess_r = fee_excess2_r + } -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let%snarkydef combine_checked - { fee_token_l= fee_token1_l - ; fee_excess_l= fee_excess1_l - ; fee_token_r= fee_token1_r - ; fee_excess_r= fee_excess1_r } - { fee_token_l= fee_token2_l - ; fee_excess_l= fee_excess2_l - ; fee_token_r= fee_token2_r - ; fee_excess_r= fee_excess2_r } = + { fee_token_l = fee_token1_l + ; fee_excess_l = fee_excess1_l + ; fee_token_r = fee_token1_r + ; fee_excess_r = fee_excess1_r + } + { fee_token_l = fee_token2_l + ; fee_excess_l = fee_excess2_l + ; fee_token_r = fee_token2_r + ; fee_excess_r = fee_excess2_r + } = let open 
Checked.Let_syntax in (* Represent amounts as field elements. *) let%bind fee_excess1_l = Fee.Signed.Checked.to_field_var fee_excess1_l in @@ -415,12 +419,13 @@ let%snarkydef combine_checked (fee_token2_l, fee_excess2_l) (fee_token2_r, fee_excess2_r)) in - let%bind {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = + let%bind { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = rebalance_checked - { fee_token_l= fee_token1_l - ; fee_excess_l= fee_excess1_l - ; fee_token_r= fee_token2_r - ; fee_excess_r= fee_excess2_r } + { fee_token_l = fee_token1_l + ; fee_excess_l = fee_excess1_l + ; fee_token_r = fee_token2_r + ; fee_excess_r = fee_excess2_r + } in let convert_to_currency excess = let%bind currency_excess = @@ -468,17 +473,18 @@ let%snarkydef combine_checked [%with_label "Check for overflow in fee_excess_r"] (convert_to_currency fee_excess_r) in - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } [%%endif] let empty = - { fee_token_l= Token_id.default - ; fee_excess_l= Fee.Signed.zero - ; fee_token_r= Token_id.default - ; fee_excess_r= Fee.Signed.zero } + { fee_token_l = Token_id.default + ; fee_excess_l = Fee.Signed.zero + ; fee_token_r = Token_id.default + ; fee_excess_r = Fee.Signed.zero + } -let is_empty {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} = +let is_empty { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } = Fee.Signed.(equal zero) fee_excess_l && Fee.Signed.(equal zero) fee_excess_r && Token_id.(equal default) fee_token_l @@ -494,8 +500,9 @@ let of_single (fee_token_l, fee_excess_l) = @@ rebalance { fee_token_l ; fee_excess_l - ; fee_token_r= Token_id.default - ; fee_excess_r= Fee.Signed.zero } + ; fee_token_r = Token_id.default + ; fee_excess_r = Fee.Signed.zero + } let of_one_or_two excesses = let unreduced = @@ -503,21 +510,21 @@ let of_one_or_two excesses = | `One (fee_token_l, fee_excess_l) -> { fee_token_l ; fee_excess_l - ; fee_token_r= 
Token_id.default - ; fee_excess_r= Fee.Signed.zero } + ; fee_token_r = Token_id.default + ; fee_excess_r = Fee.Signed.zero + } | `Two ((fee_token_l, fee_excess_l), (fee_token_r, fee_excess_r)) -> - {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } in rebalance unreduced -let to_one_or_two ({fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} : t) +let to_one_or_two ({ fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } : t) = if Fee.(equal zero) fee_excess_r.magnitude then `One (fee_token_l, fee_excess_l) else `Two ((fee_token_l, fee_excess_l), (fee_token_r, fee_excess_r)) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let gen = let open Quickcheck.Generator.Let_syntax in @@ -528,14 +535,15 @@ let gen = | Ok ret -> ret | Error _ -> ( - (* There is an overflow, just choose the first excess. *) - match excesses with - | `One (fee_token_l, fee_excess_l) | `Two ((fee_token_l, fee_excess_l), _) - -> - { fee_token_l - ; fee_excess_l - ; fee_token_r= Token_id.default - ; fee_excess_r= Fee.Signed.zero } ) + (* There is an overflow, just choose the first excess. 
*) + match excesses with + | `One (fee_token_l, fee_excess_l) | `Two ((fee_token_l, fee_excess_l), _) + -> + { fee_token_l + ; fee_excess_l + ; fee_token_r = Token_id.default + ; fee_excess_r = Fee.Signed.zero + } ) let%test_unit "Checked and unchecked behaviour is consistent" = Quickcheck.test (Quickcheck.Generator.tuple2 gen gen) ~f:(fun (fe1, fe2) -> @@ -546,7 +554,7 @@ let%test_unit "Checked and unchecked behaviour is consistent" = Typ.(typ * typ) typ (fun (fe1, fe2) -> combine_checked fe1 fe2) - (fe1, fe2) ) + (fe1, fe2)) in match (fe, fe_checked) with | Ok fe, Ok fe_checked -> @@ -554,7 +562,7 @@ let%test_unit "Checked and unchecked behaviour is consistent" = | Error _, Error _ -> () | _ -> - [%test_eq: t Or_error.t] fe fe_checked ) + [%test_eq: t Or_error.t] fe fe_checked) let%test_unit "Combine succeeds when the middle excess is zero" = Quickcheck.test (Quickcheck.Generator.tuple3 gen Token_id.gen Fee.Signed.gen) @@ -580,6 +588,6 @@ let%test_unit "Combine succeeds when the middle excess is zero" = (* The token is the same, and rebalancing causes an overflow. 
*) of_single (fe1.fee_token_r, Fee.Signed.negate fe1.fee_excess_r) in - ignore @@ Or_error.ok_exn (combine fe1 fe2) ) + ignore @@ Or_error.ok_exn (combine fe1 fe2)) [%%endif] diff --git a/src/lib/mina_base/fee_transfer.ml b/src/lib/mina_base/fee_transfer.ml index 13a6e27e1d5..41239e0244a 100644 --- a/src/lib/mina_base/fee_transfer.ml +++ b/src/lib/mina_base/fee_transfer.ml @@ -6,9 +6,10 @@ module Single = struct module Stable = struct module V1 = struct type t = - { receiver_pk: Public_key.Compressed.Stable.V1.t - ; fee: Currency.Fee.Stable.V1.t - ; fee_token: Token_id.Stable.V1.t } + { receiver_pk : Public_key.Compressed.Stable.V1.t + ; fee : Currency.Fee.Stable.V1.t + ; fee_token : Token_id.Stable.V1.t + } [@@deriving sexp, compare, equal, yojson, hash] let to_latest = Fn.id @@ -25,19 +26,18 @@ module Single = struct [%%define_locally Base58_check.(to_base58_check, of_base58_check, of_base58_check_exn)] - [%%define_locally - Base58_check.String_ops.(to_string, of_string)] + [%%define_locally Base58_check.String_ops.(to_string, of_string)] - let create ~receiver_pk ~fee ~fee_token = {receiver_pk; fee; fee_token} + let create ~receiver_pk ~fee ~fee_token = { receiver_pk; fee; fee_token } - let receiver_pk {receiver_pk; _} = receiver_pk + let receiver_pk { receiver_pk; _ } = receiver_pk - let receiver {receiver_pk; fee_token; _} = + let receiver { receiver_pk; fee_token; _ } = Account_id.create receiver_pk fee_token - let fee {fee; _} = fee + let fee { fee; _ } = fee - let fee_token {fee_token; _} = fee_token + let fee_token { fee_token; _ } = fee_token module Gen = struct let with_random_receivers ?(min_fee = 0) ~keys ~max_fee ~token : @@ -49,7 +49,7 @@ module Single = struct >>| fun keypair -> Public_key.compress keypair.Keypair.public_key and fee = Int.gen_incl min_fee max_fee >>| Currency.Fee.of_int and fee_token = token in - {receiver_pk; fee; fee_token} + { receiver_pk; fee; fee_token } end end @@ -64,9 +64,10 @@ module Stable = struct end] type single = 
Single.t = - { receiver_pk: Public_key.Compressed.t - ; fee: Currency.Fee.t - ; fee_token: Token_id.t } + { receiver_pk : Public_key.Compressed.t + ; fee : Currency.Fee.t + ; fee_token : Token_id.t + } [@@deriving sexp, compare, yojson, hash] let to_singles = Fn.id @@ -98,12 +99,11 @@ include Comparable.Make (Stable.Latest) let fee_excess ft = ft - |> One_or_two.map ~f:(fun {fee_token; fee; _} -> - (fee_token, Currency.Fee.Signed.(negate (of_unsigned fee))) ) + |> One_or_two.map ~f:(fun { fee_token; fee; _ } -> + (fee_token, Currency.Fee.Signed.(negate (of_unsigned fee)))) |> Fee_excess.of_one_or_two -let receiver_pks t = - One_or_two.to_list (One_or_two.map ~f:Single.receiver_pk t) +let receiver_pks t = One_or_two.to_list (One_or_two.map ~f:Single.receiver_pk t) let receivers t = One_or_two.to_list (One_or_two.map ~f:Single.receiver t) diff --git a/src/lib/mina_base/fee_transfer.mli b/src/lib/mina_base/fee_transfer.mli index 70bf21922f9..83b23589cfd 100644 --- a/src/lib/mina_base/fee_transfer.mli +++ b/src/lib/mina_base/fee_transfer.mli @@ -5,9 +5,10 @@ module Single : sig module Stable : sig module V1 : sig type t = private - { receiver_pk: Public_key.Compressed.Stable.V1.t - ; fee: Currency.Fee.Stable.V1.t - ; fee_token: Token_id.Stable.V1.t } + { receiver_pk : Public_key.Compressed.Stable.V1.t + ; fee : Currency.Fee.Stable.V1.t + ; fee_token : Token_id.Stable.V1.t + } [@@deriving bin_io, sexp, compare, equal, yojson, version, hash] end @@ -15,9 +16,10 @@ module Single : sig end type t = Stable.Latest.t = private - { receiver_pk: Public_key.Compressed.t - ; fee: Currency.Fee.t - ; fee_token: Token_id.t } + { receiver_pk : Public_key.Compressed.t + ; fee : Currency.Fee.t + ; fee_token : Token_id.t + } [@@deriving sexp, compare, yojson, hash] include Comparable.S with type t := t @@ -60,9 +62,10 @@ end type t = Stable.Latest.t [@@deriving sexp, compare, yojson, hash] type single = Single.t = private - { receiver_pk: Public_key.Compressed.t - ; fee: Currency.Fee.t 
- ; fee_token: Token_id.t } + { receiver_pk : Public_key.Compressed.t + ; fee : Currency.Fee.t + ; fee_token : Token_id.t + } [@@deriving sexp, compare, yojson, hash] include Comparable.S with type t := t diff --git a/src/lib/mina_base/fee_with_prover.ml b/src/lib/mina_base/fee_with_prover.ml index de219f43211..a80011abd4f 100644 --- a/src/lib/mina_base/fee_with_prover.ml +++ b/src/lib/mina_base/fee_with_prover.ml @@ -5,7 +5,9 @@ open Signature_lib module Stable = struct module V1 = struct type t = - {fee: Currency.Fee.Stable.V1.t; prover: Public_key.Compressed.Stable.V1.t} + { fee : Currency.Fee.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving sexp, yojson, hash] let to_latest = Fn.id @@ -30,4 +32,4 @@ include Comparable.Make (Stable.V1.T) let gen = Quickcheck.Generator.map2 Currency.Fee.gen Public_key.Compressed.gen - ~f:(fun fee prover -> {fee; prover}) + ~f:(fun fee prover -> { fee; prover }) diff --git a/src/lib/mina_base/gen/gen.ml b/src/lib/mina_base/gen/gen.ml index 0d2043c2db3..b2eae8e22fa 100644 --- a/src/lib/mina_base/gen/gen.ml +++ b/src/lib/mina_base/gen/gen.ml @@ -30,7 +30,7 @@ let expr ~loc = let open E in let earray = E.pexp_array - (List.map keypairs ~f:(fun {public_key; private_key} -> + (List.map keypairs ~f:(fun { public_key; private_key } -> E.pexp_tuple [ estring (Binable.to_string @@ -39,7 +39,8 @@ let expr ~loc = ; estring (Binable.to_string (module Private_key.Stable.Latest) - private_key) ] )) + private_key) + ])) in let%expr conv (pk, sk) = ( Core_kernel.Binable.of_string @@ -67,15 +68,15 @@ let json = Public_key.( Compressed.to_base58_check (compress kp.public_key)) ) ; ( "private_key" - , `String (Private_key.to_base58_check kp.private_key) ) ] )) + , `String (Private_key.to_base58_check kp.private_key) ) + ])) let main () = Out_channel.with_file "sample_keypairs.ml" ~f:(fun ml_file -> let fmt = Format.formatter_of_out_channel ml_file in - Pprintast.top_phrase fmt (Ptop_def (structure 
~loc:Ppxlib.Location.none)) - ) ; + Pprintast.top_phrase fmt (Ptop_def (structure ~loc:Ppxlib.Location.none))) ; Out_channel.with_file "sample_keypairs.json" ~f:(fun json_file -> - Yojson.pretty_to_channel json_file json ) ; + Yojson.pretty_to_channel json_file json) ; exit 0 let () = main () diff --git a/src/lib/mina_base/hack_snarky_tests.ml b/src/lib/mina_base/hack_snarky_tests.ml index badec1dd5ce..44b62bbc88d 100644 --- a/src/lib/mina_base/hack_snarky_tests.ml +++ b/src/lib/mina_base/hack_snarky_tests.ml @@ -8,7 +8,7 @@ let%test_module "merkle_tree" = let hash = Option.value_map ~default:Free_hash.Hash_empty ~f:(fun x -> - Free_hash.Hash_value x ) + Free_hash.Hash_value x) let create_tree n = let tree = create ~hash ~merge 0 in diff --git a/src/lib/mina_base/hash_prefix.ml b/src/lib/mina_base/hash_prefix.ml index f15ed5a599c..f8776658466 100644 --- a/src/lib/mina_base/hash_prefix.ml +++ b/src/lib/mina_base/hash_prefix.ml @@ -1,10 +1,8 @@ (* hash_prefix.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] include Hash_prefix_states diff --git a/src/lib/mina_base/how_to_obtain_keys.ml b/src/lib/mina_base/how_to_obtain_keys.ml index 3cb860bb1db..520ceedcd15 100644 --- a/src/lib/mina_base/how_to_obtain_keys.ml +++ b/src/lib/mina_base/how_to_obtain_keys.ml @@ -2,7 +2,7 @@ open Core open Snark_params module T = struct - type t = Load_both of {step: string; wrap: string} | Generate_both + type t = Load_both of { step : string; wrap : string } | Generate_both [@@deriving sexp] end @@ -25,5 +25,5 @@ let obtain_keys (type vk pk kp) | Generate_both -> let ks = f () in (Impl.Keypair.vk ks, Impl.Keypair.pk ks) - | Load_both {step; _} -> + | Load_both { step; _ } -> Sexp.load_sexp_conv_exn step keypair_of_sexp ) diff --git a/src/lib/mina_base/import.ml b/src/lib/mina_base/import.ml index 6770ee40708..b41cffa8fe4 100644 --- a/src/lib/mina_base/import.ml +++ b/src/lib/mina_base/import.ml @@ -1,10 
+1,8 @@ (* account.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Signature_lib = Signature_lib_nonconsensus module Mina_compile_config = diff --git a/src/lib/mina_base/ledger.ml b/src/lib/mina_base/ledger.ml index 890f88dfb7d..a8fcffedd15 100644 --- a/src/lib/mina_base/ledger.ml +++ b/src/lib/mina_base/ledger.ml @@ -21,8 +21,7 @@ module Ledger_inner = struct | Hash of Location_at_depth.Addr.t [@@deriving hash, sexp, compare] - include Hashable.Make_binable (Arg) [@@deriving - sexp, compare, hash, yojson] + include Hashable.Make_binable (Arg) [@@deriving sexp, compare, hash, yojson] end module Kvdb : Intf.Key_value_database with type config := string = @@ -71,13 +70,13 @@ module Ledger_inner = struct let identifier = Account.identifier - let balance Account.Poly.{balance; _} = balance + let balance Account.Poly.{ balance; _ } = balance - let token Account.Poly.{token_id; _} = token_id + let token Account.Poly.{ token_id; _ } = token_id let empty = Account.empty - let token_owner ({token_permissions; _} : t) = + let token_owner ({ token_permissions; _ } : t) = match token_permissions with | Token_owned _ -> true @@ -106,45 +105,45 @@ module Ledger_inner = struct module Db : Merkle_ledger.Database_intf.S - with module Location = Location_at_depth - with module Addr = Location_at_depth.Addr - with type root_hash := Ledger_hash.t - and type hash := Ledger_hash.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account := Account.t - and type account_id_set := Account_id.Set.t - and type account_id := Account_id.t = + with module Location = Location_at_depth + with module Addr = Location_at_depth.Addr + with type root_hash := Ledger_hash.t + and type hash := Ledger_hash.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type 
account := Account.t + and type account_id_set := Account_id.Set.t + and type account_id := Account_id.t = Database.Make (Inputs) module Null = Null_ledger.Make (Inputs) module Any_ledger : Merkle_ledger.Any_ledger.S - with module Location = Location_at_depth - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t = + with module Location = Location_at_depth + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t = Merkle_ledger.Any_ledger.Make_base (Inputs) module Mask : Merkle_mask.Masking_merkle_tree_intf.S - with module Location = Location_at_depth - and module Attached.Addr = Location_at_depth.Addr - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type location := Location_at_depth.t - and type parent := Any_ledger.M.t = + with module Location = Location_at_depth + and module Attached.Addr = Location_at_depth.Addr + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type location := Location_at_depth.t + and type parent := Any_ledger.M.t = Merkle_mask.Masking_merkle_tree.Make (struct include Inputs module Base = Any_ledger.M @@ -152,19 +151,19 @@ module Ledger_inner = struct module Maskable : Merkle_mask.Maskable_merkle_tree_intf.S - 
with module Location = Location_at_depth - with module Addr = Location_at_depth.Addr - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type root_hash := Hash.t - and type unattached_mask := Mask.t - and type attached_mask := Mask.Attached.t - and type t := Any_ledger.M.t = + with module Location = Location_at_depth + with module Addr = Location_at_depth.Addr + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type root_hash := Hash.t + and type unattached_mask := Mask.t + and type attached_mask := Mask.Attached.t + and type t := Any_ledger.M.t = Merkle_mask.Maskable_merkle_tree.Make (struct include Inputs module Base = Any_ledger.M @@ -184,7 +183,7 @@ module Ledger_inner = struct Maskable.register_mask casted mask (* Mask.Attached.create () fails, can't create an attached mask directly - shadow create in order to create an attached mask + shadow create in order to create an attached mask *) let create ?directory_name ~depth () = of_database (Db.create ?directory_name ~depth ()) @@ -236,7 +235,7 @@ module Ledger_inner = struct type attached_mask = Mask.Attached.t (* inside MaskedLedger, the functor argument has assigned to location, account, and path - but the module signature for the functor result wants them, so we declare them here *) + but the module signature for the functor result wants them, so we declare them here *) type location = Location.t (* TODO: Don't allocate: see Issue #1191 *) @@ -248,7 +247,7 @@ module Ledger_inner = struct let action, _ = get_or_create_account t account_id account |> Or_error.ok_exn in - if [%equal: 
[`Existed | `Added]] action `Existed then + if [%equal: [ `Existed | `Added ]] action `Existed then failwith (sprintf !"Could not create a new account with pk \ @@ -281,7 +280,7 @@ module Ledger_inner = struct failwith "create_empty for a key already present" | `Added, new_loc -> Debug_assert.debug_assert (fun () -> - [%test_eq: Ledger_hash.t] start_hash (merkle_root ledger) ) ; + [%test_eq: Ledger_hash.t] start_hash (merkle_root ledger)) ; (merkle_path ledger new_loc, Account.empty) let _handler t = @@ -291,9 +290,9 @@ module Ledger_inner = struct | `Left h -> h | `Right h -> - h ) + h) in - stage (fun (With {request; respond}) -> + stage (fun (With { request; respond }) -> match request with | Ledger_hash.Get_element idx -> let elt = get_at_index_exn t idx in @@ -309,7 +308,7 @@ module Ledger_inner = struct let index = index_of_account_exn t pk in respond (Provide index) | _ -> - unhandled ) + unhandled) end include Ledger_inner @@ -359,8 +358,9 @@ let apply_initial_ledger_state : t -> init_state -> unit = let account = Account.initialize account_id in let account' = { account with - balance= Currency.Balance.of_int (Currency.Amount.to_int balance) + balance = Currency.Balance.of_int (Currency.Amount.to_int balance) ; nonce - ; timing } + ; timing + } in - create_new_account_exn t account_id account' ) + create_new_account_exn t account_id account') diff --git a/src/lib/mina_base/ledger.mli b/src/lib/mina_base/ledger.mli index 0fe4a2203e7..71e08ccfd4b 100644 --- a/src/lib/mina_base/ledger.mli +++ b/src/lib/mina_base/ledger.mli @@ -5,73 +5,73 @@ module Location : Merkle_ledger.Location_intf.S module Db : Merkle_ledger.Database_intf.S - with module Location = Location - with module Addr = Location.Addr - with type root_hash := Ledger_hash.t - and type hash := Ledger_hash.t - and type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and 
type account_id_set := Account_id.Set.t + with module Location = Location + with module Addr = Location.Addr + with type root_hash := Ledger_hash.t + and type hash := Ledger_hash.t + and type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t module Any_ledger : Merkle_ledger.Any_ledger.S - with module Location = Location - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Ledger_hash.t + with module Location = Location + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Ledger_hash.t module Mask : Merkle_mask.Masking_merkle_tree_intf.S - with module Location = Location - and module Attached.Addr = Location.Addr - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Ledger_hash.t - and type location := Location.t - and type parent := Any_ledger.M.t + with module Location = Location + and module Attached.Addr = Location.Addr + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Ledger_hash.t + and type location := Location.t + and type parent := Any_ledger.M.t module Maskable : 
Merkle_mask.Maskable_merkle_tree_intf.S - with module Location = Location - with module Addr = Location.Addr - with type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Ledger_hash.t - and type root_hash := Ledger_hash.t - and type unattached_mask := Mask.t - and type attached_mask := Mask.Attached.t - and type t := Any_ledger.M.t + with module Location = Location + with module Addr = Location.Addr + with type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Ledger_hash.t + and type root_hash := Ledger_hash.t + and type unattached_mask := Mask.t + and type attached_mask := Mask.Attached.t + and type t := Any_ledger.M.t include Merkle_mask.Maskable_merkle_tree_intf.S - with module Location := Location - with module Addr = Location.Addr - with type root_hash := Ledger_hash.t - and type hash := Ledger_hash.t - and type account := Account.t - and type key := Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type t = Mask.Attached.t - and type attached_mask = Mask.Attached.t - and type unattached_mask = Mask.t + with module Location := Location + with module Addr = Location.Addr + with type root_hash := Ledger_hash.t + and type hash := Ledger_hash.t + and type account := Account.t + and type key := Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type t = Mask.Attached.t + and type attached_mask = Mask.Attached.t + 
and type unattached_mask = Mask.t (* We override the type of unregister_mask_exn that comes from Merkle_mask.Maskable_merkle_tree_intf.S because at this level callers aren't @@ -106,19 +106,20 @@ module Transaction_applied : sig module Signed_command_applied : sig module Common : sig type t = Transaction_applied.Signed_command_applied.Common.t = - { user_command: Signed_command.t With_status.t - ; previous_receipt_chain_hash: Receipt.Chain_hash.t - ; fee_payer_timing: Account.Timing.t - ; source_timing: Account.Timing.t option } + { user_command : Signed_command.t With_status.t + ; previous_receipt_chain_hash : Receipt.Chain_hash.t + ; fee_payer_timing : Account.Timing.t + ; source_timing : Account.Timing.t option + } [@@deriving sexp] end module Body : sig type t = Transaction_applied.Signed_command_applied.Body.t = - | Payment of {previous_empty_accounts: Account_id.t list} + | Payment of { previous_empty_accounts : Account_id.t list } | Stake_delegation of - { previous_delegate: Public_key.Compressed.t option } - | Create_new_token of {created_token: Token_id.t} + { previous_delegate : Public_key.Compressed.t option } + | Create_new_token of { created_token : Token_id.t } | Create_token_account | Mint_tokens | Failed @@ -126,14 +127,15 @@ module Transaction_applied : sig end type t = Transaction_applied.Signed_command_applied.t = - {common: Common.t; body: Body.t} + { common : Common.t; body : Body.t } [@@deriving sexp] end module Snapp_command_applied : sig type t = Transaction_applied.Snapp_command_applied.t = - { accounts: (Account_id.t * Account.t option) list - ; command: Snapp_command.t With_status.t } + { accounts : (Account_id.t * Account.t option) list + ; command : Snapp_command.t With_status.t + } [@@deriving sexp] end @@ -146,19 +148,21 @@ module Transaction_applied : sig module Fee_transfer_applied : sig type t = Transaction_applied.Fee_transfer_applied.t = - { fee_transfer: Fee_transfer.t - ; previous_empty_accounts: Account_id.t list - ; 
receiver_timing: Account.Timing.t - ; balances: Transaction_status.Fee_transfer_balance_data.t } + { fee_transfer : Fee_transfer.t + ; previous_empty_accounts : Account_id.t list + ; receiver_timing : Account.Timing.t + ; balances : Transaction_status.Fee_transfer_balance_data.t + } [@@deriving sexp] end module Coinbase_applied : sig type t = Transaction_applied.Coinbase_applied.t = - { coinbase: Coinbase.t - ; previous_empty_accounts: Account_id.t list - ; receiver_timing: Account.Timing.t - ; balances: Transaction_status.Coinbase_balance_data.t } + { coinbase : Coinbase.t + ; previous_empty_accounts : Account_id.t list + ; receiver_timing : Account.Timing.t + ; balances : Transaction_status.Coinbase_balance_data.t + } [@@deriving sexp] end @@ -171,7 +175,7 @@ module Transaction_applied : sig end type t = Transaction_applied.t = - {previous_hash: Ledger_hash.t; varying: Varying.t} + { previous_hash : Ledger_hash.t; varying : Varying.t } [@@deriving sexp] val transaction : t -> Transaction.t With_status.t @@ -229,14 +233,14 @@ val merkle_root_after_snapp_command_exn : -> txn_state_view:Snapp_predicate.Protocol_state.View.t -> t -> Snapp_command.Valid.t - -> Ledger_hash.t * [`Next_available_token of Token_id.t] + -> Ledger_hash.t * [ `Next_available_token of Token_id.t ] val merkle_root_after_user_command_exn : constraint_constants:Genesis_constants.Constraint_constants.t -> txn_global_slot:Mina_numbers.Global_slot.t -> t -> Signed_command.With_valid_signature.t - -> Ledger_hash.t * [`Next_available_token of Token_id.t] + -> Ledger_hash.t * [ `Next_available_token of Token_id.t ] (** Raises if the ledger is full. 
*) val create_empty_exn : t -> Account_id.t -> Path.t * Account.t diff --git a/src/lib/mina_base/ledger_hash.ml b/src/lib/mina_base/ledger_hash.ml index d26db9df571..179774c5f21 100644 --- a/src/lib/mina_base/ledger_hash.ml +++ b/src/lib/mina_base/ledger_hash.ml @@ -19,7 +19,7 @@ module Merkle_tree = Tick.make_checked (fun () -> Random_oracle.Checked.hash ~init:(Hash_prefix.merkle_tree height) - [|h1; h2|] ) + [| h1; h2 |]) let assert_equal h1 h2 = Field.Checked.Assert.equal h1 h2 @@ -37,7 +37,7 @@ include Ledger_hash0 let merge ~height (h1 : t) (h2 : t) = Random_oracle.hash ~init:(Hash_prefix.merkle_tree height) - [|(h1 :> field); (h2 :> field)|] + [| (h1 :> field); (h2 :> field) |] |> of_hash let empty_hash = of_hash Outside_hash_image.t @@ -55,7 +55,7 @@ type _ Request.t += | Set : Account.Index.t * Account.t -> unit Request.t | Find_index : Account_id.t -> Account.Index.t Request.t -let reraise_merkle_requests (With {request; respond}) = +let reraise_merkle_requests (With { request; respond }) = match request with | Merkle_tree.Get_path addr -> respond (Delegate (Get_path addr)) @@ -92,7 +92,7 @@ let%snarkydef modify_account ~depth t aid (Merkle_tree.modify_req ~depth (var_to_hash_packed t) addr ~f:(fun account -> let%bind x = filter account in - f x account )) + f x account)) reraise_merkle_requests >>| var_of_hash_packed @@ -121,9 +121,9 @@ let%snarkydef modify_account_send ~depth t aid ~is_writeable ~f = let%bind () = [%with_label "account is either present or empty and writeable"] (Boolean.Assert.any - [account_already_there; not_there_but_writeable]) + [ account_already_there; not_there_but_writeable ]) in - return not_there_but_writeable) ) + return not_there_but_writeable)) ~f:(fun is_empty_and_writeable x -> f ~is_empty_and_writeable x) (* @@ -147,7 +147,7 @@ let%snarkydef modify_account_recv ~depth t aid ~f = in let%bind () = [%with_label "account is either present or empty"] - (Boolean.Assert.any [account_already_there; account_not_there]) + 
(Boolean.Assert.any [ account_already_there; account_not_there ]) in - return account_not_there) ) + return account_not_there)) ~f:(fun is_empty_and_writeable x -> f ~is_empty_and_writeable x) diff --git a/src/lib/mina_base/ledger_hash0.ml b/src/lib/mina_base/ledger_hash0.ml index add954ad4bd..2e92220da52 100644 --- a/src/lib/mina_base/ledger_hash0.ml +++ b/src/lib/mina_base/ledger_hash0.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -28,15 +26,14 @@ module Stable = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) diff --git a/src/lib/mina_base/ledger_hash_intf.ml b/src/lib/mina_base/ledger_hash_intf.ml index 1c2c31fd075..e0e38d6f07c 100644 --- a/src/lib/mina_base/ledger_hash_intf.ml +++ b/src/lib/mina_base/ledger_hash_intf.ml @@ -14,10 +14,7 @@ module type S = sig | Find_index : Account_id.t -> Account.Index.t Request.t val get : - depth:int - -> var - -> Account.Index.Unpacked.var - -> (Account.var, _) Checked.t + depth:int -> var -> Account.Index.Unpacked.var -> (Account.var, _) Checked.t val merge : height:int -> t -> t -> t @@ -43,7 +40,8 @@ module type S = sig -> var -> Account_id.var -> is_writeable:Boolean.var - -> f:( is_empty_and_writeable:Boolean.var + -> f: + ( is_empty_and_writeable:Boolean.var -> Account.var -> (Account.var, 's) Checked.t) -> (var, 's) Checked.t @@ -52,7 +50,8 @@ module type S = sig depth:int -> var -> Account_id.var - -> f:( is_empty_and_writeable:Boolean.var + -> f: + ( is_empty_and_writeable:Boolean.var -> Account.var -> (Account.var, 's) Checked.t) -> (var, 's) 
Checked.t diff --git a/src/lib/mina_base/ledger_transfer.ml b/src/lib/mina_base/ledger_transfer.ml index 6d28019b1ba..c2d2e73207d 100644 --- a/src/lib/mina_base/ledger_transfer.ml +++ b/src/lib/mina_base/ledger_transfer.ml @@ -2,14 +2,14 @@ open Core_kernel module type Base_ledger_intf = Merkle_ledger.Base_ledger_intf.S - with type account := Account.t - and type key := Signature_lib.Public_key.Compressed.t - and type token_id := Token_id.t - and type token_id_set := Token_id.Set.t - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Ledger_hash.t - and type root_hash := Ledger_hash.t + with type account := Account.t + and type key := Signature_lib.Public_key.Compressed.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Ledger_hash.t + and type root_hash := Ledger_hash.t module Make (Source : Base_ledger_intf) @@ -19,7 +19,7 @@ end = struct let transfer_accounts ~src ~dest = let accounts = Source.foldi src ~init:[] ~f:(fun addr acc account -> - (addr, account) :: acc ) + (addr, account) :: acc) in Dest.set_batch_accounts dest accounts ; let src_hash = Source.merkle_root src in @@ -44,9 +44,8 @@ end = struct Sparse_ledger.iteri src ~f:(fun _idx account -> let id = Account.identifier account in ignore - ( Dest.get_or_create_account dest id account - |> Or_error.ok_exn - : [`Added | `Existed] * Dest.Location.t ) ) ) + ( Dest.get_or_create_account dest id account |> Or_error.ok_exn + : [ `Added | `Existed ] * Dest.Location.t ))) in let src_hash = Sparse_ledger.merkle_root src in let dest_hash = Dest.merkle_root dest in diff --git a/src/lib/mina_base/minting_payload.ml b/src/lib/mina_base/minting_payload.ml index 125d2fe182d..9886a4238bf 100644 --- a/src/lib/mina_base/minting_payload.ml +++ b/src/lib/mina_base/minting_payload.ml @@ -5,27 +5,28 @@ open Import module Stable = struct module V1 = 
struct type t = - { token_id: Token_id.Stable.V1.t - ; token_owner_pk: Public_key.Compressed.Stable.V1.t - ; receiver_pk: Public_key.Compressed.Stable.V1.t - ; amount: Currency.Amount.Stable.V1.t } + { token_id : Token_id.Stable.V1.t + ; token_owner_pk : Public_key.Compressed.Stable.V1.t + ; receiver_pk : Public_key.Compressed.Stable.V1.t + ; amount : Currency.Amount.Stable.V1.t + } [@@deriving compare, equal, sexp, hash, yojson] let to_latest = Fn.id end end] -let receiver_pk {receiver_pk; _} = receiver_pk +let receiver_pk { receiver_pk; _ } = receiver_pk -let receiver {token_id; receiver_pk; _} = +let receiver { token_id; receiver_pk; _ } = Account_id.create receiver_pk token_id -let source_pk {token_owner_pk; _} = token_owner_pk +let source_pk { token_owner_pk; _ } = token_owner_pk -let source {token_id; token_owner_pk; _} = +let source { token_id; token_owner_pk; _ } = Account_id.create token_owner_pk token_id -let token {token_id; _} = token_id +let token { token_id; _ } = token_id let gen = let open Quickcheck.Generator.Let_syntax in @@ -33,4 +34,4 @@ let gen = let%bind token_owner_pk = Public_key.Compressed.gen in let%bind receiver_pk = Public_key.Compressed.gen in let%map amount = Currency.Amount.gen in - {token_id; token_owner_pk; receiver_pk; amount} + { token_id; token_owner_pk; receiver_pk; amount } diff --git a/src/lib/mina_base/new_account_payload.ml b/src/lib/mina_base/new_account_payload.ml index e0f9ffe85f3..e0145f8fe11 100644 --- a/src/lib/mina_base/new_account_payload.ml +++ b/src/lib/mina_base/new_account_payload.ml @@ -5,27 +5,28 @@ open Import module Stable = struct module V1 = struct type t = - { token_id: Token_id.Stable.V1.t - ; token_owner_pk: Public_key.Compressed.Stable.V1.t - ; receiver_pk: Public_key.Compressed.Stable.V1.t - ; account_disabled: bool } + { token_id : Token_id.Stable.V1.t + ; token_owner_pk : Public_key.Compressed.Stable.V1.t + ; receiver_pk : Public_key.Compressed.Stable.V1.t + ; account_disabled : bool + } [@@deriving 
compare, equal, sexp, hash, yojson] let to_latest = Fn.id end end] -let receiver_pk {receiver_pk; _} = receiver_pk +let receiver_pk { receiver_pk; _ } = receiver_pk -let receiver {token_id; receiver_pk; _} = +let receiver { token_id; receiver_pk; _ } = Account_id.create receiver_pk token_id -let source_pk {token_owner_pk; _} = token_owner_pk +let source_pk { token_owner_pk; _ } = token_owner_pk -let source {token_id; token_owner_pk; _} = +let source { token_id; token_owner_pk; _ } = Account_id.create token_owner_pk token_id -let token {token_id; _} = token_id +let token { token_id; _ } = token_id let gen = let open Quickcheck.Generator.Let_syntax in @@ -33,4 +34,4 @@ let gen = let%bind token_owner_pk = Public_key.Compressed.gen in let%bind receiver_pk = Public_key.Compressed.gen in let%map account_disabled = Quickcheck.Generator.bool in - {token_id; token_owner_pk; receiver_pk; account_disabled} + { token_id; token_owner_pk; receiver_pk; account_disabled } diff --git a/src/lib/mina_base/new_token_payload.ml b/src/lib/mina_base/new_token_payload.ml index d04b7e14b4f..73ad3c3adec 100644 --- a/src/lib/mina_base/new_token_payload.ml +++ b/src/lib/mina_base/new_token_payload.ml @@ -5,22 +5,23 @@ open Import module Stable = struct module V1 = struct type t = - { token_owner_pk: Public_key.Compressed.Stable.V1.t - ; disable_new_accounts: bool } + { token_owner_pk : Public_key.Compressed.Stable.V1.t + ; disable_new_accounts : bool + } [@@deriving compare, equal, sexp, hash, yojson] let to_latest = Fn.id end end] -let receiver_pk {token_owner_pk; _} = token_owner_pk +let receiver_pk { token_owner_pk; _ } = token_owner_pk -let receiver ~next_available_token {token_owner_pk; _} = +let receiver ~next_available_token { token_owner_pk; _ } = Account_id.create token_owner_pk next_available_token -let source_pk {token_owner_pk; _} = token_owner_pk +let source_pk { token_owner_pk; _ } = token_owner_pk -let source ~next_available_token {token_owner_pk; _} = +let source 
~next_available_token { token_owner_pk; _ } = Account_id.create token_owner_pk next_available_token let token (_ : t) = Token_id.invalid @@ -29,4 +30,4 @@ let gen = let open Quickcheck.Generator.Let_syntax in let%bind token_owner_pk = Public_key.Compressed.gen in let%map disable_new_accounts = Quickcheck.Generator.bool in - {token_owner_pk; disable_new_accounts} + { token_owner_pk; disable_new_accounts } diff --git a/src/lib/mina_base/other_fee_payer.ml b/src/lib/mina_base/other_fee_payer.ml index c5ceaedb51d..fec2cea71bc 100644 --- a/src/lib/mina_base/other_fee_payer.ml +++ b/src/lib/mina_base/other_fee_payer.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Signature_lib @@ -22,7 +20,7 @@ module Payload = struct module Stable = struct module V1 = struct type ('pk, 'token_id, 'nonce, 'fee) t = - {pk: 'pk; token_id: 'token_id; nonce: 'nonce; fee: 'fee} + { pk : 'pk; token_id : 'token_id; nonce : 'nonce; fee : 'fee } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -51,14 +49,15 @@ module Payload = struct , Currency.Fee.Checked.t ) Poly.t - let to_input ({pk; token_id; nonce; fee} : t) = + let to_input ({ pk; token_id; nonce; fee } : t) = let ( ! 
) = Impl.run_checked in let open Random_oracle_input in List.reduce_exn ~f:append [ Public_key.Compressed.Checked.to_input pk ; !(Token_id.Checked.to_input token_id) ; !(Mina_numbers.Account_nonce.Checked.to_input nonce) - ; Currency.Fee.var_to_input fee ] + ; Currency.Fee.var_to_input fee + ] end open Pickles.Impls.Step @@ -69,29 +68,33 @@ module Payload = struct [ Public_key.Compressed.typ ; Token_id.typ ; Mina_numbers.Account_nonce.typ - ; Currency.Fee.typ ] + ; Currency.Fee.typ + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let dummy : t = - { pk= Public_key.Compressed.empty - ; token_id= Token_id.invalid - ; nonce= Mina_numbers.Account_nonce.zero - ; fee= Currency.Fee.zero } + { pk = Public_key.Compressed.empty + ; token_id = Token_id.invalid + ; nonce = Mina_numbers.Account_nonce.zero + ; fee = Currency.Fee.zero + } - let to_input ({pk; token_id; nonce; fee} : t) = + let to_input ({ pk; token_id; nonce; fee } : t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Public_key.Compressed.to_input pk ; Token_id.to_input token_id ; Mina_numbers.Account_nonce.to_input nonce - ; Currency.Fee.to_input fee ] + ; Currency.Fee.to_input fee + ] end [%%versioned module Stable = struct module V1 = struct - type t = {payload: Payload.Stable.V1.t; signature: Signature.Stable.V1.t} + type t = + { payload : Payload.Stable.V1.t; signature : Signature.Stable.V1.t } [@@deriving sexp, equal, yojson, hash, compare] let to_latest = Fn.id diff --git a/src/lib/mina_base/payment_payload.ml b/src/lib/mina_base/payment_payload.ml index 386dbeb7b4e..55cba502bd8 100644 --- a/src/lib/mina_base/payment_payload.ml +++ b/src/lib/mina_base/payment_payload.ml @@ -1,12 +1,10 @@ (* payment_payload.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -26,10 +24,11 @@ module Poly = struct module 
Stable = struct module V1 = struct type ('public_key, 'token_id, 'amount) t = - { source_pk: 'public_key - ; receiver_pk: 'public_key - ; token_id: 'token_id - ; amount: 'amount } + { source_pk : 'public_key + ; receiver_pk : 'public_key + ; token_id : 'token_id + ; amount : 'amount + } [@@deriving equal, sexp, hash, yojson, compare, hlist] end end] @@ -51,15 +50,15 @@ end] let dummy = Poly. - { source_pk= Public_key.Compressed.empty - ; receiver_pk= Public_key.Compressed.empty - ; token_id= Token_id.invalid - ; amount= Amount.zero } + { source_pk = Public_key.Compressed.empty + ; receiver_pk = Public_key.Compressed.empty + ; token_id = Token_id.invalid + ; amount = Amount.zero + } -let token {Poly.token_id; _} = token_id +let token { Poly.token_id; _ } = token_id -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = (Public_key.Compressed.var, Token_id.var, Amount.var) Poly.t @@ -69,31 +68,35 @@ let typ : (var, t) Typ.t = [ Public_key.Compressed.typ ; Public_key.Compressed.typ ; Token_id.typ - ; Amount.typ ] + ; Amount.typ + ] in Typ.of_hlistable spec ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist -let to_input {Poly.source_pk; receiver_pk; token_id; amount} = +let to_input { Poly.source_pk; receiver_pk; token_id; amount } = Array.reduce_exn ~f:Random_oracle.Input.append [| Public_key.Compressed.to_input source_pk ; Public_key.Compressed.to_input receiver_pk ; Token_id.to_input token_id - ; Amount.to_input amount |] + ; Amount.to_input amount + |] -let var_to_input {Poly.source_pk; receiver_pk; token_id; amount} = +let var_to_input { Poly.source_pk; receiver_pk; token_id; amount } = let%map token_id = Token_id.Checked.to_input token_id in Array.reduce_exn ~f:Random_oracle.Input.append [| Public_key.Compressed.Checked.to_input source_pk ; Public_key.Compressed.Checked.to_input receiver_pk ; token_id - ; Amount.var_to_input amount |] + ; Amount.var_to_input amount + |] -let 
var_of_t ({source_pk; receiver_pk; token_id; amount} : t) : var = - { source_pk= Public_key.Compressed.var_of_t source_pk - ; receiver_pk= Public_key.Compressed.var_of_t receiver_pk - ; token_id= Token_id.var_of_t token_id - ; amount= Amount.var_of_t amount } +let var_of_t ({ source_pk; receiver_pk; token_id; amount } : t) : var = + { source_pk = Public_key.Compressed.var_of_t source_pk + ; receiver_pk = Public_key.Compressed.var_of_t receiver_pk + ; token_id = Token_id.var_of_t token_id + ; amount = Amount.var_of_t amount + } [%%endif] @@ -108,7 +111,7 @@ let gen_aux ?source_pk ~token_id ~max_amount = in let%bind receiver_pk = Public_key.Compressed.gen in let%map amount = Amount.gen_incl Amount.zero max_amount in - Poly.{source_pk; receiver_pk; token_id; amount} + Poly.{ source_pk; receiver_pk; token_id; amount } let gen ?source_pk ~max_amount = let open Quickcheck.Generator.Let_syntax in diff --git a/src/lib/mina_base/payment_payload.mli b/src/lib/mina_base/payment_payload.mli index 33379ab40a0..ff8c86f5f02 100644 --- a/src/lib/mina_base/payment_payload.mli +++ b/src/lib/mina_base/payment_payload.mli @@ -4,22 +4,22 @@ open Import module Poly : sig type ('public_key, 'token_id, 'amount) t = - { source_pk: 'public_key - ; receiver_pk: 'public_key - ; token_id: 'token_id - ; amount: 'amount } + { source_pk : 'public_key + ; receiver_pk : 'public_key + ; token_id : 'token_id + ; amount : 'amount + } [@@deriving equal, sexp, hash, yojson] - module Stable : - sig - module V1 : sig - type nonrec ('pk, 'tid, 'amount) t - [@@deriving bin_io, equal, sexp, hash, yojson, version] - end - - module Latest = V1 + module Stable : sig + module V1 : sig + type nonrec ('pk, 'tid, 'amount) t + [@@deriving bin_io, equal, sexp, hash, yojson, version] end - with type ('pk, 'tid, 'amount) V1.t = ('pk, 'tid, 'amount) t + + module Latest = V1 + end + with type ('pk, 'tid, 'amount) V1.t = ('pk, 'tid, 'amount) t end [%%versioned: @@ -53,8 +53,7 @@ val gen_non_default_token : -> 
max_amount:Currency.Amount.t -> t Quickcheck.Generator.t -type var = - (Public_key.Compressed.var, Token_id.var, Currency.Amount.var) Poly.t +type var = (Public_key.Compressed.var, Token_id.var, Currency.Amount.var) Poly.t val typ : (var, t) Typ.t diff --git a/src/lib/mina_base/pending_coinbase.ml b/src/lib/mina_base/pending_coinbase.ml index fe5c9d4de13..4b9fbf39f3e 100644 --- a/src/lib/mina_base/pending_coinbase.ml +++ b/src/lib/mina_base/pending_coinbase.ml @@ -40,7 +40,7 @@ module Coinbase_data = struct let to_input (pk, amount) = let open Random_oracle.Input in List.reduce_exn ~f:append - [Public_key.Compressed.to_input pk; bitstring (Amount.to_bits amount)] + [ Public_key.Compressed.to_input pk; bitstring (Amount.to_bits amount) ] module Checked = struct let to_input (public_key, amount) = @@ -49,22 +49,23 @@ module Coinbase_data = struct [ Public_key.Compressed.Checked.to_input public_key ; bitstring (Bitstring_lib.Bitstring.Lsb_first.to_list - (Amount.var_to_bits amount)) ] + (Amount.var_to_bits amount)) + ] end let typ : (var, t) Typ.t = let spec = let open Data_spec in - [Public_key.Compressed.typ; Amount.typ] + [ Public_key.Compressed.typ; Amount.typ ] in let of_hlist : 'public_key 'amount. 
( unit , 'public_key -> 'amount -> unit ) H_list.t -> 'public_key * 'amount = let open H_list in - fun [public_key; amount] -> (public_key, amount) + fun [ public_key; amount ] -> (public_key, amount) in - let to_hlist (public_key, amount) = H_list.[public_key; amount] in + let to_hlist (public_key, amount) = H_list.[ public_key; amount ] in Typ.of_hlistable spec ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -106,8 +107,7 @@ end = struct end end] - [%%define_locally - Int.(( > ), to_string, zero, to_int, of_int, equal)] + [%%define_locally Int.(( > ), to_string, zero, to_int, of_int, equal)] let incr_by_one t1 = let t2 = t1 + 1 in @@ -151,15 +151,14 @@ module Coinbase_stack = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Core.Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) @@ -188,7 +187,7 @@ module Coinbase_stack = struct (Random_oracle.Input.append (Coinbase_data.Checked.to_input cb) (var_to_input h))) - |> var_of_hash_packed ) + |> var_of_hash_packed) let check_merge (_, t1) (s2, _) = equal_var t1 s2 @@ -211,15 +210,14 @@ module Stack_hash = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Core.Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) @@ -237,7 +235,7 @@ module State_stack = struct [%%versioned module Stable = struct module V1 = struct - type 'stack_hash t = {init: 'stack_hash; curr: 'stack_hash} + type 'stack_hash t = { init : 'stack_hash; curr 
: 'stack_hash } [@@deriving sexp, compare, hash, yojson, equal, hlist] end end] @@ -260,7 +258,7 @@ module State_stack = struct let%map init, curr = Quickcheck.Generator.tuple2 Stack_hash.gen Stack_hash.gen in - {Poly.init; curr} + { Poly.init; curr } let to_input (t : t) = Random_oracle.Input.append @@ -273,9 +271,11 @@ module State_stack = struct (Stack_hash.var_to_input t.curr) let var_of_t (t : t) = - {Poly.init= Stack_hash.var_of_t t.init; curr= Stack_hash.var_of_t t.curr} + { Poly.init = Stack_hash.var_of_t t.init + ; curr = Stack_hash.var_of_t t.curr + } - let data_spec = Snark_params.Tick.Data_spec.[Stack_hash.typ; Stack_hash.typ] + let data_spec = Snark_params.Tick.Data_spec.[ Stack_hash.typ; Stack_hash.typ ] let typ : (var, t) Typ.t = Snark_params.Tick.Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -284,8 +284,7 @@ module State_stack = struct let to_bits (t : t) = Stack_hash.to_bits t.init @ Stack_hash.to_bits t.curr - let to_bytes (t : t) = - Stack_hash.to_bytes t.init ^ Stack_hash.to_bytes t.curr + let to_bytes (t : t) = Stack_hash.to_bytes t.init ^ Stack_hash.to_bytes t.curr let equal_var (v1 : var) (v2 : var) = let open Tick.Checked.Let_syntax in @@ -297,22 +296,23 @@ module State_stack = struct (var, 'a) Tick0.Checked.t = let%bind init = Stack_hash.if_ cond ~then_:then_.init ~else_:else_.init in let%map curr = Stack_hash.if_ cond ~then_:then_.curr ~else_:else_.curr in - {Poly.init; curr} + { Poly.init; curr } let push (t : t) (state_body_hash : State_body_hash.t) : t = (* this is the same computation for combining state hashes and state body hashes as `Protocol_state.hash_abstract', not available here because it would create a module dependency cycle - *) + *) { t with - curr= + curr = Random_oracle.hash ~init:Hash_prefix.protocol_state - [|(t.curr :> Field.t); (state_body_hash :> Field.t)|] - |> Stack_hash.of_hash } + [| (t.curr :> Field.t); (state_body_hash :> Field.t) |] + |> Stack_hash.of_hash + } - let empty : t = {Poly.init= 
Stack_hash.dummy; curr= Stack_hash.dummy} + let empty : t = { Poly.init = Stack_hash.dummy; curr = Stack_hash.dummy } - let create ~init = {Poly.init; curr= init} + let create ~init = { Poly.init; curr = init } module Checked = struct type t = var @@ -322,20 +322,21 @@ module State_stack = struct let curr = Random_oracle.Checked.hash ~init:Hash_prefix.protocol_state [| Stack_hash.var_to_hash_packed t.curr - ; State_body_hash.var_to_hash_packed state_body_hash |] + ; State_body_hash.var_to_hash_packed state_body_hash + |] |> Stack_hash.var_of_hash_packed in - {t with curr} ) + { t with curr }) let check_merge (s1, t1) (s2, t2) = (*state stacks are updated for every transaction in transaction snark but - only once for every blockchain snark. Therefore, source stacks (and - target stacks) will be equal for transactions in the same block*) + only once for every blockchain snark. Therefore, source stacks (and + target stacks) will be equal for transactions in the same block*) let%bind eq_src = equal_var s1 s2 and eq_target = equal_var t1 t2 and correct_transition = equal_var t1 s2 in let%bind same_update = Boolean.(eq_src &&& eq_target) in - Boolean.any [same_update; correct_transition] + Boolean.any [ same_update; correct_transition ] end end @@ -355,15 +356,14 @@ module Hash_builder = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) @@ -375,7 +375,7 @@ module Hash_builder = struct let merge ~height (h1 : t) (h2 : t) = Random_oracle.hash ~init:(Hash_prefix.coinbase_merkle_tree height) - [|(h1 :> field); (h2 :> field)|] + [| (h1 :> field); (h2 :> field) |] |> of_hash let empty_hash = @@ -427,9 +427,7 @@ module Update = struct 
Boolean.(var_of_value x, var_of_value y) let typ = - Typ.transport - Typ.(Boolean.typ * Boolean.typ) - ~there:to_bits ~back:of_bits + Typ.transport Typ.(Boolean.typ * Boolean.typ) ~there:to_bits ~back:of_bits module Checked = struct let no_update (b0, b1) = Boolean.((not b0) &&& not b1) @@ -446,7 +444,7 @@ module Update = struct module Stable = struct module V1 = struct type ('action, 'coinbase_amount) t = - {action: 'action; coinbase_amount: 'coinbase_amount} + { action : 'action; coinbase_amount : 'coinbase_amount } [@@deriving sexp, to_yojson, hlist] end end] @@ -462,22 +460,23 @@ module Update = struct end end] - [%%define_locally - Poly.(to_hlist, of_hlist)] + [%%define_locally Poly.(to_hlist, of_hlist)] type var = (Action.var, Amount.var) Poly.t let typ = let open Snark_params.Tick.Typ in of_hlistable ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist - ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist [Action.typ; Amount.typ] + ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + [ Action.typ; Amount.typ ] let genesis : t = - {coinbase_amount= Currency.Amount.zero; action= Action.Update_none} + { coinbase_amount = Currency.Amount.zero; action = Action.Update_none } let var_of_t (t : t) : var = - { action= Action.var_of_t t.action - ; coinbase_amount= Amount.var_of_t t.coinbase_amount } + { action = Action.var_of_t t.action + ; coinbase_amount = Amount.var_of_t t.coinbase_amount + } end (* Sparse_ledger.Make is applied more than once in the code, so @@ -493,7 +492,7 @@ module Stack_versioned = struct module Stable = struct module V1 = struct type ('data_stack, 'state_stack) t = - {data: 'data_stack; state: 'state_stack} + { data : 'data_stack; state : 'state_stack } [@@deriving yojson, hash, sexp, equal, compare] end end] @@ -562,7 +561,7 @@ module T = struct module Poly = struct type ('data_stack, 'state_stack) t = ('data_stack, 'state_stack) Stack_versioned.Poly.t = - {data: 'data_stack; state: 'state_stack} + { data : 'data_stack; state : 'state_stack } 
[@@deriving yojson, hash, sexp, compare, hlist] end @@ -572,7 +571,7 @@ module T = struct type var = (Coinbase_stack.var, State_stack.var) Poly.t - let to_input ({data; state} : t) = + let to_input ({ data; state } : t) = Random_oracle.Input.append (Coinbase_stack.to_input data) (State_stack.to_input state) @@ -582,7 +581,7 @@ module T = struct hash ~init:Hash_prefix_states.coinbase_stack (pack_input (to_input t))) |> Hash_builder.of_digest - let var_to_input ({data; state} : var) = + let var_to_input ({ data; state } : var) = Random_oracle.Input.append (Coinbase_stack.var_to_input data) (State_stack.var_to_input state) @@ -591,20 +590,21 @@ module T = struct make_checked (fun () -> Random_oracle.Checked.( hash ~init:Hash_prefix_states.coinbase_stack - (pack_input (var_to_input t))) ) + (pack_input (var_to_input t)))) let var_of_t t = - { Poly.data= Coinbase_stack.var_of_t t.Poly.data - ; state= State_stack.var_of_t t.state } + { Poly.data = Coinbase_stack.var_of_t t.Poly.data + ; state = State_stack.var_of_t t.state + } let gen = let open Base_quickcheck.Generator.Let_syntax in let%bind data = Coinbase_stack.gen in let%map state = State_stack.gen in - {Poly.data; state} + { Poly.data; state } let data_spec = - Snark_params.Tick.Data_spec.[Coinbase_stack.typ; State_stack.typ] + Snark_params.Tick.Data_spec.[ Coinbase_stack.typ; State_stack.typ ] let typ : (var, t) Typ.t = Snark_params.Tick.Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -633,10 +633,10 @@ module T = struct let open Tick0.Boolean in b1 &&& b2 - let empty = {Poly.data= Coinbase_stack.empty; state= State_stack.empty} + let empty = { Poly.data = Coinbase_stack.empty; state = State_stack.empty } let create_with (t : t) = - {empty with state= State_stack.create ~init:t.state.curr} + { empty with state = State_stack.create ~init:t.state.curr } let equal_state_hash t1 t2 = State_stack.equal t1.Poly.state t2.Poly.state @@ -644,10 +644,10 @@ module T = struct let push_coinbase (cb : Coinbase.t) t = 
let data = Coinbase_stack.push t.Poly.data cb in - {t with data} + { t with data } let push_state (state_body_hash : State_body_hash.t) (t : t) = - {t with state= State_stack.push t.state state_body_hash} + { t with state = State_stack.push t.state state_body_hash } let if_ (cond : Tick0.Boolean.var) ~(then_ : var) ~(else_ : var) : (var, 'a) Tick0.Checked.t = @@ -657,7 +657,7 @@ module T = struct let%map state = State_stack.if_ cond ~then_:then_.state ~else_:else_.state in - {Poly.data; state} + { Poly.data; state } module Checked = struct type t = var @@ -665,11 +665,11 @@ module T = struct let push_coinbase (coinbase : Coinbase_data.var) (t : t) : (t, 'a) Tick0.Checked.t = let%map data = Coinbase_stack.Checked.push t.data coinbase in - {t with data} + { t with data } let push_state (state_body_hash : State_body_hash.var) (t : t) = let%map state = State_stack.Checked.push t.state state_body_hash in - {t with state} + { t with state } let check_merge ~transition1:((s, t) : t * t) ~transition2:((s', t') : t * t) : (Boolean.var, _) Tick0.Checked.t = @@ -677,15 +677,14 @@ module T = struct Coinbase_stack.Checked.check_merge (s.data, t.data) (s'.data, t'.data) in let%bind valid_state_stacks = - State_stack.Checked.check_merge (s.state, t.state) - (s'.state, t'.state) + State_stack.Checked.check_merge (s.state, t.state) (s'.state, t'.state) in Boolean.(valid_coinbase_stacks && valid_state_stacks) let empty = var_of_t empty let create_with (t : var) = - {empty with state= State_stack.create ~init:t.state.init} + { empty with state = State_stack.create ~init:t.state.init } let if_ = if_ end @@ -764,7 +763,7 @@ module T = struct Tick.make_checked (fun () -> Random_oracle.Checked.hash ~init:(Hash_prefix.coinbase_merkle_tree height) - [|h1; h2|] ) + [| h1; h2 |]) let assert_equal h1 h2 = Field.Checked.Assert.equal h1 h2 @@ -797,7 +796,7 @@ module T = struct | Find_index_of_oldest_stack : Address.value Request.t | Get_previous_stack : State_stack.t Request.t - let 
reraise_merkle_requests (With {request; respond}) = + let reraise_merkle_requests (With { request; respond }) = match request with | Merkle_tree.Get_path addr -> respond (Delegate (Coinbase_stack_path addr)) @@ -815,7 +814,7 @@ module T = struct let%snarkydef add_coinbase ~(constraint_constants : Genesis_constants.Constraint_constants.t) t - ({action; coinbase_amount= amount} : Update.var) ~coinbase_receiver + ({ action; coinbase_amount = amount } : Update.var) ~coinbase_receiver ~supercharge_coinbase state_body_hash = let depth = constraint_constants.pending_coinbase_depth in let%bind addr1, addr2 = @@ -823,7 +822,7 @@ module T = struct Typ.(Address.typ ~depth * Address.typ ~depth) As_prover.( map (read Update.Action.typ action) ~f:(fun act -> - Find_index_of_newest_stacks act )) + Find_index_of_newest_stacks act)) in let equal_to_zero x = Amount.(equal_var x (var_of_t zero)) in let%bind no_update = Update.Action.Checked.no_update action in @@ -833,7 +832,7 @@ module T = struct request_witness State_stack.typ As_prover.(map (return ()) ~f:(fun () -> Get_previous_stack)) in - let stack_initialized = {stack with state= previous_state_stack} in + let stack_initialized = { stack with state = previous_state_stack } in let%bind stack_with_state_hash = Stack.Checked.push_state state_body_hash stack_initialized in @@ -889,8 +888,8 @@ module T = struct ~else_:stack_with_amount2) in (*This is for the second stack for when transactions in a block occupy - two trees of the scan state; the second tree will carry-forward the state - stack from the previous block, push the new state, and may or may not have a coinbase*) + two trees of the scan state; the second tree will carry-forward the state + stack from the previous block, push the new state, and may or may not have a coinbase*) let update_stack2 (init_stack : Stack.var) (stack0 : Stack.var) = let%bind add_coinbase = Update.Action.Checked.update_two_stacks_coinbase_in_second action @@ -905,8 +904,9 @@ module T = struct 
let%bind stack_with_state = Stack.Checked.push_state state_body_hash { stack0 with - state= - State_stack.create ~init:init_stack.Stack.Poly.state.curr } + state = + State_stack.create ~init:init_stack.Stack.Poly.state.curr + } in Stack.if_ update_state ~then_:stack_with_state ~else_:stack0 in @@ -974,7 +974,7 @@ module T = struct module Poly = struct type ('tree, 'stack_id) t = - {tree: 'tree; pos_list: 'stack_id list; new_pos: 'stack_id} + { tree : 'tree; pos_list : 'stack_id list; new_pos : 'stack_id } [@@deriving sexp, to_yojson] end @@ -983,7 +983,7 @@ module T = struct let init_hash = Stack.data_hash Stack.empty let hash_at_level = - let cached = ref [|init_hash|] in + let cached = ref [| init_hash |] in fun i -> let len = Array.length !cached in ( if i >= len then @@ -995,7 +995,7 @@ module T = struct ~f:(fun i -> cur_hash := Hash.merge ~height:(i + len - 1) !cur_hash !cur_hash ; - !cur_hash )) ) ; + !cur_hash)) ) ; !cached.(i) let create_exn' ~depth () = @@ -1016,15 +1016,15 @@ module T = struct (Or_error.ok_exn (Stack_id.incr_by_one key)) in let root_hash = hash_at_level depth in - { Poly.tree= + { Poly.tree = make_tree (Merkle_tree.of_hash ~depth ~next_available_token:() root_hash) Stack_id.zero - ; pos_list= [] - ; new_pos= Stack_id.zero } + ; pos_list = [] + ; new_pos = Stack_id.zero + } - [%%define_locally - Or_error.(try_with)] + [%%define_locally Or_error.(try_with)] let create ~depth () = try_with (fun () -> create_exn' ~depth ()) @@ -1053,7 +1053,7 @@ module T = struct let open Or_error.Let_syntax in if is_new_stack then let%map new_pos = next_index ~depth t in - {t with pos_list= t.new_pos :: t.pos_list; new_pos} + { t with pos_list = t.new_pos :: t.pos_list; new_pos } else Ok t let set_stack ~depth (t : t) index stack ~is_new_stack = @@ -1061,7 +1061,7 @@ module T = struct let%bind tree = try_with (fun () -> Merkle_tree.set_exn t.tree index stack) in - incr_index ~depth {t with tree} ~is_new_stack + incr_index ~depth { t with tree } ~is_new_stack 
let latest_stack_id (t : t) ~is_new_stack = if is_new_stack then t.new_pos @@ -1070,12 +1070,10 @@ module T = struct let curr_stack_id (t : t) = List.hd t.pos_list let current_stack t = - let prev_stack_id = - Option.value ~default:Stack_id.zero (curr_stack_id t) - in + let prev_stack_id = Option.value ~default:Stack_id.zero (curr_stack_id t) in Or_error.try_with (fun () -> let index = Merkle_tree.find_index_exn t.tree prev_stack_id in - Merkle_tree.get_exn t.tree index ) + Merkle_tree.get_exn t.tree index) let latest_stack (t : t) ~is_new_stack = let open Or_error.Let_syntax in @@ -1083,11 +1081,11 @@ module T = struct let%bind res = Or_error.try_with (fun () -> let index = Merkle_tree.find_index_exn t.tree key in - Merkle_tree.get_exn t.tree index ) + Merkle_tree.get_exn t.tree index) in if is_new_stack then let%map prev_stack = current_stack t in - {res with state= State_stack.create ~init:prev_stack.state.curr} + { res with state = State_stack.create ~init:prev_stack.state.curr } else Ok res let oldest_stack_id (t : t) = List.last t.pos_list @@ -1131,9 +1129,9 @@ module T = struct let%map t' = set_stack ~depth t stack_index Stack.empty ~is_new_stack:false in - (stack, {t' with pos_list= remaining}) + (stack, { t' with pos_list = remaining }) - let hash_extra ({pos_list; new_pos; _} : t) = + let hash_extra ({ pos_list; new_pos; _ } : t) = let h = Digestif.SHA256.init () in let h = Digestif.SHA256.feed_string h @@ -1149,7 +1147,7 @@ module T = struct (path !pending_coinbase idx |> Or_error.ok_exn) ~f:(function `Left h -> h | `Right h -> h) in - stage (fun (With {request; respond}) -> + stage (fun (With { request; respond }) -> match request with | Checked.Coinbase_stack_path idx -> let path = @@ -1203,8 +1201,9 @@ module T = struct let stack = current_stack !pending_coinbase |> Or_error.ok_exn in - { State_stack.Poly.init= stack.state.curr - ; curr= stack.state.curr } + { State_stack.Poly.init = stack.state.curr + ; curr = stack.state.curr + } else let stack = 
latest_stack !pending_coinbase ~is_new_stack @@ -1214,7 +1213,7 @@ module T = struct in respond (Provide prev_state) | _ -> - unhandled ) + unhandled) end include T @@ -1224,7 +1223,7 @@ module Poly_versioned = struct module Stable = struct module V1 = struct type ('tree, 'stack_id) t = ('tree, 'stack_id) T.Poly.t = - {tree: 'tree; pos_list: 'stack_id list; new_pos: 'stack_id} + { tree : 'tree; pos_list : 'stack_id list; new_pos : 'stack_id } [@@deriving sexp, to_yojson] end end] @@ -1255,8 +1254,7 @@ let%test_unit "add stack + remove stack = initial tree " = in let depth = constraint_constants.pending_coinbase_depth in let coinbases_gen = - Quickcheck.Generator.list_non_empty - (Coinbase.Gen.gen ~constraint_constants) + Quickcheck.Generator.list_non_empty (Coinbase.Gen.gen ~constraint_constants) in let pending_coinbases = ref (create ~depth () |> Or_error.ok_exn) in Quickcheck.test coinbases_gen ~trials:50 ~f:(fun cbs -> @@ -1270,14 +1268,14 @@ let%test_unit "add stack + remove stack = initial tree " = |> Or_error.ok_exn in is_new_stack := false ; - t ) + t) in let _, after_del = remove_coinbase_stack ~depth after_adding |> Or_error.ok_exn in pending_coinbases := after_del ; assert (Hash.equal (merkle_root after_del) init) ; - Async.Deferred.return () ) ) + Async.Deferred.return ())) module type Pending_coinbase_intf = sig type t [@@deriving sexp] @@ -1291,8 +1289,8 @@ end let add_coinbase_with_zero_checks (type t) (module T : Pending_coinbase_intf with type t = t) (t : t) - ~(constraint_constants : Genesis_constants.Constraint_constants.t) - ~coinbase ~supercharged_coinbase ~state_body_hash ~is_new_stack = + ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~coinbase + ~supercharged_coinbase ~state_body_hash ~is_new_stack = let depth = constraint_constants.pending_coinbase_depth in if Amount.equal coinbase.Coinbase.amount Amount.zero then t else @@ -1320,8 +1318,7 @@ let add_coinbase_with_zero_checks (type t) in if Amount.equal coinbase'.amount 
Amount.zero then interim_tree else - T.add_coinbase ~depth interim_tree ~coinbase:coinbase' - ~is_new_stack:false + T.add_coinbase ~depth interim_tree ~coinbase:coinbase' ~is_new_stack:false |> Or_error.ok_exn let%test_unit "Checked_stack = Unchecked_stack" = @@ -1346,7 +1343,7 @@ let%test_unit "Checked_stack = Unchecked_stack" = let (), x = Or_error.ok_exn (run_and_check comp ()) in x in - assert (Stack.equal unchecked checked) ) + assert (Stack.equal unchecked checked)) let%test_unit "Checked_tree = Unchecked_tree" = let open Quickcheck in @@ -1392,7 +1389,9 @@ let%test_unit "Checked_tree = Unchecked_tree" = handle (f_add_coinbase (Hash.var_of_t (merkle_root pending_coinbases)) - {Update.Poly.action= action_var; coinbase_amount= amount_var} + { Update.Poly.action = action_var + ; coinbase_amount = amount_var + } ~coinbase_receiver:coinbase_receiver_var ~supercharge_coinbase:supercharge_coinbase_var state_body_hash_var) @@ -1403,7 +1402,7 @@ let%test_unit "Checked_tree = Unchecked_tree" = let (), x = Or_error.ok_exn (run_and_check comp ()) in x in - assert (Hash.equal (merkle_root unchecked) checked_merkle_root) ) + assert (Hash.equal (merkle_root unchecked) checked_merkle_root)) let%test_unit "Checked_tree = Unchecked_tree after pop" = let open Quickcheck in @@ -1450,7 +1449,9 @@ let%test_unit "Checked_tree = Unchecked_tree after pop" = handle (f_add_coinbase (Hash.var_of_t (merkle_root pending_coinbases)) - {Update.Poly.action= action_var; coinbase_amount= amount_var} + { Update.Poly.action = action_var + ; coinbase_amount = amount_var + } ~coinbase_receiver:coinbase_receiver_var ~supercharge_coinbase:supercharge_coinbase_var state_body_hash_var) @@ -1486,14 +1487,15 @@ let%test_unit "Checked_tree = Unchecked_tree after pop" = assert ( Hash.equal (merkle_root unchecked_after_pop) - checked_merkle_root_after_pop ) ) + checked_merkle_root_after_pop )) let%test_unit "push and pop multiple stacks" = let open Quickcheck in let module Pending_coinbase = T in let 
constraint_constants = { Genesis_constants.Constraint_constants.for_unit_tests with - pending_coinbase_depth= 3 } + pending_coinbase_depth = 3 + } in let depth = constraint_constants.pending_coinbase_depth in let t_of_coinbases t = function @@ -1503,11 +1505,10 @@ let%test_unit "push and pop multiple stacks" = |> Or_error.ok_exn in (Pending_coinbase.Stack.empty, t') - | ((initial_coinbase, _supercharged_coinbase), state_body_hash) - :: coinbases -> + | ((initial_coinbase, _supercharged_coinbase), state_body_hash) :: coinbases + -> let t' = - Pending_coinbase.add_state ~depth t state_body_hash - ~is_new_stack:true + Pending_coinbase.add_state ~depth t state_body_hash ~is_new_stack:true |> Or_error.ok_exn |> Pending_coinbase.add_coinbase ~depth ~coinbase:initial_coinbase ~is_new_stack:false @@ -1515,14 +1516,15 @@ let%test_unit "push and pop multiple stacks" = in let updated = List.fold coinbases ~init:t' - ~f:(fun pending_coinbases - ( (coinbase, `Supercharged_coinbase supercharged_coinbase) - , state_body_hash ) + ~f:(fun + pending_coinbases + ( (coinbase, `Supercharged_coinbase supercharged_coinbase) + , state_body_hash ) -> add_coinbase_with_zero_checks ~constraint_constants (module Pending_coinbase) - pending_coinbases ~coinbase ~is_new_stack:false - ~state_body_hash ~supercharged_coinbase ) + pending_coinbases ~coinbase ~is_new_stack:false ~state_body_hash + ~supercharged_coinbase) in let new_stack = Or_error.ok_exn @@ -1535,7 +1537,7 @@ let%test_unit "push and pop multiple stacks" = List.fold ~init:([], pending_coinbases) coinbase_lists ~f:(fun (stacks, pc) coinbases -> let new_stack, pc = t_of_coinbases pc coinbases in - (new_stack :: stacks, pc) ) + (new_stack :: stacks, pc)) in (* remove the oldest stack and check if that's the expected one *) let remove_check t expected_stack = diff --git a/src/lib/mina_base/pending_coinbase_intf.ml b/src/lib/mina_base/pending_coinbase_intf.ml index 89fb5e8455c..cfa868eeaa7 100644 --- 
a/src/lib/mina_base/pending_coinbase_intf.ml +++ b/src/lib/mina_base/pending_coinbase_intf.ml @@ -187,7 +187,7 @@ module type S = sig module Stable : sig module V1 : sig type ('action, 'coinbase_amount) t = - {action: 'action; coinbase_amount: 'coinbase_amount} + { action : 'action; coinbase_amount : 'coinbase_amount } [@@deriving sexp] end end] diff --git a/src/lib/mina_base/pending_coinbase_witness.ml b/src/lib/mina_base/pending_coinbase_witness.ml index bb7edf88bf0..15c3a1d7708 100644 --- a/src/lib/mina_base/pending_coinbase_witness.ml +++ b/src/lib/mina_base/pending_coinbase_witness.ml @@ -4,7 +4,7 @@ open Core_kernel module Stable = struct module V1 = struct type t = - {pending_coinbases: Pending_coinbase.Stable.V1.t; is_new_stack: bool} + { pending_coinbases : Pending_coinbase.Stable.V1.t; is_new_stack : bool } [@@deriving sexp, to_yojson] let to_latest = Fn.id diff --git a/src/lib/mina_base/permissions.ml b/src/lib/mina_base/permissions.ml index 79689120b28..72d342f0059 100644 --- a/src/lib/mina_base/permissions.ml +++ b/src/lib/mina_base/permissions.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick module Mina_numbers = Mina_numbers @@ -65,9 +63,9 @@ module Auth_required = struct | Signature | Both | Impossible (* Both and either can both be subsumed in verification key. - It is good to have "Either" as a separate thing to spare the owner from - having to make a proof instead of a signature. Both, I'm not sure if there's - a good justification for. *) + It is good to have "Either" as a separate thing to spare the owner from + having to make a proof instead of a signature. Both, I'm not sure if there's + a good justification for. 
*) [@@deriving sexp, equal, compare, hash, yojson, enum] let to_latest = Fn.id @@ -108,83 +106,108 @@ module Auth_required = struct Impossible Signature sufficient None - *) + *) module Encoding = struct type 'bool t = - {constant: 'bool; signature_necessary: 'bool; signature_sufficient: 'bool} + { constant : 'bool + ; signature_necessary : 'bool + ; signature_sufficient : 'bool + } [@@deriving hlist, fields] let to_input t = - let [x; y; z] = to_hlist t in - Random_oracle.Input.bitstring [x; y; z] + let [ x; y; z ] = to_hlist t in + Random_oracle.Input.bitstring [ x; y; z ] let map t ~f = - { constant= f t.constant - ; signature_necessary= f t.signature_necessary - ; signature_sufficient= f t.signature_sufficient } + { constant = f t.constant + ; signature_necessary = f t.signature_necessary + ; signature_sufficient = f t.signature_sufficient + } let _ = map - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let if_ b ~then_:t ~else_:e = let open Pickles.Impls.Step in - { constant= Boolean.if_ b ~then_:t.constant ~else_:e.constant - ; signature_necessary= + { constant = Boolean.if_ b ~then_:t.constant ~else_:e.constant + ; signature_necessary = Boolean.if_ b ~then_:t.signature_necessary ~else_:e.signature_necessary - ; signature_sufficient= + ; signature_sufficient = Boolean.if_ b ~then_:t.signature_sufficient - ~else_:e.signature_sufficient } + ~else_:e.signature_sufficient + } [%%endif] end let encode : t -> bool Encoding.t = function | Impossible -> - {constant= true; signature_necessary= true; signature_sufficient= false} + { constant = true + ; signature_necessary = true + ; signature_sufficient = false + } | None -> - {constant= true; signature_necessary= false; signature_sufficient= true} + { constant = true + ; signature_necessary = false + ; signature_sufficient = true + } | Proof -> - { constant= false - ; signature_necessary= false - ; signature_sufficient= false } + { constant = false + ; signature_necessary = false + ; 
signature_sufficient = false + } | Signature -> - {constant= false; signature_necessary= true; signature_sufficient= true} + { constant = false + ; signature_necessary = true + ; signature_sufficient = true + } | Either -> - { constant= false - ; signature_necessary= false - ; signature_sufficient= true } + { constant = false + ; signature_necessary = false + ; signature_sufficient = true + } | Both -> - { constant= false - ; signature_necessary= true - ; signature_sufficient= false } + { constant = false + ; signature_necessary = true + ; signature_sufficient = false + } let decode : bool Encoding.t -> t = function - | {constant= true; signature_necessary= _; signature_sufficient= false} -> + | { constant = true; signature_necessary = _; signature_sufficient = false } + -> Impossible - | {constant= true; signature_necessary= _; signature_sufficient= true} -> - None - | {constant= false; signature_necessary= false; signature_sufficient= false} + | { constant = true; signature_necessary = _; signature_sufficient = true } -> + None + | { constant = false + ; signature_necessary = false + ; signature_sufficient = false + } -> Proof - | {constant= false; signature_necessary= true; signature_sufficient= true} - -> + | { constant = false + ; signature_necessary = true + ; signature_sufficient = true + } -> Signature - | {constant= false; signature_necessary= false; signature_sufficient= true} - -> + | { constant = false + ; signature_necessary = false + ; signature_sufficient = true + } -> Either - | {constant= false; signature_necessary= true; signature_sufficient= false} - -> + | { constant = false + ; signature_necessary = true + ; signature_sufficient = false + } -> Both let%test_unit "decode encode" = - List.iter [Impossible; Proof; Signature; Either; Both] ~f:(fun t -> - [%test_eq: t] t (decode (encode t)) ) + List.iter [ Impossible; Proof; Signature; Either; Both ] ~f:(fun t -> + [%test_eq: t] t (decode (encode t))) - [%%ifdef - consensus_mechanism] + [%%ifdef 
consensus_mechanism] module Checked = struct type t = Boolean.var Encoding.t @@ -196,7 +219,7 @@ module Auth_required = struct let constant t = Encoding.map (encode t) ~f:Boolean.var_of_value let eval_no_proof - ({constant; signature_necessary= _; signature_sufficient} : t) + ({ constant; signature_necessary = _; signature_sufficient } : t) ~signature_verifies = (* ways authorization can succeed when no proof is present: - None @@ -210,7 +233,7 @@ module Auth_required = struct signature_sufficient &&& (constant ||| ((not constant) &&& signature_verifies)) - let spec_eval ({constant; signature_necessary; signature_sufficient} : t) + let spec_eval ({ constant; signature_necessary; signature_sufficient } : t) ~signature_verifies = let open Pickles.Impls.Step.Boolean in let impossible = constant &&& not signature_sufficient in @@ -222,15 +245,14 @@ module Auth_required = struct let didn't_fail_yet = result in (* If the transaction already failed to verify, we don't need to assert that the proof should verify. 
*) - ( result - , `proof_must_verify (didn't_fail_yet &&& not signature_sufficient) ) + (result, `proof_must_verify (didn't_fail_yet &&& not signature_sufficient)) end let typ = let t = let open Encoding in Typ.of_hlistable - [Boolean.typ; Boolean.typ; Boolean.typ] + [ Boolean.typ; Boolean.typ; Boolean.typ ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist in @@ -270,13 +292,14 @@ module Poly = struct module Stable = struct module V1 = struct type ('bool, 'controller) t = - { stake: 'bool - ; edit_state: 'controller - ; send: 'controller - ; receive: 'controller - ; set_delegate: 'controller - ; set_permissions: 'controller - ; set_verification_key: 'controller } + { stake : 'bool + ; edit_state : 'controller + ; send : 'controller + ; receive : 'controller + ; set_delegate : 'controller + ; set_permissions : 'controller + ; set_verification_key : 'controller + } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] @@ -284,7 +307,7 @@ module Poly = struct let to_input controller t = let f mk acc field = mk (Core_kernel.Field.get field t) :: acc in Stable.Latest.Fields.fold ~init:[] - ~stake:(f (fun x -> Random_oracle.Input.bitstring [x])) + ~stake:(f (fun x -> Random_oracle.Input.bitstring [ x ])) ~edit_state:(f controller) ~send:(f controller) ~set_delegate:(f controller) ~set_permissions:(f controller) ~set_verification_key:(f controller) ~receive:(f controller) @@ -301,8 +324,7 @@ module Stable = struct end end] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Checked = struct type t = (Boolean.var, Auth_required.Checked.t) Poly.Stable.Latest.t @@ -339,7 +361,8 @@ let typ = ; Auth_required.typ ; Auth_required.typ ; Auth_required.typ - ; Auth_required.typ ] + ; Auth_required.typ + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -348,19 +371,21 @@ let typ = let to_input x = Poly.to_input Auth_required.to_input x let 
user_default : t = - { stake= true - ; edit_state= Signature - ; send= Signature - ; receive= None - ; set_delegate= Signature - ; set_permissions= Signature - ; set_verification_key= Signature } + { stake = true + ; edit_state = Signature + ; send = Signature + ; receive = None + ; set_delegate = Signature + ; set_permissions = Signature + ; set_verification_key = Signature + } let empty : t = - { stake= false - ; edit_state= None - ; send= None - ; receive= None - ; set_delegate= None - ; set_permissions= None - ; set_verification_key= None } + { stake = false + ; edit_state = None + ; send = None + ; receive = None + ; set_delegate = None + ; set_permissions = None + ; set_verification_key = None + } diff --git a/src/lib/mina_base/permissions.mli b/src/lib/mina_base/permissions.mli index a340439a726..f114f2d62ff 100644 --- a/src/lib/mina_base/permissions.mli +++ b/src/lib/mina_base/permissions.mli @@ -33,7 +33,7 @@ module Auth_required : sig val spec_eval : t -> signature_verifies:Boolean.var - -> Boolean.var * [`proof_must_verify of Boolean.var] + -> Boolean.var * [ `proof_must_verify of Boolean.var ] end val typ : (Checked.t, t) Typ.t @@ -46,13 +46,14 @@ module Poly : sig module Stable : sig module V1 : sig type ('bool, 'controller) t = - { stake: 'bool - ; edit_state: 'controller - ; send: 'controller - ; receive: 'controller (* TODO: Consider having fee *) - ; set_delegate: 'controller - ; set_permissions: 'controller - ; set_verification_key: 'controller } + { stake : 'bool + ; edit_state : 'controller + ; send : 'controller + ; receive : 'controller (* TODO: Consider having fee *) + ; set_delegate : 'controller + ; set_permissions : 'controller + ; set_verification_key : 'controller + } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] diff --git a/src/lib/mina_base/proof.ml b/src/lib/mina_base/proof.ml index 906f1e45e34..a6a564ab7a5 100644 --- a/src/lib/mina_base/proof.ml +++ b/src/lib/mina_base/proof.ml @@ -1,5 +1,4 @@ -[%%import 
-"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel @@ -17,8 +16,7 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(to_yojson, of_yojson)] +[%%define_locally Stable.Latest.(to_yojson, of_yojson)] let%test_module "proof-tests" = ( module struct @@ -26,8 +24,7 @@ let%test_module "proof-tests" = in Tock_backend.Proof, which is not versioned *) - [%%if - curve_size = 255] + [%%if curve_size = 255] let%test "proof serialization v1" = let proof = blockchain_dummy in diff --git a/src/lib/mina_base/proof.mli b/src/lib/mina_base/proof.mli index 7dd690e2dd6..3697f661fb3 100644 --- a/src/lib/mina_base/proof.mli +++ b/src/lib/mina_base/proof.mli @@ -1,7 +1,6 @@ open Pickles_types -type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t -[@@deriving sexp, compare, yojson] +type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t [@@deriving sexp, compare, yojson] val blockchain_dummy : t diff --git a/src/lib/mina_base/protocol_constants_checked.ml b/src/lib/mina_base/protocol_constants_checked.ml index a2b2d974a18..ebf909b7112 100644 --- a/src/lib/mina_base/protocol_constants_checked.ml +++ b/src/lib/mina_base/protocol_constants_checked.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -47,34 +45,37 @@ module Value = struct let%bind slots_per_epoch = Int.gen_incl k (8 * k) >>| ( * ) 3 >>| T.of_int and slots_per_sub_window = Int.gen_incl 1 ((k + 9) / 9) in (*TODO: Bug -> Block_time.(to_time x |> of_time) != x for certain values. 
- Eg: 34702788243129 <--> 34702788243128, 8094 <--> 8093*) + Eg: 34702788243129 <--> 34702788243128, 8094 <--> 8093*) let%bind ms = Int64.(gen_log_uniform_incl 0L 9999999999999L) in let end_time = Block_time.of_int64 999999999999999L in let%map genesis_state_timestamp = Block_time.(gen_incl (of_int64 ms) end_time) in - { Poly.k= T.of_int k - ; delta= T.of_int delta + { Poly.k = T.of_int k + ; delta = T.of_int delta ; slots_per_epoch - ; slots_per_sub_window= T.of_int slots_per_sub_window - ; genesis_state_timestamp } + ; slots_per_sub_window = T.of_int slots_per_sub_window + ; genesis_state_timestamp + } end type value = Value.t let value_of_t (t : Genesis_constants.Protocol.t) : value = - { k= T.of_int t.k - ; delta= T.of_int t.delta - ; slots_per_epoch= T.of_int t.slots_per_epoch - ; slots_per_sub_window= T.of_int t.slots_per_sub_window - ; genesis_state_timestamp= Block_time.of_int64 t.genesis_state_timestamp } + { k = T.of_int t.k + ; delta = T.of_int t.delta + ; slots_per_epoch = T.of_int t.slots_per_epoch + ; slots_per_sub_window = T.of_int t.slots_per_sub_window + ; genesis_state_timestamp = Block_time.of_int64 t.genesis_state_timestamp + } let t_of_value (v : value) : Genesis_constants.Protocol.t = - { k= T.to_int v.k - ; delta= T.to_int v.delta - ; slots_per_epoch= T.to_int v.slots_per_epoch - ; slots_per_sub_window= T.to_int v.slots_per_sub_window - ; genesis_state_timestamp= Block_time.to_int64 v.genesis_state_timestamp } + { k = T.to_int v.k + ; delta = T.to_int v.delta + ; slots_per_epoch = T.to_int v.slots_per_epoch + ; slots_per_sub_window = T.to_int v.slots_per_sub_window + ; genesis_state_timestamp = Block_time.to_int64 v.genesis_state_timestamp + } let to_input (t : value) = Random_oracle.Input.bitstrings @@ -82,10 +83,10 @@ let to_input (t : value) = ; T.to_bits t.delta ; T.to_bits t.slots_per_epoch ; T.to_bits t.slots_per_sub_window - ; Block_time.Bits.to_bits t.genesis_state_timestamp |] + ; Block_time.Bits.to_bits t.genesis_state_timestamp + 
|] -[%%if -defined consensus_mechanism] +[%%if defined consensus_mechanism] type var = (T.Checked.t, T.Checked.t, Block_time.Unpacked.var) Poly.t @@ -95,7 +96,8 @@ let data_spec = ; T.Checked.typ ; T.Checked.typ ; T.Checked.typ - ; Block_time.Unpacked.typ ] + ; Block_time.Unpacked.typ + ] let typ = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist @@ -117,7 +119,8 @@ let var_to_input (var : var) = ; delta ; slots_per_epoch ; slots_per_sub_window - ; genesis_state_timestamp |]) + ; genesis_state_timestamp + |]) let%test_unit "value = var" = let compiled = Genesis_constants.for_unit_tests.protocol in @@ -132,6 +135,7 @@ let%test_unit "value = var" = [%test_eq: Value.t] protocol_constants (t_of_value protocol_constants |> value_of_t) in - Quickcheck.test ~trials:100 Value.gen ~examples:[value_of_t compiled] ~f:test + Quickcheck.test ~trials:100 Value.gen ~examples:[ value_of_t compiled ] + ~f:test [%%endif] diff --git a/src/lib/mina_base/receipt.ml b/src/lib/mina_base/receipt.ml index e57477455fe..7db04f7f388 100644 --- a/src/lib/mina_base/receipt.ml +++ b/src/lib/mina_base/receipt.ml @@ -1,13 +1,11 @@ (* receipt.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel module B58_lib = Base58_check -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -39,15 +37,14 @@ module Chain_hash = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) @@ -73,8 +70,7 @@ module Chain_hash = struct Input.(append x (field (t :> Field.t))) |> pack_input |> hash ~init |> of_hash - [%%if - defined consensus_mechanism] + [%%if defined consensus_mechanism] module Checked = struct module Elt = struct @@ 
-106,7 +102,7 @@ module Chain_hash = struct in make_checked (fun () -> hash ~init (pack_input Input.(append x (var_to_input t))) - |> var_of_hash_packed ) + |> var_of_hash_packed) end let%test_unit "checked-unchecked equivalence" = @@ -129,12 +125,11 @@ module Chain_hash = struct let (), x = Or_error.ok_exn (run_and_check comp ()) in x in - assert (equal unchecked checked) ) + assert (equal unchecked checked)) let%test_unit "json" = Quickcheck.test ~trials:20 gen ~sexp_of:sexp_of_t ~f:(fun t -> - assert (Codable.For_tests.check_encoding (module Stable.V1) ~equal t) - ) + assert (Codable.For_tests.check_encoding (module Stable.V1) ~equal t)) [%%endif] end diff --git a/src/lib/mina_base/rpc_intf.ml b/src/lib/mina_base/rpc_intf.ml index ee3c094de83..61d6fa77577 100644 --- a/src/lib/mina_base/rpc_intf.ml +++ b/src/lib/mina_base/rpc_intf.ml @@ -41,10 +41,11 @@ module type Rpc_interface_intf = sig type rpc_handler = | Rpc_handler : - { rpc: ('q, 'r) rpc - ; f: ('q, 'r) rpc_fn - ; cost: 'q -> int - ; budget: int * [`Per of Time.Span.t] } + { rpc : ('q, 'r) rpc + ; f : ('q, 'r) rpc_fn + ; cost : 'q -> int + ; budget : int * [ `Per of Time.Span.t ] + } -> rpc_handler val implementation_of_rpc : ('q, 'r) rpc -> ('q, 'r) rpc_implementation diff --git a/src/lib/mina_base/side_loaded_verification_key.ml b/src/lib/mina_base/side_loaded_verification_key.ml index 43363761ce5..f601a277286 100644 --- a/src/lib/mina_base/side_loaded_verification_key.ml +++ b/src/lib/mina_base/side_loaded_verification_key.ml @@ -1,8 +1,6 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] include Pickles.Side_loaded.Verification_key @@ -29,8 +27,7 @@ module R = struct module Stable = struct module V1 = struct type t = - G.Stable.V1.t - Pickles_base.Side_loaded_verification_key.Repr.Stable.V1.t + G.Stable.V1.t Pickles_base.Side_loaded_verification_key.Repr.Stable.V1.t let to_latest = Fn.id end @@ -56,12 +53,16 @@ module Stable = 
struct type nonrec t = t let to_binable - {Poly.step_data; max_width; wrap_index; wrap_vk= _} = - {Repr.Stable.V1.step_data; max_width; wrap_index} + { Poly.step_data; max_width; wrap_index; wrap_vk = _ } = + { Repr.Stable.V1.step_data; max_width; wrap_index } let of_binable - {Repr.Stable.V1.step_data; max_width; wrap_index= c} = - {Poly.step_data; max_width; wrap_index= c; wrap_vk= Some ()} + { Repr.Stable.V1.step_data; max_width; wrap_index = c } = + { Poly.step_data + ; max_width + ; wrap_index = c + ; wrap_vk = Some () + } end) end end] @@ -70,28 +71,30 @@ let to_input = Pickles_base.Side_loaded_verification_key.to_input let dummy : t = let open Pickles_types in - { step_data= At_most.[] - ; max_width= Pickles_base.Side_loaded_verification_key.Width.zero - ; wrap_index= - (let g = [Snarkette.Pasta.Pallas.(to_affine_exn one)] in - { sigma_comm_0= g - ; sigma_comm_1= g - ; sigma_comm_2= g - ; ql_comm= g - ; qr_comm= g - ; qo_comm= g - ; qm_comm= g - ; qc_comm= g - ; rcm_comm_0= g - ; rcm_comm_1= g - ; rcm_comm_2= g - ; psm_comm= g - ; add_comm= g - ; mul1_comm= g - ; mul2_comm= g - ; emul1_comm= g - ; emul2_comm= g - ; emul3_comm= g }) - ; wrap_vk= None } + { step_data = At_most.[] + ; max_width = Pickles_base.Side_loaded_verification_key.Width.zero + ; wrap_index = + (let g = [ Snarkette.Pasta.Pallas.(to_affine_exn one) ] in + { sigma_comm_0 = g + ; sigma_comm_1 = g + ; sigma_comm_2 = g + ; ql_comm = g + ; qr_comm = g + ; qo_comm = g + ; qm_comm = g + ; qc_comm = g + ; rcm_comm_0 = g + ; rcm_comm_1 = g + ; rcm_comm_2 = g + ; psm_comm = g + ; add_comm = g + ; mul1_comm = g + ; mul2_comm = g + ; emul1_comm = g + ; emul2_comm = g + ; emul3_comm = g + }) + ; wrap_vk = None + } [%%endif] diff --git a/src/lib/mina_base/signature.ml b/src/lib/mina_base/signature.ml index cb58884c5a1..061a9d89ac0 100644 --- a/src/lib/mina_base/signature.ml +++ b/src/lib/mina_base/signature.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel 
-[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -41,13 +39,12 @@ module Stable = struct end module Tests = struct - [%%if - curve_size = 255] + [%%if curve_size = 255] let%test "signature serialization v1 (curve_size=255)" = let signature = - Quickcheck.random_value - ~seed:(`Deterministic "signature serialization") V1.gen + Quickcheck.random_value ~seed:(`Deterministic "signature serialization") + V1.gen in let known_good_digest = "88a094d50a90b5054152af85bd6e60e8" in Ppx_version_runtime.Serialization.check_serialization @@ -79,11 +76,10 @@ module Raw = struct let%test_unit "partial isomorphism" = Quickcheck.test ~trials:300 Stable.Latest.gen ~f:(fun signature -> - [%test_eq: t option] (Some signature) (encode signature |> decode) ) + [%test_eq: t option] (Some signature) (encode signature |> decode)) end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = Field.Var.t * Inner_curve.Scalar.var diff --git a/src/lib/mina_base/signed_command.ml b/src/lib/mina_base/signed_command.ml index aaddac82280..13c49ddc443 100644 --- a/src/lib/mina_base/signed_command.ml +++ b/src/lib/mina_base/signed_command.ml @@ -1,11 +1,9 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Import -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Mina_numbers = Mina_numbers_nonconsensus.Mina_numbers module Currency = Currency_nonconsensus.Currency @@ -22,7 +20,7 @@ module Poly = struct module Stable = struct module V1 = struct type ('payload, 'pk, 'signature) t = - {payload: 'payload; signer: 'pk; signature: 'signature} + { payload : 'payload; signer : 'pk; signature : 'signature } [@@deriving compare, sexp, hash, yojson, equal] end end] @@ -54,17 +52,16 @@ module Stable = struct include Comparable.Make (T) include Hashable.Make (T) - let accounts_accessed ~next_available_token ({payload; _} : t) = + let accounts_accessed ~next_available_token ({ payload; _ } : t) = 
Payload.accounts_accessed ~next_available_token payload end end] -type _unused = unit - constraint (Payload.t, Public_key.t, Signature.t) Poly.t = t +type _unused = unit constraint (Payload.t, Public_key.t, Signature.t) Poly.t = t include (Stable.Latest : module type of Stable.Latest with type t := t) -let payload Poly.{payload; _} = payload +let payload Poly.{ payload; _ } = payload let fee = Fn.compose Payload.fee payload @@ -75,26 +72,26 @@ let minimum_fee = Mina_compile_config.minimum_user_command_fee let has_insufficient_fee t = Currency.Fee.(fee t < minimum_fee) -let signer {Poly.signer; _} = signer +let signer { Poly.signer; _ } = signer -let fee_token ({payload; _} : t) = Payload.fee_token payload +let fee_token ({ payload; _ } : t) = Payload.fee_token payload -let fee_payer_pk ({payload; _} : t) = Payload.fee_payer_pk payload +let fee_payer_pk ({ payload; _ } : t) = Payload.fee_payer_pk payload -let fee_payer ({payload; _} : t) = Payload.fee_payer payload +let fee_payer ({ payload; _ } : t) = Payload.fee_payer payload -let fee_excess ({payload; _} : t) = Payload.fee_excess payload +let fee_excess ({ payload; _ } : t) = Payload.fee_excess payload -let token ({payload; _} : t) = Payload.token payload +let token ({ payload; _ } : t) = Payload.token payload -let source_pk ({payload; _} : t) = Payload.source_pk payload +let source_pk ({ payload; _ } : t) = Payload.source_pk payload -let source ~next_available_token ({payload; _} : t) = +let source ~next_available_token ({ payload; _ } : t) = Payload.source ~next_available_token payload -let receiver_pk ({payload; _} : t) = Payload.receiver_pk payload +let receiver_pk ({ payload; _ } : t) = Payload.receiver_pk payload -let receiver ~next_available_token ({payload; _} : t) = +let receiver ~next_available_token ({ payload; _ } : t) = Payload.receiver ~next_available_token payload let amount = Fn.compose Payload.amount payload @@ -103,7 +100,7 @@ let memo = Fn.compose Payload.memo payload let valid_until = 
Fn.compose Payload.valid_until payload -let tag ({payload; _} : t) = Payload.tag payload +let tag ({ payload; _ } : t) = Payload.tag payload let tag_string (t : t) = match t.payload.body with @@ -118,26 +115,26 @@ let tag_string (t : t) = | Mint_tokens _ -> "mint_tokens" -let next_available_token ({payload; _} : t) tid = +let next_available_token ({ payload; _ } : t) tid = Payload.next_available_token payload tid let to_input (payload : Payload.t) = Transaction_union_payload.(to_input (of_user_command_payload payload)) -let check_tokens ({payload= {common= {fee_token; _}; body}; _} : t) = +let check_tokens ({ payload = { common = { fee_token; _ }; body }; _ } : t) = (not (Token_id.(equal invalid) fee_token)) && match body with - | Payment {token_id; _} -> + | Payment { token_id; _ } -> not (Token_id.(equal invalid) token_id) | Stake_delegation _ -> true | Create_new_token _ -> Token_id.(equal default) fee_token - | Create_token_account {token_id; account_disabled; _} -> + | Create_token_account { token_id; account_disabled; _ } -> Token_id.(equal default) fee_token && not (Token_id.(equal default) token_id && account_disabled) - | Mint_tokens {token_id; _} -> + | Mint_tokens { token_id; _ } -> (not (Token_id.(equal invalid) token_id)) && not (Token_id.(equal default) token_id) @@ -147,13 +144,14 @@ let sign_payload (private_key : Signature_lib.Private_key.t) let sign (kp : Signature_keypair.t) (payload : Payload.t) : t = { payload - ; signer= kp.public_key - ; signature= sign_payload kp.private_key payload } + ; signer = kp.public_key + ; signature = sign_payload kp.private_key payload + } module For_tests = struct (* Pretend to sign a command. Much faster than actually signing. 
*) let fake_sign (kp : Signature_keypair.t) (payload : Payload.t) : t = - {payload; signer= kp.public_key; signature= Signature.dummy} + { payload; signer = kp.public_key; signature = Signature.dummy } end module Gen = struct @@ -186,14 +184,15 @@ module Gen = struct ?nonce ~max_amount ?fee_token ?(payment_token = Token_id.default) ~fee_range () = gen_inner sign' ~key_gen ?nonce ?fee_token ~fee_range - @@ fun {public_key= signer; _} {public_key= receiver; _} -> + @@ fun { public_key = signer; _ } { public_key = receiver; _ } -> let open Quickcheck.Generator.Let_syntax in let%map amount = Int.gen_incl 1 max_amount >>| Currency.Amount.of_int in Signed_command_payload.Body.Payment - { receiver_pk= Public_key.compress receiver - ; source_pk= Public_key.compress signer - ; token_id= payment_token - ; amount } + { receiver_pk = Public_key.compress receiver + ; source_pk = Public_key.compress signer + ; token_id = payment_token + ; amount + } let gen ?(sign_type = `Fake) = match sign_type with @@ -206,18 +205,19 @@ module Gen = struct ?fee_token ?payment_token ~fee_range = with_random_participants ~keys ~gen:(fun ~key_gen -> gen ?sign_type ~key_gen ?nonce ~max_amount ?fee_token ?payment_token - ~fee_range ) + ~fee_range) end module Stake_delegation = struct let gen ~key_gen ?nonce ?fee_token ~fee_range () = gen_inner For_tests.fake_sign ~key_gen ?nonce ?fee_token ~fee_range - (fun {public_key= signer; _} {public_key= new_delegate; _} -> + (fun { public_key = signer; _ } { public_key = new_delegate; _ } -> Quickcheck.Generator.return @@ Signed_command_payload.Body.Stake_delegation (Set_delegate - { delegator= Public_key.compress signer - ; new_delegate= Public_key.compress new_delegate }) ) + { delegator = Public_key.compress signer + ; new_delegate = Public_key.compress new_delegate + })) let gen_with_random_participants ~keys ?nonce ?fee_token ~fee_range = with_random_participants ~keys ~gen:(gen ?nonce ?fee_token ~fee_range) @@ -234,7 +234,7 @@ module Gen = struct let 
sequence : ?length:int - -> ?sign_type:[`Fake | `Real] + -> ?sign_type:[ `Fake | `Real ] -> ( Signature_lib.Keypair.t * Currency.Amount.t * Mina_numbers.Account_nonce.t @@ -260,7 +260,7 @@ module Gen = struct let%bind command_senders = Quickcheck_lib.shuffle @@ List.concat_mapi command_splits ~f:(fun idx cmds -> - List.init cmds ~f:(Fn.const idx) ) + List.init cmds ~f:(Fn.const idx)) in (* within the accounts, how will the currency be split into separate payments? *) @@ -271,11 +271,10 @@ module Gen = struct let _, balance, _, _ = account_info.(i) in let amount_to_spend = if spend_all then balance - else - Currency.Amount.of_int (Currency.Amount.to_int balance / 2) + else Currency.Amount.of_int (Currency.Amount.to_int balance / 2) in Quickcheck_lib.gen_division_currency amount_to_spend - command_splits'.(i) ) + command_splits'.(i)) n_accounts in return (command_senders, currency_splits)) @@ -285,10 +284,10 @@ module Gen = struct redraws command_splits as well as currency_splits, so we don't get stuck in a situation where it's very unlikely for the predicate to pass. 
*) - Quickcheck.Generator.filter ~f:(fun (_, splits) -> - Array.for_all splits ~f:(fun split -> - List.for_all split ~f:(fun amt -> - Currency.Amount.(amt >= of_int 2_000_000_000) ) ) ) + Quickcheck.Generator.filter ~f:(fun (_, splits) -> + Array.for_all splits ~f:(fun split -> + List.for_all split ~f:(fun amt -> + Currency.Amount.(amt >= of_int 2_000_000_000)))) in let account_nonces = Array.map ~f:(fun (_, _, nonce, _) -> nonce) account_info @@ -308,7 +307,7 @@ module Gen = struct let%bind fee = (* use of_string here because json_of_ocaml won't handle equivalent integer constants - *) + *) Currency.Fee.( gen_incl (of_string "6000000000") (min (of_string "10000000000") @@ -320,7 +319,7 @@ module Gen = struct let%bind receiver = map ~f:(fun idx -> let kp, _, _, _ = account_info.(idx) in - Public_key.compress kp.public_key ) + Public_key.compress kp.public_key) @@ Int.gen_uniform_incl 0 (n_accounts - 1) in let memo = Signed_command_memo.dummy in @@ -330,15 +329,16 @@ module Gen = struct ~fee_payer_pk:sender_pk ~valid_until:None ~nonce ~memo ~body: (Payment - { source_pk= sender_pk - ; receiver_pk= receiver - ; token_id= Token_id.default - ; amount }) + { source_pk = sender_pk + ; receiver_pk = receiver + ; token_id = Token_id.default + ; amount + }) in let sign' = match sign_type with `Fake -> For_tests.fake_sign | `Real -> sign in - return @@ sign' sender_pk payload ) + return @@ sign' sender_pk payload) end module With_valid_signature = struct @@ -369,20 +369,18 @@ module Base58_check = Codable.Make_base58_check (Stable.Latest) [%%define_locally Base58_check.(to_base58_check, of_base58_check, of_base58_check_exn)] -[%%define_locally -Base58_check.String_ops.(to_string, of_string)] +[%%define_locally Base58_check.String_ops.(to_string, of_string)] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] -let check_signature ({payload; signer; signature} : t) = +let check_signature ({ payload; signer; signature } : t) = Signature_lib.Schnorr.verify signature 
(Snark_params.Tick.Inner_curve.of_affine signer) (to_input payload) [%%else] -let check_signature ({payload; signer; signature} : t) = +let check_signature ({ payload; signer; signature } : t) = Signature_lib_nonconsensus.Schnorr.verify signature (Snark_params_nonconsensus.Inner_curve.of_affine signer) (to_input payload) @@ -393,13 +391,13 @@ let check_valid_keys t = let fee_payer = fee_payer_pk t in let source = source_pk t in let receiver = receiver_pk t in - List.for_all [fee_payer; source; receiver] ~f:(fun pk -> - Option.is_some (Public_key.decompress pk) ) + List.for_all [ fee_payer; source; receiver ] ~f:(fun pk -> + Option.is_some (Public_key.decompress pk)) let create_with_signature_checked signature signer payload = let open Option.Let_syntax in let%bind signer = Public_key.decompress signer in - let t = Poly.{payload; signature; signer} in + let t = Poly.{ payload; signature; signer } in Option.some_if (check_signature t && check_valid_keys t) t let gen_test = @@ -415,8 +413,7 @@ let%test_unit "completeness" = let%test_unit "json" = Quickcheck.test ~trials:20 ~sexp_of:sexp_of_t gen_test ~f:(fun t -> - assert (Codable.For_tests.check_encoding (module Stable.Latest) ~equal t) - ) + assert (Codable.For_tests.check_encoding (module Stable.Latest) ~equal t)) let check t = Option.some_if (check_signature t && check_valid_keys t) t @@ -429,4 +426,4 @@ let filter_by_participant user_commands public_key = ~f: (Fn.compose (Public_key.Compressed.equal public_key) - Account_id.public_key) ) + Account_id.public_key)) diff --git a/src/lib/mina_base/signed_command.mli b/src/lib/mina_base/signed_command.mli index 34f6207a85c..ccbb0188fcc 100644 --- a/src/lib/mina_base/signed_command.mli +++ b/src/lib/mina_base/signed_command.mli @@ -7,7 +7,7 @@ module Poly : sig module Stable : sig module V1 : sig type ('payload, 'pk, 'signature) t = - {payload: 'payload; signer: 'pk; signature: 'signature} + { payload : 'payload; signer : 'pk; signature : 'signature } [@@deriving sexp, 
hash, yojson, equal, compare] end end] diff --git a/src/lib/mina_base/signed_command_intf.ml b/src/lib/mina_base/signed_command_intf.ml index 0555910a321..8d9f585db37 100644 --- a/src/lib/mina_base/signed_command_intf.ml +++ b/src/lib/mina_base/signed_command_intf.ml @@ -1,13 +1,11 @@ (* user_command_intf.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Import open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Mina_numbers open Snark_params.Tick @@ -31,9 +29,9 @@ module type Gen_intf = sig * and an amount $\in [1,max_amount]$ *) val payment : - ?sign_type:[`Fake | `Real] - -> key_gen:(Signature_keypair.t * Signature_keypair.t) - Quickcheck.Generator.t + ?sign_type:[ `Fake | `Real ] + -> key_gen: + (Signature_keypair.t * Signature_keypair.t) Quickcheck.Generator.t -> ?nonce:Account_nonce.t -> max_amount:int -> ?fee_token:Token_id.t @@ -49,7 +47,7 @@ module type Gen_intf = sig * and an amount $\in [1,max_amount]$ *) val payment_with_random_participants : - ?sign_type:[`Fake | `Real] + ?sign_type:[ `Fake | `Real ] -> keys:Signature_keypair.t array -> ?nonce:Account_nonce.t -> max_amount:int @@ -60,8 +58,8 @@ module type Gen_intf = sig -> t Quickcheck.Generator.t val stake_delegation : - key_gen:(Signature_keypair.t * Signature_keypair.t) - Quickcheck.Generator.t + key_gen: + (Signature_keypair.t * Signature_keypair.t) Quickcheck.Generator.t -> ?nonce:Account_nonce.t -> ?fee_token:Token_id.t -> fee_range:int @@ -81,7 +79,7 @@ module type Gen_intf = sig *) val sequence : ?length:int - -> ?sign_type:[`Fake | `Real] + -> ?sign_type:[ `Fake | `Real ] -> ( Signature_lib.Keypair.t * Currency.Amount.t * Mina_numbers.Account_nonce.t diff --git a/src/lib/mina_base/signed_command_memo.ml b/src/lib/mina_base/signed_command_memo.ml index d362e273632..d073c635fc5 100644 --- a/src/lib/mina_base/signed_command_memo.ml +++ b/src/lib/mina_base/signed_command_memo.ml @@ -1,12 +1,10 @@ (* user_command_memo.ml *) -[%%import 
-"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Crypto_params @@ -41,8 +39,7 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(to_yojson, of_yojson, to_string, of_string)] +[%%define_locally Stable.Latest.(to_yojson, of_yojson, to_string, of_string)] exception Too_long_user_memo_input @@ -99,7 +96,7 @@ let create_by_digesting_string_exn s = String.init memo_length ~f:(fun ndx -> if Int.(ndx = tag_index) then digest_tag else if Int.(ndx = length_index) then digest_length_byte - else digest.[ndx - 2] ) + else digest.[ndx - 2]) let create_by_digesting_string (s : string) = try Ok (create_by_digesting_string_exn s) @@ -122,7 +119,7 @@ let create_from_value_exn (type t) (module M : Memoable with type t = t) if Int.(ndx = tag_index) then bytes_tag else if Int.(ndx = length_index) then Char.of_int_exn len else if Int.(ndx < len + 2) then M.get value (ndx - 2) - else '\x00' ) + else '\x00') let create_from_bytes_exn bytes = create_from_value_exn (module Bytes) bytes @@ -145,7 +142,7 @@ let dummy = (create_by_digesting_string_exn "" :> t) let empty = create_from_string_exn "" let fold_bits t = - { Fold_lib.Fold.fold= + { Fold_lib.Fold.fold = (fun ~init ~f -> let n = 8 * String.length t in let rec go acc i = @@ -154,12 +151,12 @@ let fold_bits t = let b = (Char.to_int t.[i / 8] lsr (i mod 8)) land 1 = 1 in go (f acc b) (i + 1) in - go init 0 ) } + go init 0) + } let to_bits t = Fold_lib.Fold.to_list (fold_bits t) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Boolean = Tick.Boolean module Typ = Tick.Typ @@ -220,8 +217,7 @@ let%test_module "user_command_memo" = false with Too_long_user_memo_input -> true - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let%test_unit "typ is identity" = let s = "this is a string" in @@ -234,11 +230,10 @@ let%test_module "user_command_memo" = in let memo_var = Snarky_backendless.Typ_monads.Store.run 
(typ.store memo) (fun x -> - Snarky_backendless.Cvar.Constant x ) + Snarky_backendless.Cvar.Constant x) in let memo_read = - Snarky_backendless.Typ_monads.Read.run (typ.read memo_var) - read_constant + Snarky_backendless.Typ_monads.Read.run (typ.read memo_var) read_constant in [%test_eq: string] memo memo_read diff --git a/src/lib/mina_base/signed_command_payload.ml b/src/lib/mina_base/signed_command_payload.ml index 7f788db8344..44e476df03e 100644 --- a/src/lib/mina_base/signed_command_payload.ml +++ b/src/lib/mina_base/signed_command_payload.ml @@ -1,12 +1,10 @@ (* user_command_payload.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -31,19 +29,19 @@ module Common = struct module Stable = struct module V1 = struct type ('fee, 'public_key, 'token_id, 'nonce, 'global_slot, 'memo) t = - { fee: 'fee - ; fee_token: 'token_id - ; fee_payer_pk: 'public_key - ; nonce: 'nonce - ; valid_until: 'global_slot - ; memo: 'memo } + { fee : 'fee + ; fee_token : 'token_id + ; fee_payer_pk : 'public_key + ; nonce : 'nonce + ; valid_until : 'global_slot + ; memo : 'memo + } [@@deriving compare, equal, sexp, hash, yojson, hlist] end end] end - [%%if - feature_tokens] + [%%if feature_tokens] [%%versioned module Stable = struct @@ -109,7 +107,8 @@ module Common = struct [%%endif] - let to_input ({fee; fee_token; fee_payer_pk; nonce; valid_until; memo} : t) = + let to_input ({ fee; fee_token; fee_payer_pk; nonce; valid_until; memo } : t) + = let bitstring = Random_oracle.Input.bitstring in Array.reduce_exn ~f:Random_oracle.Input.append [| Currency.Fee.to_input fee @@ -117,7 +116,8 @@ module Common = struct ; Public_key.Compressed.to_input fee_payer_pk ; Account_nonce.to_input nonce ; Global_slot.to_input valid_until - ; bitstring (Memo.to_bits memo) |] + ; bitstring (Memo.to_bits memo) + |] let gen ?fee_token_id () : t Quickcheck.Generator.t = 
let open Quickcheck.Generator.Let_syntax in @@ -141,10 +141,9 @@ module Common = struct String.gen_with_length Memo.max_input_length Char.quickcheck_generator >>| Memo.create_from_string_exn in - Poly.{fee; fee_token; fee_payer_pk; nonce; valid_until; memo} + Poly.{ fee; fee_token; fee_payer_pk; nonce; valid_until; memo } - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = ( Currency.Fee.var @@ -162,22 +161,24 @@ module Common = struct ; Public_key.Compressed.typ ; Account_nonce.typ ; Global_slot.typ - ; Memo.typ ] + ; Memo.typ + ] ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist module Checked = struct - let constant ({fee; fee_token; fee_payer_pk; nonce; valid_until; memo} : t) - : var = - { fee= Currency.Fee.var_of_t fee - ; fee_token= Token_id.var_of_t fee_token - ; fee_payer_pk= Public_key.Compressed.var_of_t fee_payer_pk - ; nonce= Account_nonce.Checked.constant nonce - ; memo= Memo.Checked.constant memo - ; valid_until= Global_slot.Checked.constant valid_until } + let constant + ({ fee; fee_token; fee_payer_pk; nonce; valid_until; memo } : t) : var = + { fee = Currency.Fee.var_of_t fee + ; fee_token = Token_id.var_of_t fee_token + ; fee_payer_pk = Public_key.Compressed.var_of_t fee_payer_pk + ; nonce = Account_nonce.Checked.constant nonce + ; memo = Memo.Checked.constant memo + ; valid_until = Global_slot.Checked.constant valid_until + } let to_input - ({fee; fee_token; fee_payer_pk; nonce; valid_until; memo} : var) = + ({ fee; fee_token; fee_payer_pk; nonce; valid_until; memo } : var) = let%map nonce = Account_nonce.Checked.to_input nonce and valid_until = Global_slot.Checked.to_input valid_until and fee_token = Token_id.Checked.to_input fee_token in @@ -188,7 +189,8 @@ module Common = struct ; nonce ; valid_until ; Random_oracle.Input.bitstring - (Array.to_list (memo :> Boolean.var array)) |] + (Array.to_list (memo :> Boolean.var array)) + |] end [%%endif] @@ -212,8 
+214,7 @@ module Body = struct end] end - [%%if - feature_tokens] + [%%if feature_tokens] [%%versioned module Stable = struct @@ -286,7 +287,7 @@ module Body = struct match source_pk with | Some token_owner_pk -> map New_token_payload.gen ~f:(fun payload -> - {payload with token_owner_pk} ) + { payload with token_owner_pk }) | None -> New_token_payload.gen in @@ -294,7 +295,7 @@ module Body = struct match source_pk with | Some token_owner_pk -> map New_account_payload.gen ~f:(fun payload -> - {payload with token_owner_pk} ) + { payload with token_owner_pk }) | None -> New_account_payload.gen in @@ -302,7 +303,7 @@ module Body = struct match source_pk with | Some token_owner_pk -> map Minting_payload.gen ~f:(fun payload -> - {payload with token_owner_pk} ) + { payload with token_owner_pk }) | None -> Minting_payload.gen in @@ -320,7 +321,7 @@ module Body = struct | `D payload -> Create_token_account payload | `E payload -> - Mint_tokens payload ) + Mint_tokens payload) let source_pk (t : t) = match t with @@ -404,13 +405,13 @@ module Poly = struct [%%versioned module Stable = struct module V1 = struct - type ('common, 'body) t = {common: 'common; body: 'body} + type ('common, 'body) t = { common : 'common; body : 'body } [@@deriving equal, sexp, hash, yojson, compare, hlist] - let of_latest common_latest body_latest {common; body} = + let of_latest common_latest body_latest { common; body } = let open Result.Let_syntax in let%map common = common_latest common and body = body_latest body in - {common; body} + { common; body } end end] end @@ -426,14 +427,16 @@ module Stable = struct end] let create ~fee ~fee_token ~fee_payer_pk ~nonce ~valid_until ~memo ~body : t = - { common= + { common = { fee ; fee_token ; fee_payer_pk ; nonce - ; valid_until= Option.value valid_until ~default:Global_slot.max_value - ; memo } - ; body } + ; valid_until = Option.value valid_until ~default:Global_slot.max_value + ; memo + } + ; body + } let fee (t : t) = t.common.fee @@ -485,20 
+488,23 @@ let fee_excess (t : t) = let accounts_accessed ~next_available_token (t : t) = [ fee_payer t ; source ~next_available_token t - ; receiver ~next_available_token t ] + ; receiver ~next_available_token t + ] let next_available_token (t : t) token = match t.body with Create_new_token _ -> Token_id.next token | _ -> token let dummy : t = - { common= - { fee= Currency.Fee.zero - ; fee_token= Token_id.default - ; fee_payer_pk= Public_key.Compressed.empty - ; nonce= Account_nonce.zero - ; valid_until= Global_slot.max_value - ; memo= Memo.dummy } - ; body= Payment Payment_payload.dummy } + { common = + { fee = Currency.Fee.zero + ; fee_token = Token_id.default + ; fee_payer_pk = Public_key.Compressed.empty + ; nonce = Account_nonce.zero + ; valid_until = Global_slot.max_value + ; memo = Memo.dummy + } + ; body = Payment Payment_payload.dummy + } let gen = let open Quickcheck.Generator.Let_syntax in @@ -508,4 +514,4 @@ let gen = |> Option.value_exn ?here:None ?error:None ?message:None in let%map body = Body.gen ~source_pk:common.fee_payer_pk ~max_amount in - Poly.{common; body} + Poly.{ common; body } diff --git a/src/lib/mina_base/signed_command_payload.mli b/src/lib/mina_base/signed_command_payload.mli index bcc86572b3c..c499d9c26df 100644 --- a/src/lib/mina_base/signed_command_payload.mli +++ b/src/lib/mina_base/signed_command_payload.mli @@ -53,12 +53,13 @@ module Common : sig module Stable : sig module V1 : sig type ('fee, 'public_key, 'token_id, 'nonce, 'global_slot, 'memo) t = - { fee: 'fee - ; fee_token: 'token_id - ; fee_payer_pk: 'public_key - ; nonce: 'nonce - ; valid_until: 'global_slot - ; memo: 'memo } + { fee : 'fee + ; fee_token : 'token_id + ; fee_payer_pk : 'public_key + ; nonce : 'nonce + ; valid_until : 'global_slot + ; memo : 'memo + } [@@deriving equal, sexp, hash, yojson] end end] @@ -113,7 +114,7 @@ module Poly : sig [%%versioned: module Stable : sig module V1 : sig - type ('common, 'body) t = {common: 'common; body: 'body} + type 
('common, 'body) t = { common : 'common; body : 'body } [@@deriving equal, sexp, hash, yojson, compare, hlist] val of_latest : diff --git a/src/lib/mina_base/snapp_account.ml b/src/lib/mina_base/snapp_account.ml index 86f51c5025e..5ad665f7cd6 100644 --- a/src/lib/mina_base/snapp_account.ml +++ b/src/lib/mina_base/snapp_account.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick module Mina_numbers = Mina_numbers @@ -25,14 +23,15 @@ module Poly = struct [%%versioned module Stable = struct module V1 = struct - type ('app_state, 'vk) t = {app_state: 'app_state; verification_key: 'vk} + type ('app_state, 'vk) t = + { app_state : 'app_state; verification_key : 'vk } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] end type ('app_state, 'vk) t_ = ('app_state, 'vk) Poly.t = - {app_state: 'app_state; verification_key: 'vk} + { app_state : 'app_state; verification_key : 'vk } [%%versioned module Stable = struct @@ -57,8 +56,7 @@ let digest_vk (t : Side_loaded_verification_key.t) = hash ~init:Hash_prefix_states.side_loaded_vk (pack_input (Side_loaded_verification_key.to_input t))) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Checked = struct type t = @@ -77,7 +75,7 @@ module Checked = struct |> List.reduce_exn ~f:append let to_input (t : t) = - to_input' {t with verification_key= Lazy.force t.verification_key.hash} + to_input' { t with verification_key = Lazy.force t.verification_key.hash } let digest_vk t = Random_oracle.Checked.( @@ -102,7 +100,7 @@ let typ : (Checked.t, t) Typ.t = | None -> Pickles.Side_loaded.Verification_key.dummy | Some x -> - With_hash.data x ) + With_hash.data x) ~back:(fun x -> Some (With_hash.of_data x ~hash_data:digest_vk)) |> Typ.transport_var ~there:With_hash.data ~back: @@ -129,8 +127,9 @@ let to_input (t : t) = let default : _ Poly.t = (* These are the permissions of 
a "user"/"non snapp" account. *) - { app_state= Vector.init Snapp_state.Max_state_size.n ~f:(fun _ -> F.zero) - ; verification_key= None } + { app_state = Vector.init Snapp_state.Max_state_size.n ~f:(fun _ -> F.zero) + ; verification_key = None + } let digest (t : t) = Random_oracle.( diff --git a/src/lib/mina_base/snapp_basic.ml b/src/lib/mina_base/snapp_basic.ml index 707c651a35d..1f84b477ea1 100644 --- a/src/lib/mina_base/snapp_basic.ml +++ b/src/lib/mina_base/snapp_basic.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -20,63 +18,60 @@ module Transition = struct [%%versioned module Stable = struct module V1 = struct - type 'a t = {prev: 'a; next: 'a} + type 'a t = { prev : 'a; next : 'a } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] - let to_input {prev; next} ~f = Random_oracle_input.append (f prev) (f next) + let to_input { prev; next } ~f = Random_oracle_input.append (f prev) (f next) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let typ t = - Typ.of_hlistable [t; t] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist + Typ.of_hlistable [ t; t ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist [%%endif] end module Flagged_data = struct - type ('flag, 'a) t = {flag: 'flag; data: 'a} [@@deriving hlist, fields] + type ('flag, 'a) t = { flag : 'flag; data : 'a } [@@deriving hlist, fields] - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let typ flag t = - Typ.of_hlistable [flag; t] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist + Typ.of_hlistable [ flag; t ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist [%%endif] - let to_input' {flag; data} ~flag:f ~data:d = + let to_input' { flag; data } ~flag:f ~data:d = Random_oracle_input.(append (f flag) (d data)) end module 
Flagged_option = struct - type ('bool, 'a) t = {is_some: 'bool; data: 'a} [@@deriving hlist, fields] + type ('bool, 'a) t = { is_some : 'bool; data : 'a } [@@deriving hlist, fields] - let to_input' {is_some; data} ~f = - Random_oracle_input.(append (bitstring [is_some]) (f data)) + let to_input' { is_some; data } ~f = + Random_oracle_input.(append (bitstring [ is_some ]) (f data)) - let to_input {is_some; data} ~default ~f = + let to_input { is_some; data } ~default ~f = let data = if is_some then data else default in - to_input' {is_some; data} ~f + to_input' { is_some; data } ~f let of_option t ~default = match t with | None -> - {is_some= false; data= default} + { is_some = false; data = default } | Some data -> - {is_some= true; data} + { is_some = true; data } - let to_option {is_some; data} = Option.some_if is_some data + let to_option { is_some; data } = Option.some_if is_some data - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let typ t = - Typ.of_hlistable [Boolean.typ; t] ~var_to_hlist:to_hlist + Typ.of_hlistable [ Boolean.typ; t ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let option_typ ~default t = @@ -106,8 +101,7 @@ module Set_or_keep = struct let is_keep = function Keep -> true | _ -> false - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked : sig type 'a t @@ -131,7 +125,7 @@ module Set_or_keep = struct end = struct type 'a t = (Boolean.var, 'a) Flagged_option.t - let set_or_keep ~if_ ({is_some; data} : _ t) x = + let set_or_keep ~if_ ({ is_some; data } : _ t) x = if_ is_some ~then_:data ~else_:x let data = Flagged_option.data @@ -170,8 +164,7 @@ module Or_ignore = struct let of_option = function None -> Ignore | Some x -> Check x - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked : sig type 'a t @@ -207,8 +200,8 @@ module Or_ignore = struct match t with | Implicit x -> f x - | Explicit {is_some; data} -> - 
Pickles.Impls.Step.Boolean.(any [not is_some; f data]) + | Explicit { is_some; data } -> + Pickles.Impls.Step.Boolean.(any [ not is_some; f data ]) let typ_implicit (type a a_var) ~equal ~(ignore : a) (t : (a_var, a) Typ.t) : (a_var t, a Stable.Latest.t) Typ.t = @@ -246,38 +239,37 @@ module Account_state = struct end] module Encoding = struct - type 'b t = {any: 'b; empty: 'b} [@@deriving hlist] + type 'b t = { any : 'b; empty : 'b } [@@deriving hlist] - let to_input {any; empty} = Random_oracle_input.bitstring [any; empty] + let to_input { any; empty } = Random_oracle_input.bitstring [ any; empty ] end let encode : t -> bool Encoding.t = function | Empty -> - {any= false; empty= true} + { any = false; empty = true } | Non_empty -> - {any= false; empty= false} + { any = false; empty = false } | Any -> - {any= true; empty= false} + { any = true; empty = false } let decode : bool Encoding.t -> t = function - | {any= false; empty= true} -> + | { any = false; empty = true } -> Empty - | {any= false; empty= false} -> + | { any = false; empty = false } -> Non_empty - | {any= true; empty= false} | {any= true; empty= true} -> + | { any = true; empty = false } | { any = true; empty = true } -> Any let to_input (x : t) = Encoding.to_input (encode x) - let check (t : t) (x : [`Empty | `Non_empty]) = + let check (t : t) (x : [ `Empty | `Non_empty ]) = match (t, x) with | Any, _ | Non_empty, `Non_empty | Empty, `Empty -> Ok () | _ -> Or_error.error_string "Bad account_type" - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked = struct open Pickles.Impls.Step @@ -287,20 +279,22 @@ module Account_state = struct let to_input (t : t) = Encoding.to_input t let check (t : t) ~is_empty = - Boolean.(any [t.any; t.empty && is_empty; (not t.empty) && not is_empty]) + Boolean.( + any [ t.any; t.empty && is_empty; (not t.empty) && not is_empty ]) end let typ : (Checked.t, t) Typ.t = let open Encoding in - Typ.of_hlistable [Boolean.typ; Boolean.typ] 
~var_to_hlist:to_hlist - ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + Typ.of_hlistable + [ Boolean.typ; Boolean.typ ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist + ~value_of_hlist:of_hlist |> Typ.transport ~there:encode ~back:decode [%%endif] end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module F = Pickles.Backend.Tick.Field @@ -311,7 +305,7 @@ let invalid_public_key : Public_key.Compressed.t Lazy.t = b + (x * (a + square x)) in let rec go i : Public_key.Compressed.t = - if not (is_square (f i)) then {x= i; is_odd= false} else go (i + one) + if not (is_square (f i)) then { x = i; is_odd = false } else go (i + one) in lazy (go zero) diff --git a/src/lib/mina_base/snapp_command.ml b/src/lib/mina_base/snapp_command.ml index 2dbd2785656..11e5f83b441 100644 --- a/src/lib/mina_base/snapp_command.ml +++ b/src/lib/mina_base/snapp_command.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -43,10 +41,11 @@ module Party = struct module Stable = struct module V1 = struct type ('state_element, 'pk, 'vk, 'perms) t = - { app_state: 'state_element Snapp_state.V.Stable.V1.t - ; delegate: 'pk - ; verification_key: 'vk - ; permissions: 'perms } + { app_state : 'state_element Snapp_state.V.Stable.V1.t + ; delegate : 'pk + ; verification_key : 'vk + ; permissions : 'perms + } [@@deriving compare, equal, sexp, hash, yojson, hlist] end end] @@ -82,7 +81,8 @@ module Party = struct , Permissions.Checked.t Set_or_keep.Checked.t ) Poly.t - let to_input ({app_state; delegate; verification_key; permissions} : t) = + let to_input ({ app_state; delegate; verification_key; permissions } : t) + = let open Random_oracle_input in List.reduce_exn ~f:append [ Snapp_state.to_input app_state @@ -91,18 +91,20 @@ module Party = struct 
~f:Public_key.Compressed.Checked.to_input ; Set_or_keep.Checked.to_input verification_key ~f:field ; Set_or_keep.Checked.to_input permissions - ~f:Permissions.Checked.to_input ] + ~f:Permissions.Checked.to_input + ] end let dummy : t = - { app_state= + { app_state = Vector.init Snapp_state.Max_state_size.n ~f:(fun _ -> - Set_or_keep.Keep ) - ; delegate= Keep - ; verification_key= Keep - ; permissions= Keep } + Set_or_keep.Keep) + ; delegate = Keep + ; verification_key = Keep + ; permissions = Keep + } - let to_input ({app_state; delegate; verification_key; permissions} : t) = + let to_input ({ app_state; delegate; verification_key; permissions } : t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Snapp_state.to_input app_state @@ -114,7 +116,8 @@ module Party = struct (Set_or_keep.map verification_key ~f:With_hash.hash) ~dummy:Field.zero ~f:field ; Set_or_keep.to_input permissions ~dummy:Permissions.user_default - ~f:Permissions.to_input ] + ~f:Permissions.to_input + ] let typ () : (Checked.t, t) Typ.t = let open Poly in @@ -127,7 +130,8 @@ module Party = struct |> Typ.transport ~there:(Set_or_keep.map ~f:With_hash.hash) ~back:(Set_or_keep.map ~f:(fun _ -> failwith "vk typ")) - ; Set_or_keep.typ ~dummy:Permissions.user_default Permissions.typ ] + ; Set_or_keep.typ ~dummy:Permissions.user_default Permissions.typ + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -138,7 +142,7 @@ module Party = struct module Stable = struct module V1 = struct type ('pk, 'update, 'signed_amount) t = - {pk: 'pk; update: 'update; delta: 'signed_amount} + { pk : 'pk; update : 'update; delta : 'signed_amount } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -162,11 +166,12 @@ module Party = struct type t = (Public_key.Compressed.var, Update.Checked.t, Amount.Signed.var) Poly.t - let to_input ({pk; update; delta} : t) = + let to_input ({ pk; update; delta } : t) = List.reduce_exn 
~f:Random_oracle_input.append [ Public_key.Compressed.Checked.to_input pk ; Update.Checked.to_input update - ; Amount.Signed.Checked.to_input delta ] + ; Amount.Signed.Checked.to_input delta + ] let digest (t : t) = Random_oracle.Checked.( @@ -176,20 +181,22 @@ module Party = struct let typ () : (Checked.t, t) Typ.t = let open Poly in Typ.of_hlistable - [Public_key.Compressed.typ; Update.typ (); Amount.Signed.typ] + [ Public_key.Compressed.typ; Update.typ (); Amount.Signed.typ ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let dummy : t = - { pk= Public_key.Compressed.empty - ; update= Update.dummy - ; delta= Amount.Signed.zero } + { pk = Public_key.Compressed.empty + ; update = Update.dummy + ; delta = Amount.Signed.zero + } - let to_input ({pk; update; delta} : t) = + let to_input ({ pk; update; delta } : t) = List.reduce_exn ~f:Random_oracle_input.append [ Public_key.Compressed.to_input pk ; Update.to_input update - ; Amount.Signed.to_input delta ] + ; Amount.Signed.to_input delta + ] let digest (t : t) = Random_oracle.( @@ -209,7 +216,7 @@ module Party = struct [%%versioned module Stable = struct module V1 = struct - type ('body, 'predicate) t = {body: 'body; predicate: 'predicate} + type ('body, 'predicate) t = { body : 'body; predicate : 'predicate } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -247,7 +254,7 @@ module Party = struct type t = ( Body.Stable.V1.t (* It's really more natural for this to be a predicate. Consider doing this - if predicates are not too expensive. *) + if predicates are not too expensive. 
*) , Account_nonce.Stable.V1.t ) Poly.Stable.V1.t [@@deriving sexp, equal, yojson, hash, compare] @@ -268,9 +275,10 @@ module Party = struct type t = (Body.Checked.t, Account_nonce.Checked.t) Poly.t end - let typ : (Checked.t, t) Typ.t = Poly.typ [Body.typ (); Account_nonce.typ] + let typ : (Checked.t, t) Typ.t = + Poly.typ [ Body.typ (); Account_nonce.typ ] - let dummy : t = {body= Body.dummy; predicate= Account_nonce.zero} + let dummy : t = { body = Body.dummy; predicate = Account_nonce.zero } end module Empty = struct @@ -284,9 +292,9 @@ module Party = struct end end] - let dummy : t = {body= Body.dummy; predicate= ()} + let dummy : t = { body = Body.dummy; predicate = () } - let create body : t = {body; predicate= ()} + let create body : t = { body; predicate = () } end end @@ -295,7 +303,7 @@ module Party = struct [%%versioned module Stable = struct module V1 = struct - type ('data, 'auth) t = {data: 'data; authorization: 'auth} + type ('data, 'auth) t = { data : 'data; authorization : 'auth } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -350,10 +358,11 @@ module Inner = struct module Stable = struct module V1 = struct type ('one, 'two) t = - { token_id: Token_id.Stable.V1.t - ; fee_payment: Other_fee_payer.Stable.V1.t option - ; one: 'one - ; two: 'two } + { token_id : Token_id.Stable.V1.t + ; fee_payment : Other_fee_payer.Stable.V1.t option + ; one : 'one + ; two : 'two + } [@@deriving sexp, equal, yojson, hash, compare, fields, hlist] end end] @@ -395,8 +404,7 @@ module Binable_arg = struct end] end -[%%if -feature_snapps] +[%%if feature_snapps] include Binable_arg @@ -449,17 +457,18 @@ end] [%%endif] type transfer = - { source: Public_key.Compressed.t - ; receiver: Public_key.Compressed.t - ; amount: Amount.t } + { source : Public_key.Compressed.t + ; receiver : Public_key.Compressed.t + ; amount : Amount.t + } let token_id (t : t) : Token_id.t = match t with - | Proved_empty {token_id; _} - | Proved_signed {token_id; _} - | 
Proved_proved {token_id; _} - | Signed_signed {token_id; _} - | Signed_empty {token_id; _} -> + | Proved_empty { token_id; _ } + | Proved_signed { token_id; _ } + | Proved_proved { token_id; _ } + | Signed_signed { token_id; _ } + | Signed_empty { token_id; _ } -> token_id let assert_ b lab = if b then Ok () else Or_error.error_string lab @@ -481,11 +490,7 @@ let signed_to_non_positive (t : Amount.Signed.t) = let fee_token (t : t) : Token_id.t = let f (x : _ Inner.t) = - match x.fee_payment with - | Some x -> - x.payload.token_id - | None -> - x.token_id + match x.fee_payment with Some x -> x.payload.token_id | None -> x.token_id in match t with | Proved_empty r -> @@ -503,7 +508,7 @@ let check_tokens (t : t) = let f (r : _ Inner.t) = let valid x = not (Token_id.(equal invalid) x) in Option.value_map r.fee_payment ~default:true ~f:(fun x -> - valid x.payload.token_id ) + valid x.payload.token_id) && valid r.token_id in match t with @@ -523,10 +528,11 @@ let check_tokens (t : t) = let native_excess_exn (t : t) = let open Party in let f1 - { Inner.one: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t - ; two: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option + { Inner.one : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t + ; two : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option ; token_id - ; _ } = + ; _ + } = match two with | None -> assert (is_non_pos one.data.body.delta) ; @@ -544,7 +550,7 @@ let native_excess_exn (t : t) = in (Account_id.create pk token_id, x.magnitude) in - let f2 r = f1 {r with two= Some r.Inner.two} in + let f2 r = f1 { r with two = Some r.Inner.two } in match t with | Proved_empty r -> f1 r @@ -595,11 +601,7 @@ let fee_payment t = let fee_exn (t : t) = let f (r : _ Inner.t) = let _, e = native_excess_exn t in - match r.fee_payment with - | Some p -> - p.payload.fee - | None -> - Amount.to_fee e + match r.fee_payment with Some p -> p.payload.fee | None -> Amount.to_fee e in match t with | Proved_empty r -> @@ 
-618,24 +620,27 @@ let fee_exn (t : t) = let as_transfer (t : t) : transfer = let open Party in let f1 - { Inner.one: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t - ; two: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option - ; _ } : transfer = + { Inner.one : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t + ; two : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option + ; _ + } : transfer = match two with | None -> - { source= one.data.body.pk - ; receiver= one.data.body.pk - ; amount= Amount.zero } + { source = one.data.body.pk + ; receiver = one.data.body.pk + ; amount = Amount.zero + } | Some two -> let sender, receiver = if is_non_pos one.data.body.delta then (one.data.body, two.data.body) else (two.data.body, one.data.body) in - { source= sender.pk - ; receiver= receiver.pk - ; amount= receiver.delta.magnitude } + { source = sender.pk + ; receiver = receiver.pk + ; amount = receiver.delta.magnitude + } in - let f2 r = f1 {r with two= Some r.Inner.two} in + let f2 r = f1 { r with two = Some r.Inner.two } in match t with | Proved_empty r -> f1 r @@ -666,17 +671,18 @@ let accounts_accessed (t : t) : Account_id.t list = let f { Inner.token_id ; fee_payment - ; one: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t - ; two: ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option } = + ; one : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t + ; two : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option + } = let a k = Account_id.create k token_id in a one.data.body.pk :: Option.(to_list (map two ~f:(fun x -> a x.data.body.pk))) @ Option.( to_list (map fee_payment ~f:(fun x -> - Account_id.create x.payload.pk x.payload.token_id ))) + Account_id.create x.payload.pk x.payload.token_id))) in - let f2 r = f {r with two= Some r.Inner.two} in + let f2 r = f { r with two = Some r.Inner.two } in match t with | Proved_empty r -> f r @@ -710,19 +716,20 @@ module Payload = struct module Stable = struct module V1 = struct type 
('bool, 'token_id, 'fee_payer_opt, 'one, 'two) t = - { second_starts_empty: 'bool - ; second_ends_empty: 'bool - ; token_id: 'token_id - ; other_fee_payer_opt: 'fee_payer_opt + { second_starts_empty : 'bool + ; second_ends_empty : 'bool + ; token_id : 'token_id + ; other_fee_payer_opt : 'fee_payer_opt (* It would be more optimal if it was - - one: Body-minus-update - - two: Body-minus-update - - updates: { one: Update.t; two: Update.t } - - since both statements contain both updates. -*) - ; one: 'one - ; two: 'two } + - one: Body-minus-update + - two: Body-minus-update + - updates: { one: Update.t; two: Update.t } + + since both statements contain both updates. + *) + ; one : 'one + ; two : 'two + } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -782,9 +789,9 @@ module Payload = struct let digested (r : t) : Digested.Checked.t = let b (x : _ Party.Predicated.Poly.t) = - {x with body= Party.Body.Checked.digest x.body} + { x with body = Party.Body.Checked.digest x.body } in - {r with one= b r.one; two= b r.two} + { r with one = b r.one; two = b r.two } end let typ : (Checked.t, t) Typ.t = @@ -799,7 +806,8 @@ module Payload = struct ~default:Other_fee_payer.Payload.dummy) ~back:Flagged_option.to_option ; Party.Predicated.Signed.typ - ; Party.Predicated.Signed.typ ] + ; Party.Predicated.Signed.typ + ] end module One_proved = struct @@ -932,19 +940,21 @@ module Payload = struct ; token_id ; other_fee_payer_opt ; one - ; two } : + ; two + } : _ Inner.t) ~f1 ~f2 = - let p f {Party.Predicated.Poly.body; predicate} = - List.reduce_exn ~f:append [b body; f predicate] + let p f { Party.Predicated.Poly.body; predicate } = + List.reduce_exn ~f:append [ b body; f predicate ] in List.reduce_exn ~f:append - [ bitstring [second_starts_empty; second_ends_empty] + [ bitstring [ second_starts_empty; second_ends_empty ] ; !(Token_id.Checked.to_input token_id) ; Snapp_basic.Flagged_option.( to_input' ~f:Other_fee_payer.Payload.Checked.to_input other_fee_payer_opt) ; 
p f1 one - ; p f2 two ] + ; p f2 two + ] in let nonce x = !(Account_nonce.Checked.to_input x) in match t with @@ -969,20 +979,22 @@ module Payload = struct ; token_id ; other_fee_payer_opt ; one - ; two } : + ; two + } : _ Inner.t) ~f1 ~f2 = - let p f {Party.Predicated.Poly.body; predicate} = - List.reduce_exn ~f:append [b body; f predicate] + let p f { Party.Predicated.Poly.body; predicate } = + List.reduce_exn ~f:append [ b body; f predicate ] in List.reduce_exn ~f:append - [ bitstring [second_starts_empty; second_ends_empty] + [ bitstring [ second_starts_empty; second_ends_empty ] ; Token_id.to_input token_id ; Snapp_basic.Flagged_option.( to_input' ~f:Other_fee_payer.Payload.to_input (of_option ~default:Other_fee_payer.Payload.dummy other_fee_payer_opt)) ; p f1 one - ; p f2 two ] + ; p f2 two + ] in match t with | Zero_proved r -> @@ -999,19 +1011,19 @@ module Payload = struct let digested (t : t) : Digested.t = let b (x : _ Party.Predicated.Poly.t) = - {x with body= Party.Body.digest x.body} + { x with body = Party.Body.digest x.body } in let s x = let t = b x in - {t with predicate= Snapp_predicate.digest t.predicate} + { t with predicate = Snapp_predicate.digest t.predicate } in match t with | Zero_proved r -> - Zero_proved {r with one= b r.one; two= b r.two} + Zero_proved { r with one = b r.one; two = b r.two } | One_proved r -> - One_proved {r with one= s r.one; two= b r.two} + One_proved { r with one = s r.one; two = b r.two } | Two_proved r -> - Two_proved {r with one= s r.one; two= s r.two} + Two_proved { r with one = s r.one; two = s r.two } end (* In order to be compatible with the transaction pool (where transactions are stored in @@ -1039,7 +1051,7 @@ let nonce (t : t) = match p.self_predicate.nonce with | Ignore -> None - | Check {lower; upper} -> + | Check { lower; upper } -> if Account_nonce.equal lower upper then Some lower else None in let p x = T (x, pred) in @@ -1053,15 +1065,15 @@ let nonce (t : t) = in match t with | Proved_proved r -> - 
nonce r [p r.one; p r.two] + nonce r [ p r.one; p r.two ] | Proved_signed r -> - nonce r [p r.one; n r.two] + nonce r [ p r.one; n r.two ] | Proved_empty r -> - nonce r [p r.one] + nonce r [ p r.one ] | Signed_signed r -> - nonce r [n r.one; n r.two] + nonce r [ n r.one; n r.two ] | Signed_empty r -> - nonce r [n r.one] + nonce r [ n r.one ] let nonce_invariant t = match nonce t with @@ -1074,12 +1086,7 @@ let nonce_invariant t = (* TODO: Check that predicates are consistent. *) (* TODO: Check the predicates that can be checked (e.g., on fee_payment) *) let check (t : t) : unit Or_error.t = - let opt lab = function - | None -> - Or_error.error_string lab - | Some x -> - Ok x - in + let opt lab = function None -> Or_error.error_string lab | Some x -> Ok x in let open Or_error.Let_syntax in let open Party in let%bind _ = Or_error.try_with (fun () -> fee_exn t) in @@ -1112,13 +1119,12 @@ let check (t : t) : unit Or_error.t = return () in let check_both - ({token_id; fee_payment; one; two} : + ({ token_id; fee_payment; one; two } : ( ((_ Body.Poly.t, _) Predicated.Poly.t, _) Authorized.Poly.t , ((_ Body.Poly.t, _) Predicated.Poly.t, _) Authorized.Poly.t ) Inner.t) = let%bind excess = - opt "overflow" - (Amount.Signed.add one.data.body.delta two.data.body.delta) + opt "overflow" (Amount.Signed.add one.data.body.delta two.data.body.delta) in let%bind () = assert_ @@ -1128,10 +1134,9 @@ let check (t : t) : unit Or_error.t = fee_checks ~excess ~token_id ~fee_payment in let check_opt - ({token_id; fee_payment; one; two} : + ({ token_id; fee_payment; one; two } : ( ((_ Body.Poly.t, _) Predicated.Poly.t, _) Authorized.Poly.t - , ((_ Body.Poly.t, _) Predicated.Poly.t, _) Authorized.Poly.t option - ) + , ((_ Body.Poly.t, _) Predicated.Poly.t, _) Authorized.Poly.t option ) Inner.t) = let%bind excess = opt "overflow" @@ -1153,7 +1158,7 @@ let check (t : t) : unit Or_error.t = assert_ (List.for_all (accounts_accessed t) ~f:(fun aid -> Account_id.public_key aid |> 
Public_key.decompress - |> Option.is_some )) + |> Option.is_some)) "public keys for all accounts involved in the transaction must be valid" in fee_checks ~excess ~token_id ~fee_payment @@ -1174,84 +1179,94 @@ let check (t : t) : unit Or_error.t = let to_payload (t : t) : Payload.t = let opt x = Option.value_map x ~default:Party.Predicated.Signed.dummy - ~f:(fun {Party.Authorized.Poly.data; authorization= _} -> - {data with predicate= Party.Predicated.Signed.dummy.predicate} ) + ~f:(fun { Party.Authorized.Poly.data; authorization = _ } -> + { data with predicate = Party.Predicated.Signed.dummy.predicate }) in match t with | Proved_empty - {one= {data= one; authorization= _}; two; token_id; fee_payment} -> + { one = { data = one; authorization = _ }; two; token_id; fee_payment } -> One_proved - { second_starts_empty= true - ; second_ends_empty= Option.is_none two + { second_starts_empty = true + ; second_ends_empty = Option.is_none two ; one - ; two= opt two + ; two = opt two ; token_id - ; other_fee_payer_opt= - Option.map fee_payment ~f:(fun {payload; signature= _} -> payload) + ; other_fee_payer_opt = + Option.map fee_payment ~f:(fun { payload; signature = _ } -> + payload) } | Signed_empty - {one= {data= one; authorization= _}; two; token_id; fee_payment} -> + { one = { data = one; authorization = _ }; two; token_id; fee_payment } -> Zero_proved - { second_starts_empty= true - ; second_ends_empty= Option.is_none two + { second_starts_empty = true + ; second_ends_empty = Option.is_none two ; one - ; two= opt two + ; two = opt two ; token_id - ; other_fee_payer_opt= - Option.map fee_payment ~f:(fun {payload; signature= _} -> payload) + ; other_fee_payer_opt = + Option.map fee_payment ~f:(fun { payload; signature = _ } -> + payload) } | Signed_signed - { one= {data= one; authorization= _} - ; two= {data= two; authorization= _} + { one = { data = one; authorization = _ } + ; two = { data = two; authorization = _ } ; token_id - ; fee_payment } -> + ; fee_payment + } 
-> Zero_proved - { second_starts_empty= false - ; second_ends_empty= false + { second_starts_empty = false + ; second_ends_empty = false ; one ; two ; token_id - ; other_fee_payer_opt= - Option.map fee_payment ~f:(fun {payload; signature= _} -> payload) + ; other_fee_payer_opt = + Option.map fee_payment ~f:(fun { payload; signature = _ } -> + payload) } | Proved_signed - { one= {data= one; authorization= _} - ; two= {data= two; authorization= _} + { one = { data = one; authorization = _ } + ; two = { data = two; authorization = _ } ; token_id - ; fee_payment } -> + ; fee_payment + } -> One_proved - { second_starts_empty= false - ; second_ends_empty= false + { second_starts_empty = false + ; second_ends_empty = false ; one ; two ; token_id - ; other_fee_payer_opt= - Option.map fee_payment ~f:(fun {payload; signature= _} -> payload) + ; other_fee_payer_opt = + Option.map fee_payment ~f:(fun { payload; signature = _ } -> + payload) } | Proved_proved - { one= {data= one; authorization= _} - ; two= {data= two; authorization= _} + { one = { data = one; authorization = _ } + ; two = { data = two; authorization = _ } ; token_id - ; fee_payment } -> + ; fee_payment + } -> Two_proved - { second_starts_empty= false - ; second_ends_empty= false + { second_starts_empty = false + ; second_ends_empty = false ; one ; two ; token_id - ; other_fee_payer_opt= - Option.map fee_payment ~f:(fun {payload; signature= _} -> payload) + ; other_fee_payer_opt = + Option.map fee_payment ~f:(fun { payload; signature = _ } -> + payload) } -let signed_signed ?fee_payment ~token_id (signer1, data1) (signer2, data2) : t - = +let signed_signed ?fee_payment ~token_id (signer1, data1) (signer2, data2) : t = let r : _ Inner.t = - { one= {Party.Authorized.Poly.data= data1; authorization= Signature.dummy} - ; two= {Party.Authorized.Poly.data= data2; authorization= Signature.dummy} + { one = + { Party.Authorized.Poly.data = data1; authorization = Signature.dummy } + ; two = + { Party.Authorized.Poly.data 
= data2; authorization = Signature.dummy } ; token_id - ; fee_payment= + ; fee_payment = Option.map fee_payment ~f:(fun (_priv_key, payload) -> - {Other_fee_payer.payload; signature= Signature.dummy} ) } + { Other_fee_payer.payload; signature = Signature.dummy }) + } in let sign = let msg = @@ -1263,22 +1278,25 @@ let signed_signed ?fee_payment ~token_id (signer1, data1) (signer2, data2) : t in Signed_signed { r with - one= {r.one with authorization= sign signer1} - ; two= {r.two with authorization= sign signer2} - ; fee_payment= + one = { r.one with authorization = sign signer1 } + ; two = { r.two with authorization = sign signer2 } + ; fee_payment = Option.map2 fee_payment r.fee_payment ~f:(fun (sk, _) x -> - {x with signature= sign sk} ) } + { x with signature = sign sk }) + } let signed_empty ?fee_payment ?data2 ~token_id (signer1, data1) : t = let r : _ Inner.t = - { one= {Party.Authorized.Poly.data= data1; authorization= Signature.dummy} - ; two= + { one = + { Party.Authorized.Poly.data = data1; authorization = Signature.dummy } + ; two = Option.map data2 ~f:(fun data -> - {Party.Authorized.Poly.data; authorization= ()} ) + { Party.Authorized.Poly.data; authorization = () }) ; token_id - ; fee_payment= + ; fee_payment = Option.map fee_payment ~f:(fun (_priv_key, payload) -> - {Other_fee_payer.payload; signature= Signature.dummy} ) } + { Other_fee_payer.payload; signature = Signature.dummy }) + } in let sign = let msg = @@ -1290,10 +1308,11 @@ let signed_empty ?fee_payment ?data2 ~token_id (signer1, data1) : t = in Signed_empty { r with - one= {r.one with authorization= sign signer1} - ; fee_payment= + one = { r.one with authorization = sign signer1 } + ; fee_payment = Option.map2 fee_payment r.fee_payment ~f:(fun (sk, _) x -> - {x with signature= sign sk} ) } + { x with signature = sign sk }) + } module Base58_check = Codable.Make_base58_check (Stable.Latest) diff --git a/src/lib/mina_base/snapp_predicate.ml b/src/lib/mina_base/snapp_predicate.ml index 
362c5bf62cd..abef7903dd5 100644 --- a/src/lib/mina_base/snapp_predicate.ml +++ b/src/lib/mina_base/snapp_predicate.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -30,16 +28,16 @@ module Closed_interval = struct [%%versioned module Stable = struct module V1 = struct - type 'a t = {lower: 'a; upper: 'a} + type 'a t = { lower : 'a; upper : 'a } [@@deriving sexp, equal, compare, hash, yojson, hlist] end end] - let to_input {lower; upper} ~f = + let to_input { lower; upper } ~f = Random_oracle_input.append (f lower) (f upper) let typ x = - Typ.of_hlistable [x; x] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist + Typ.of_hlistable [ x; x ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -51,15 +49,16 @@ let assert_ b e = if b then Ok () else Or_error.error_string e module Numeric = struct module Tc = struct type ('var, 'a) t = - { zero: 'a - ; max_value: 'a - ; compare: 'a -> 'a -> int - ; equal: 'a -> 'a -> bool - ; typ: ('var, 'a) Typ.t - ; to_input: 'a -> (F.t, bool) Random_oracle_input.t - ; to_input_checked: + { zero : 'a + ; max_value : 'a + ; compare : 'a -> 'a -> int + ; equal : 'a -> 'a -> bool + ; typ : ('var, 'a) Typ.t + ; to_input : 'a -> (F.t, bool) Random_oracle_input.t + ; to_input_checked : 'var -> (Field.Var.t, Boolean.var) Random_oracle_input.t - ; lte_checked: 'var -> 'var -> Boolean.var } + ; lte_checked : 'var -> 'var -> Boolean.var + } let run f x y = Impl.run_checked (f x y) @@ -68,80 +67,87 @@ module Numeric = struct { zero ; max_value ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; equal ; typ ; to_input - ; to_input_checked= Fn.compose Impl.run_checked Checked.to_input } + ; to_input_checked = Fn.compose Impl.run_checked Checked.to_input + } let amount = Currency.Amount. 
{ zero - ; max_value= max_int + ; max_value = max_int ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; equal ; typ ; to_input - ; to_input_checked= var_to_input } + ; to_input_checked = var_to_input + } let balance = Currency.Balance. { zero - ; max_value= max_int + ; max_value = max_int ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; equal ; typ ; to_input - ; to_input_checked= var_to_input } + ; to_input_checked = var_to_input + } let nonce = Account_nonce. { zero ; max_value ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; equal ; typ ; to_input - ; to_input_checked= Fn.compose Impl.run_checked Checked.to_input } + ; to_input_checked = Fn.compose Impl.run_checked Checked.to_input + } let global_slot = Global_slot. { zero ; max_value ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; equal ; typ ; to_input - ; to_input_checked= Fn.compose Impl.run_checked Checked.to_input } + ; to_input_checked = Fn.compose Impl.run_checked Checked.to_input + } let token_id = Token_id. - { zero= of_uint64 Unsigned.UInt64.zero - ; max_value= of_uint64 Unsigned.UInt64.max_int + { zero = of_uint64 Unsigned.UInt64.zero + ; max_value = of_uint64 Unsigned.UInt64.max_int ; equal ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; typ ; to_input - ; to_input_checked= Fn.compose Impl.run_checked Checked.to_input } + ; to_input_checked = Fn.compose Impl.run_checked Checked.to_input + } let time = Block_time. 
{ equal ; compare - ; lte_checked= run Checked.( <= ) + ; lte_checked = run Checked.( <= ) ; zero ; max_value - ; typ= Unpacked.typ - ; to_input= Fn.compose Random_oracle_input.bitstring Bits.to_bits - ; to_input_checked= + ; typ = Unpacked.typ + ; to_input = Fn.compose Random_oracle_input.bitstring Bits.to_bits + ; to_input_checked = (fun x -> Random_oracle_input.bitstring - (Unpacked.var_to_bits x :> Boolean.var list) ) } + (Unpacked.var_to_bits x :> Boolean.var list)) + } end open Tc @@ -154,34 +160,38 @@ module Numeric = struct end end] - let to_input {zero; max_value; to_input; _} (t : 'a t) = + let to_input { zero; max_value; to_input; _ } (t : 'a t) = Closed_interval.to_input ~f:to_input - (match t with Check x -> x | Ignore -> {lower= zero; upper= max_value}) + ( match t with + | Check x -> + x + | Ignore -> + { lower = zero; upper = max_value } ) module Checked = struct type 'a t = 'a Closed_interval.t Or_ignore.Checked.t - let to_input {to_input_checked; _} (t : 'a t) = + let to_input { to_input_checked; _ } (t : 'a t) = Or_ignore.Checked.to_input t ~f:(Closed_interval.to_input ~f:to_input_checked) open Impl - let check {lte_checked= ( <= ); _} (t : 'a t) (x : 'a) = - Or_ignore.Checked.check t ~f:(fun {lower; upper} -> - Boolean.all [lower <= x; x <= upper] ) + let check { lte_checked = ( <= ); _ } (t : 'a t) (x : 'a) = + Or_ignore.Checked.check t ~f:(fun { lower; upper } -> + Boolean.all [ lower <= x; x <= upper ]) end - let typ {equal= eq; zero; max_value; typ; _} = + let typ { equal = eq; zero; max_value; typ; _ } = Or_ignore.typ_implicit (Closed_interval.typ typ) ~equal:(Closed_interval.equal eq) - ~ignore:{Closed_interval.lower= zero; upper= max_value} + ~ignore:{ Closed_interval.lower = zero; upper = max_value } - let check ~label {compare; _} (t : 'a t) (x : 'a) = + let check ~label { compare; _ } (t : 'a t) (x : 'a) = match t with | Ignore -> Ok () - | Check {lower; upper} -> + | Check { lower; upper } -> if compare lower x <= 0 && compare x upper 
<= 0 then Ok () else Or_error.errorf "Bounds check failed: %s" label end @@ -191,13 +201,14 @@ module Eq_data = struct module Tc = struct type ('var, 'a) t = - { equal: 'a -> 'a -> bool - ; equal_checked: 'var -> 'var -> Boolean.var - ; default: 'a - ; typ: ('var, 'a) Typ.t - ; to_input: 'a -> (F.t, bool) Random_oracle_input.t - ; to_input_checked: - 'var -> (Field.Var.t, Boolean.var) Random_oracle_input.t } + { equal : 'a -> 'a -> bool + ; equal_checked : 'var -> 'var -> Boolean.var + ; default : 'a + ; typ : ('var, 'a) Typ.t + ; to_input : 'a -> (F.t, bool) Random_oracle_input.t + ; to_input_checked : + 'var -> (Field.Var.t, Boolean.var) Random_oracle_input.t + } let run f x y = Impl.run_checked (f x y) @@ -206,80 +217,87 @@ module Eq_data = struct Field. { typ ; equal - ; equal_checked= run Checked.equal - ; default= zero - ; to_input= field - ; to_input_checked= field } + ; equal_checked = run Checked.equal + ; default = zero + ; to_input = field + ; to_input_checked = field + } let receipt_chain_hash = Receipt.Chain_hash. { field with - to_input_checked= var_to_input + to_input_checked = var_to_input ; typ ; equal - ; equal_checked= run equal_var } + ; equal_checked = run equal_var + } let ledger_hash = Ledger_hash. { field with - to_input_checked= var_to_input + to_input_checked = var_to_input ; typ ; equal - ; equal_checked= run equal_var } + ; equal_checked = run equal_var + } let frozen_ledger_hash = Frozen_ledger_hash. { field with - to_input_checked= var_to_input + to_input_checked = var_to_input ; typ ; equal - ; equal_checked= run equal_var } + ; equal_checked = run equal_var + } let state_hash = State_hash. { field with - to_input_checked= var_to_input + to_input_checked = var_to_input ; typ ; equal - ; equal_checked= run equal_var } + ; equal_checked = run equal_var + } let epoch_seed = Epoch_seed. 
{ field with - to_input_checked= var_to_input + to_input_checked = var_to_input ; typ ; equal - ; equal_checked= run equal_var } + ; equal_checked = run equal_var + } let public_key () = Public_key.Compressed. - { default= Lazy.force invalid_public_key + { default = Lazy.force invalid_public_key ; to_input - ; to_input_checked= Checked.to_input - ; equal_checked= run Checked.equal + ; to_input_checked = Checked.to_input + ; equal_checked = run Checked.equal ; typ - ; equal } + ; equal + } end - let to_input ~explicit {Tc.default; to_input; _} (t : _ t) = + let to_input ~explicit { Tc.default; to_input; _ } (t : _ t) = if explicit then Flagged_option.to_input' ~f:to_input ( match t with | Ignore -> - {is_some= false; data= default} + { is_some = false; data = default } | Check data -> - {is_some= true; data} ) + { is_some = true; data } ) else to_input (match t with Ignore -> default | Check x -> x) let to_input_explicit tc = to_input ~explicit:true tc - let to_input_checked {Tc.to_input_checked; _} (t : _ Checked.t) = + let to_input_checked { Tc.to_input_checked; _ } (t : _ Checked.t) = Checked.to_input t ~f:to_input_checked - let check_checked {Tc.equal_checked; _} (t : 'a Checked.t) (x : 'a) = + let check_checked { Tc.equal_checked; _ } (t : 'a Checked.t) (x : 'a) = Checked.check t ~f:(equal_checked x) - let check ~label {Tc.equal; _} (t : 'a t) (x : 'a) = + let check ~label { Tc.equal; _ } (t : 'a t) (x : 'a) = match t with | Ignore -> Ok () @@ -287,10 +305,10 @@ module Eq_data = struct if equal x y then Ok () else Or_error.errorf "Equality check failed: %s" label - let typ_implicit {Tc.equal; default= ignore; typ; _} = + let typ_implicit { Tc.equal; default = ignore; typ; _ } = typ_implicit ~equal ~ignore typ - let typ_explicit {Tc.default= ignore; typ; _} = typ_explicit ~ignore typ + let typ_explicit { Tc.default = ignore; typ; _ } = typ_explicit ~ignore typ end module Hash = struct @@ -341,12 +359,13 @@ module Account = struct module Stable = struct module V1 
= struct type ('balance, 'nonce, 'receipt_chain_hash, 'pk, 'field) t = - { balance: 'balance - ; nonce: 'nonce - ; receipt_chain_hash: 'receipt_chain_hash - ; public_key: 'pk - ; delegate: 'pk - ; state: 'field Snapp_state.V.Stable.V1.t } + { balance : 'balance + ; nonce : 'nonce + ; receipt_chain_hash : 'receipt_chain_hash + ; public_key : 'pk + ; delegate : 'pk + ; state : 'field Snapp_state.V.Stable.V1.t + } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -369,17 +388,18 @@ module Account = struct end] let accept : t = - { balance= Ignore - ; nonce= Ignore - ; receipt_chain_hash= Ignore - ; public_key= Ignore - ; delegate= Ignore - ; state= + { balance = Ignore + ; nonce = Ignore + ; receipt_chain_hash = Ignore + ; public_key = Ignore + ; delegate = Ignore + ; state = Vector.init Snapp_state.Max_state_size.n ~f:(fun _ -> Or_ignore.Ignore) } let to_input - ({balance; nonce; receipt_chain_hash; public_key; delegate; state} : t) = + ({ balance; nonce; receipt_chain_hash; public_key; delegate; state } : t) + = let open Random_oracle_input in List.reduce_exn ~f:append [ Numeric.(to_input Tc.balance balance) @@ -388,7 +408,8 @@ module Account = struct ; Eq_data.(to_input_explicit (Tc.public_key ()) public_key) ; Eq_data.(to_input_explicit (Tc.public_key ()) delegate) ; Vector.reduce_exn ~f:append - (Vector.map state ~f:Eq_data.(to_input_explicit Tc.field)) ] + (Vector.map state ~f:Eq_data.(to_input_explicit Tc.field)) + ] let digest t = Random_oracle.( @@ -404,8 +425,8 @@ module Account = struct Poly.Stable.Latest.t let to_input - ({balance; nonce; receipt_chain_hash; public_key; delegate; state} : t) - = + ({ balance; nonce; receipt_chain_hash; public_key; delegate; state } : + t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Numeric.(Checked.to_input Tc.balance balance) @@ -414,12 +435,13 @@ module Account = struct ; Eq_data.(to_input_checked (Tc.public_key ()) public_key) ; Eq_data.(to_input_checked (Tc.public_key ()) delegate) ; 
Vector.reduce_exn ~f:append - (Vector.map state ~f:Eq_data.(to_input_checked Tc.field)) ] + (Vector.map state ~f:Eq_data.(to_input_checked Tc.field)) + ] open Impl let check_nonsnapp - ({balance; nonce; receipt_chain_hash; public_key; delegate; state= _} : + ({ balance; nonce; receipt_chain_hash; public_key; delegate; state = _ } : t) (a : Account.Checked.Unhashed.t) = Boolean.all [ Numeric.(Checked.check Tc.balance balance a.balance) @@ -428,15 +450,17 @@ module Account = struct check_checked Tc.receipt_chain_hash receipt_chain_hash a.receipt_chain_hash) ; Eq_data.(check_checked (Tc.public_key ()) delegate a.delegate) - ; Eq_data.(check_checked (Tc.public_key ()) public_key a.public_key) ] + ; Eq_data.(check_checked (Tc.public_key ()) public_key a.public_key) + ] let check_snapp - ({ balance= _ - ; nonce= _ - ; receipt_chain_hash= _ - ; public_key= _ - ; delegate= _ - ; state } : + ({ balance = _ + ; nonce = _ + ; receipt_chain_hash = _ + ; public_key = _ + ; delegate = _ + ; state + } : t) (snapp : Snapp_account.Checked.t) = Boolean.all Vector.( @@ -445,8 +469,7 @@ module Account = struct let digest (t : t) = Random_oracle.Checked.( - hash ~init:Hash_prefix.snapp_predicate_account - (pack_input (to_input t))) + hash ~init:Hash_prefix.snapp_predicate_account (pack_input (to_input t))) end let typ () : (Checked.t, Stable.Latest.t) Typ.t = @@ -460,12 +483,13 @@ module Account = struct ; public_key () ; Snapp_state.typ (Or_ignore.typ_implicit Field.typ ~equal:Field.equal - ~ignore:Field.zero) ] + ~ignore:Field.zero) + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let check - ({balance; nonce; receipt_chain_hash; public_key; delegate; state} : t) + ({ balance; nonce; receipt_chain_hash; public_key; delegate; state } : t) (a : Account.t) = let open Or_error.Let_syntax in let%bind () = @@ -498,7 +522,7 @@ module Account = struct let%map () = Eq_data.(check Tc.field ~label:(sprintf "state[%d]" i) c v) in - i + 1 ) + i + 
1) >>| ignore in return () @@ -507,7 +531,7 @@ end module Protocol_state = struct (* On each numeric field, you may assert a range On each hash field, you may assert an equality - *) + *) module Epoch_data = struct module Poly = Epoch_data.Poly @@ -532,11 +556,12 @@ module Protocol_state = struct end] let to_input - ({ ledger= {hash; total_currency} + ({ ledger = { hash; total_currency } ; seed ; start_checkpoint ; lock_checkpoint - ; epoch_length } : + ; epoch_length + } : t) = let open Random_oracle.Input in List.reduce_exn ~f:append @@ -545,7 +570,8 @@ module Protocol_state = struct ; Hash.(to_input Tc.epoch_seed seed) ; Hash.(to_input Tc.state_hash start_checkpoint) ; Hash.(to_input Tc.state_hash lock_checkpoint) - ; Numeric.(to_input Tc.length epoch_length) ] + ; Numeric.(to_input Tc.length epoch_length) + ] module Checked = struct type t = @@ -559,11 +585,12 @@ module Protocol_state = struct Poly.t let to_input - ({ ledger= {hash; total_currency} + ({ ledger = { hash; total_currency } ; seed ; start_checkpoint ; lock_checkpoint - ; epoch_length } : + ; epoch_length + } : t) = let open Random_oracle.Input in List.reduce_exn ~f:append @@ -572,7 +599,8 @@ module Protocol_state = struct ; Hash.(to_input_checked Tc.epoch_seed seed) ; Hash.(to_input_checked Tc.state_hash start_checkpoint) ; Hash.(to_input_checked Tc.state_hash lock_checkpoint) - ; Numeric.(Checked.to_input Tc.length epoch_length) ] + ; Numeric.(Checked.to_input Tc.length epoch_length) + ] end end @@ -590,28 +618,29 @@ module Protocol_state = struct , 'epoch_data ) t = { (* TODO: - We should include staged ledger hash again! It only changes once per - block. *) - snarked_ledger_hash: 'snarked_ledger_hash - ; snarked_next_available_token: 'token_id - ; timestamp: 'time - ; blockchain_length: 'length + We should include staged ledger hash again! It only changes once per + block. 
*) + snarked_ledger_hash : 'snarked_ledger_hash + ; snarked_next_available_token : 'token_id + ; timestamp : 'time + ; blockchain_length : 'length (* TODO: This previously had epoch_count but I removed it as I believe it is redundant - with curr_global_slot. - - epoch_count in [a, b] - - should be equivalent to - - curr_global_slot in [slots_per_epoch * a, slots_per_epoch * b] -*) - ; min_window_density: 'length - ; last_vrf_output: 'vrf_output - ; total_currency: 'amount - ; curr_global_slot: 'global_slot - ; global_slot_since_genesis: 'global_slot - ; staking_epoch_data: 'epoch_data - ; next_epoch_data: 'epoch_data } + with curr_global_slot. + + epoch_count in [a, b] + + should be equivalent to + + curr_global_slot in [slots_per_epoch * a, slots_per_epoch * b] + *) + ; min_window_density : 'length + ; last_vrf_output : 'vrf_output + ; total_currency : 'amount + ; curr_global_slot : 'global_slot + ; global_slot_since_genesis : 'global_slot + ; staking_epoch_data : 'epoch_data + ; next_epoch_data : 'epoch_data + } [@@deriving hlist, sexp, equal, yojson, hash, compare, fields] end end] @@ -647,7 +676,8 @@ module Protocol_state = struct ; curr_global_slot ; global_slot_since_genesis ; staking_epoch_data - ; next_epoch_data } : + ; next_epoch_data + } : t) = let open Random_oracle.Input in let () = last_vrf_output in @@ -662,7 +692,8 @@ module Protocol_state = struct ; Numeric.(to_input Tc.global_slot curr_global_slot) ; Numeric.(to_input Tc.global_slot global_slot_since_genesis) ; Epoch_data.to_input staking_epoch_data - ; Epoch_data.to_input next_epoch_data ] + ; Epoch_data.to_input next_epoch_data + ] let digest t = Random_oracle.( @@ -738,7 +769,8 @@ module Protocol_state = struct ; curr_global_slot ; global_slot_since_genesis ; staking_epoch_data - ; next_epoch_data } : + ; next_epoch_data + } : t) = let open Random_oracle.Input in let () = last_vrf_output in @@ -753,7 +785,8 @@ module Protocol_state = struct ; Numeric.(Checked.to_input Tc.global_slot 
curr_global_slot) ; Numeric.(Checked.to_input Tc.global_slot global_slot_since_genesis) ; Epoch_data.Checked.to_input staking_epoch_data - ; Epoch_data.Checked.to_input next_epoch_data ] + ; Epoch_data.Checked.to_input next_epoch_data + ] let digest t = Random_oracle.Checked.( @@ -762,34 +795,36 @@ module Protocol_state = struct let check (* Bind all the fields explicity so we make sure they are all used. *) - ({ snarked_ledger_hash - ; snarked_next_available_token - ; timestamp - ; blockchain_length - ; min_window_density - ; last_vrf_output - ; total_currency - ; curr_global_slot - ; global_slot_since_genesis - ; staking_epoch_data - ; next_epoch_data } : - t) (s : View.Checked.t) = + ({ snarked_ledger_hash + ; snarked_next_available_token + ; timestamp + ; blockchain_length + ; min_window_density + ; last_vrf_output + ; total_currency + ; curr_global_slot + ; global_slot_since_genesis + ; staking_epoch_data + ; next_epoch_data + } : + t) (s : View.Checked.t) = let open Impl in - let epoch_ledger ({hash; total_currency} : _ Epoch_ledger.Poly.t) + let epoch_ledger ({ hash; total_currency } : _ Epoch_ledger.Poly.t) (t : Epoch_ledger.var) = [ Hash.(check_checked Tc.frozen_ledger_hash) hash t.hash - ; Numeric.(Checked.check Tc.amount) total_currency t.total_currency ] + ; Numeric.(Checked.check Tc.amount) total_currency t.total_currency + ] in let epoch_data - ({ledger; seed; start_checkpoint; lock_checkpoint; epoch_length} : + ({ ledger; seed; start_checkpoint; lock_checkpoint; epoch_length } : _ Epoch_data.Poly.t) (t : _ Epoch_data.Poly.t) = ignore seed ; epoch_ledger ledger t.ledger @ [ Hash.(check_checked Tc.state_hash) start_checkpoint t.start_checkpoint - ; Hash.(check_checked Tc.state_hash) - lock_checkpoint t.lock_checkpoint - ; Numeric.(Checked.check Tc.length) epoch_length t.epoch_length ] + ; Hash.(check_checked Tc.state_hash) lock_checkpoint t.lock_checkpoint + ; Numeric.(Checked.check Tc.length) epoch_length t.epoch_length + ] in ignore last_vrf_output ; 
Boolean.all @@ -806,7 +841,8 @@ module Protocol_state = struct ; Numeric.(Checked.check Tc.global_slot) curr_global_slot s.curr_global_slot ; Numeric.(Checked.check Tc.global_slot) - global_slot_since_genesis s.global_slot_since_genesis ] + global_slot_since_genesis s.global_slot_since_genesis + ] @ epoch_data staking_epoch_data s.staking_epoch_data @ epoch_data next_epoch_data s.next_epoch_data ) end @@ -825,13 +861,13 @@ module Protocol_state = struct let epoch_ledger = let open Epoch_ledger.Poly in Typ.of_hlistable - [frozen_ledger_hash; amount] - ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist - ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + [ frozen_ledger_hash; amount ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist + ~value_of_hlist:of_hlist in let open Epoch_data.Poly in Typ.of_hlistable - [epoch_ledger; epoch_seed; state_hash; state_hash; length] + [ epoch_ledger; epoch_seed; state_hash; state_hash; length ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist in @@ -846,46 +882,50 @@ module Protocol_state = struct ; global_slot ; global_slot ; epoch_data - ; epoch_data ] + ; epoch_data + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let accept : t = let epoch_data : Epoch_data.t = - { ledger= {hash= Ignore; total_currency= Ignore} - ; seed= Ignore - ; start_checkpoint= Ignore - ; lock_checkpoint= Ignore - ; epoch_length= Ignore } + { ledger = { hash = Ignore; total_currency = Ignore } + ; seed = Ignore + ; start_checkpoint = Ignore + ; lock_checkpoint = Ignore + ; epoch_length = Ignore + } in - { snarked_ledger_hash= Ignore - ; snarked_next_available_token= Ignore - ; timestamp= Ignore - ; blockchain_length= Ignore - ; min_window_density= Ignore - ; last_vrf_output= () - ; total_currency= Ignore - ; curr_global_slot= Ignore - ; global_slot_since_genesis= Ignore - ; staking_epoch_data= epoch_data - ; next_epoch_data= epoch_data } + { 
snarked_ledger_hash = Ignore + ; snarked_next_available_token = Ignore + ; timestamp = Ignore + ; blockchain_length = Ignore + ; min_window_density = Ignore + ; last_vrf_output = () + ; total_currency = Ignore + ; curr_global_slot = Ignore + ; global_slot_since_genesis = Ignore + ; staking_epoch_data = epoch_data + ; next_epoch_data = epoch_data + } let check (* Bind all the fields explicity so we make sure they are all used. *) - ({ snarked_ledger_hash - ; snarked_next_available_token - ; timestamp - ; blockchain_length - ; min_window_density - ; last_vrf_output - ; total_currency - ; curr_global_slot - ; global_slot_since_genesis - ; staking_epoch_data - ; next_epoch_data } : - t) (s : View.t) = + ({ snarked_ledger_hash + ; snarked_next_available_token + ; timestamp + ; blockchain_length + ; min_window_density + ; last_vrf_output + ; total_currency + ; curr_global_slot + ; global_slot_since_genesis + ; staking_epoch_data + ; next_epoch_data + } : + t) (s : View.t) = let open Or_error.Let_syntax in - let epoch_ledger ({hash; total_currency} : _ Epoch_ledger.Poly.t) + let epoch_ledger ({ hash; total_currency } : _ Epoch_ledger.Poly.t) (t : Epoch_ledger.Value.t) = let%bind () = Hash.(check ~label:"epoch_ledger_hash" Tc.frozen_ledger_hash) @@ -898,7 +938,7 @@ module Protocol_state = struct () in let epoch_data label - ({ledger; seed; start_checkpoint; lock_checkpoint; epoch_length} : + ({ ledger; seed; start_checkpoint; lock_checkpoint; epoch_length } : _ Epoch_data.Poly.t) (t : _ Epoch_data.Poly.t) = let l s = sprintf "%s_%s" label s in let%bind () = epoch_ledger ledger t.ledger in @@ -987,46 +1027,46 @@ module Account_type = struct let to_bits = function | User -> - [true; false] + [ true; false ] | Snapp -> - [false; true] + [ false; true ] | None -> - [false; false] + [ false; false ] | Any -> - [true; true] + [ true; true ] let of_bits = function - | [user; snapp] -> ( - match (user, snapp) with - | true, false -> - User - | false, true -> - Snapp - | false, 
false -> - None - | true, true -> - Any ) + | [ user; snapp ] -> ( + match (user, snapp) with + | true, false -> + User + | false, true -> + Snapp + | false, false -> + None + | true, true -> + Any ) | _ -> assert false let to_input x = Random_oracle_input.bitstring (to_bits x) module Checked = struct - type t = {user: Boolean.var; snapp: Boolean.var} [@@deriving hlist] + type t = { user : Boolean.var; snapp : Boolean.var } [@@deriving hlist] - let to_input {user; snapp} = Random_oracle_input.bitstring [user; snapp] + let to_input { user; snapp } = Random_oracle_input.bitstring [ user; snapp ] let constant = let open Boolean in function | User -> - {user= true_; snapp= false_} + { user = true_; snapp = false_ } | Snapp -> - {user= false_; snapp= true_} + { user = false_; snapp = true_ } | None -> - {user= false_; snapp= false_} + { user = false_; snapp = false_ } | Any -> - {user= true_; snapp= true_} + { user = true_; snapp = true_ } (* TODO: Write a unit test for these. *) let snapp_allowed t = t.snapp @@ -1036,18 +1076,19 @@ module Account_type = struct let typ = let open Checked in - Typ.of_hlistable [Boolean.typ; Boolean.typ] ~var_to_hlist:to_hlist - ~var_of_hlist:of_hlist + Typ.of_hlistable + [ Boolean.typ; Boolean.typ ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:(function | User -> - [true; false] + [ true; false ] | Snapp -> - [false; true] + [ false; true ] | None -> - [false; false] + [ false; false ] | Any -> - [true; true] ) - ~value_of_hlist:(fun [user; snapp] -> + [ true; true ]) + ~value_of_hlist:(fun [ user; snapp ] -> match (user, snapp) with | true, false -> User @@ -1056,7 +1097,7 @@ module Account_type = struct | false, false -> None | true, true -> - Any ) + Any) end module Other = struct @@ -1065,9 +1106,10 @@ module Other = struct module Stable = struct module V1 = struct type ('account, 'account_transition, 'vk) t = - { predicate: 'account - ; account_transition: 'account_transition - ; account_vk: 'vk } + { predicate : 
'account + ; account_transition : 'account_transition + ; account_vk : 'vk + } [@@deriving hlist, sexp, equal, yojson, hash, compare] end end] @@ -1094,33 +1136,36 @@ module Other = struct , Field.Var.t Or_ignore.Checked.t ) Poly.Stable.Latest.t - let to_input ({predicate; account_transition; account_vk} : t) = + let to_input ({ predicate; account_transition; account_vk } : t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Account.Checked.to_input predicate ; Transition.to_input ~f:Account_state.Checked.to_input account_transition - ; Hash.(to_input_checked Tc.field) account_vk ] + ; Hash.(to_input_checked Tc.field) account_vk + ] end - let to_input ({predicate; account_transition; account_vk} : t) = + let to_input ({ predicate; account_transition; account_vk } : t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Account.to_input predicate ; Transition.to_input ~f:Account_state.to_input account_transition - ; Hash.(to_input Tc.field) account_vk ] + ; Hash.(to_input Tc.field) account_vk + ] let typ () = let open Poly in Typ.of_hlistable - [Account.typ (); Transition.typ Account_state.typ; Hash.(typ Tc.field)] + [ Account.typ (); Transition.typ Account_state.typ; Hash.(typ Tc.field) ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let accept : t = - { predicate= Account.accept - ; account_transition= {prev= Any; next= Any} - ; account_vk= Ignore } + { predicate = Account.accept + ; account_transition = { prev = Any; next = Any } + ; account_vk = Ignore + } end module Poly = struct @@ -1128,10 +1173,11 @@ module Poly = struct module Stable = struct module V1 = struct type ('account, 'protocol_state, 'other, 'pk) t = - { self_predicate: 'account - ; other: 'other - ; fee_payer: 'pk - ; protocol_state_predicate: 'protocol_state } + { self_predicate : 'account + ; other : 'other + ; fee_payer : 'pk + ; protocol_state_predicate : 'protocol_state + } [@@deriving hlist, sexp, equal, yojson, hash, 
compare] let to_latest = Fn.id @@ -1161,28 +1207,28 @@ end] module Digested = F -let to_input ({self_predicate; other; fee_payer; protocol_state_predicate} : t) - = +let to_input + ({ self_predicate; other; fee_payer; protocol_state_predicate } : t) = let open Random_oracle_input in List.reduce_exn ~f:append [ Account.to_input self_predicate ; Other.to_input other ; Eq_data.(to_input_explicit (Tc.public_key ())) fee_payer - ; Protocol_state.to_input protocol_state_predicate ] + ; Protocol_state.to_input protocol_state_predicate + ] let digest t = Random_oracle.( hash ~init:Hash_prefix.snapp_predicate (pack_input (to_input t))) -let check ({self_predicate; other; fee_payer; protocol_state_predicate} : t) +let check ({ self_predicate; other; fee_payer; protocol_state_predicate } : t) ~state_view ~self ~(other_prev : A.t option) ~(other_next : unit option) ~fee_payer_pk = let open Or_error.Let_syntax in let%bind () = Protocol_state.check protocol_state_predicate state_view in let%bind () = Account.check self_predicate self in let%bind () = - Eq_data.(check (Tc.public_key ())) - ~label:"fee_payer" fee_payer fee_payer_pk + Eq_data.(check (Tc.public_key ())) ~label:"fee_payer" fee_payer fee_payer_pk in let%bind () = let check (s : Account_state.t) (a : _ option) = @@ -1213,10 +1259,11 @@ let check ({self_predicate; other; fee_payer; protocol_state_predicate} : t) return () let accept : t = - { self_predicate= Account.accept - ; other= Other.accept - ; fee_payer= Ignore - ; protocol_state_predicate= Protocol_state.accept } + { self_predicate = Account.accept + ; other = Other.accept + ; fee_payer = Ignore + ; protocol_state_predicate = Protocol_state.accept + } module Checked = struct type t = @@ -1227,13 +1274,14 @@ module Checked = struct Poly.Stable.Latest.t let to_input - ({self_predicate; other; fee_payer; protocol_state_predicate} : t) = + ({ self_predicate; other; fee_payer; protocol_state_predicate } : t) = let open Random_oracle_input in List.reduce_exn ~f:append 
[ Account.Checked.to_input self_predicate ; Other.Checked.to_input other ; Eq_data.(to_input_checked (Tc.public_key ())) fee_payer - ; Protocol_state.Checked.to_input protocol_state_predicate ] + ; Protocol_state.Checked.to_input protocol_state_predicate + ] let digest t = Random_oracle.Checked.( @@ -1245,4 +1293,5 @@ let typ () : (Checked.t, Stable.Latest.t) Typ.t = [ Account.typ () ; Other.typ () ; Eq_data.(typ_explicit (Tc.public_key ())) - ; Protocol_state.typ ] + ; Protocol_state.typ + ] diff --git a/src/lib/mina_base/snapp_state.ml b/src/lib/mina_base/snapp_state.ml index fb321909127..526445418d4 100644 --- a/src/lib/mina_base/snapp_state.ml +++ b/src/lib/mina_base/snapp_state.ml @@ -30,7 +30,8 @@ module V = struct end let () = - let _f : type a. + let _f : + type a. unit -> (a V.t, a Vector.With_length(Max_state_size).t) Type_equal.t = fun () -> Type_equal.T in diff --git a/src/lib/mina_base/snapp_statement.ml b/src/lib/mina_base/snapp_statement.ml index 634cce70788..f6bf2e28995 100644 --- a/src/lib/mina_base/snapp_statement.ml +++ b/src/lib/mina_base/snapp_statement.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick module Mina_numbers = Mina_numbers @@ -31,7 +29,7 @@ module Poly = struct module Stable = struct module V1 = struct type ('predicate, 'body) t = - {predicate: 'predicate; body1: 'body; body2: 'body} + { predicate : 'predicate; body1 : 'body; body2 : 'body } [@@deriving hlist, sexp] let to_latest = Fn.id @@ -65,7 +63,7 @@ module Checked = struct , (Snapp_command.Party.Body.Checked.t, Field.t Set_once.t) With_hash.t ) Poly.Stable.Latest.t - let to_field_elements ({predicate; body1; body2} : t) : Field.t array = + let to_field_elements ({ predicate; body1; body2 } : t) : Field.t array = let f hash x = let s = With_hash.hash x in match Set_once.get s with @@ -79,28 +77,30 @@ module Checked = struct let predicate = f 
Snapp_predicate.Checked.digest predicate in let body1 = f Snapp_command.Party.Body.Checked.digest body1 in let body2 = f Snapp_command.Party.Body.Checked.digest body2 in - [|predicate; body1; body2|] + [| predicate; body1; body2 |] end -let to_field_elements ({predicate; body1; body2} : t) : Field.t array = +let to_field_elements ({ predicate; body1; body2 } : t) : Field.t array = let predicate = Snapp_predicate.digest predicate in let body1 = Snapp_command.Party.Body.digest body1 in let body2 = Snapp_command.Party.Body.digest body2 in - [|predicate; body1; body2|] + [| predicate; body1; body2 |] let typ : (Checked.t, t) Typ.t = Poly.typ [ Predicate.typ () ; Snapp_command.Party.Body.typ () - ; Snapp_command.Party.Body.typ () ] + ; Snapp_command.Party.Body.typ () + ] |> Typ.transport_var - ~there:(fun ({predicate; body1; body2} : Checked.t) -> - { Poly.predicate= With_hash.data predicate - ; body1= With_hash.data body1 - ; body2= With_hash.data body2 } ) - ~back:(fun ({predicate; body1; body2} : _ Poly.t) -> + ~there:(fun ({ predicate; body1; body2 } : Checked.t) -> + { Poly.predicate = With_hash.data predicate + ; body1 = With_hash.data body1 + ; body2 = With_hash.data body2 + }) + ~back:(fun ({ predicate; body1; body2 } : _ Poly.t) -> let f = With_hash.of_data ~hash_data:(fun _ -> Set_once.create ()) in - {Poly.predicate= f predicate; body1= f body1; body2= f body2} ) + { Poly.predicate = f predicate; body1 = f body1; body2 = f body2 }) open Snapp_basic @@ -108,11 +108,12 @@ module Complement = struct module One_proved = struct module Poly = struct type ('bool, 'token_id, 'fee_payer_opt, 'nonce) t = - { second_starts_empty: 'bool - ; second_ends_empty: 'bool - ; token_id: 'token_id - ; account2_nonce: 'nonce - ; other_fee_payer_opt: 'fee_payer_opt } + { second_starts_empty : 'bool + ; second_ends_empty : 'bool + ; token_id : 'token_id + ; account2_nonce : 'nonce + ; other_fee_payer_opt : 'fee_payer_opt + } [@@deriving hlist, sexp, equal, yojson, hash, compare] end 
@@ -131,8 +132,9 @@ module Complement = struct ; second_ends_empty ; token_id ; account2_nonce - ; other_fee_payer_opt } : - t) ~one:({predicate; body1; body2} as one : Checked.t) : + ; other_fee_payer_opt + } : + t) ~one:({ predicate; body1; body2 } as one : Checked.t) : Snapp_command.Payload.One_proved.Digested.Checked.t = let (_ : Pickles.Impls.Step.Field.t array) = Checked.to_field_elements one @@ -142,8 +144,9 @@ module Complement = struct ; second_ends_empty ; token_id ; other_fee_payer_opt - ; one= {predicate= !predicate; body= !body1} - ; two= {predicate= account2_nonce; body= !body2} } + ; one = { predicate = !predicate; body = !body1 } + ; two = { predicate = account2_nonce; body = !body2 } + } end type t = @@ -165,7 +168,8 @@ module Complement = struct ~there: (Flagged_option.of_option ~default:Other_fee_payer.Payload.dummy) - ~back:Flagged_option.to_option ] + ~back:Flagged_option.to_option + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -174,49 +178,54 @@ module Complement = struct ; second_ends_empty ; token_id ; other_fee_payer_opt - ; one= _ - ; two } : + ; one = _ + ; two + } : Snapp_command.Payload.One_proved.t) : t = { second_starts_empty ; second_ends_empty ; token_id - ; account2_nonce= two.predicate - ; other_fee_payer_opt } + ; account2_nonce = two.predicate + ; other_fee_payer_opt + } let complete ({ second_starts_empty ; second_ends_empty ; token_id ; account2_nonce - ; other_fee_payer_opt } : - t) ~one:({predicate; body1; body2} : Stable.Latest.t) : + ; other_fee_payer_opt + } : + t) ~one:({ predicate; body1; body2 } : Stable.Latest.t) : Snapp_command.Payload.One_proved.t = { Snapp_command.Payload.Inner.second_starts_empty ; second_ends_empty ; token_id ; other_fee_payer_opt - ; one= {predicate; body= body1} - ; two= {predicate= account2_nonce; body= body2} } + ; one = { predicate; body = body1 } + ; two = { predicate = account2_nonce; body = body2 } + } end module Two_proved = struct 
module Poly = struct type ('token_id, 'fee_payer_opt) t = - {token_id: 'token_id; other_fee_payer_opt: 'fee_payer_opt} + { token_id : 'token_id; other_fee_payer_opt : 'fee_payer_opt } [@@deriving hlist, sexp, equal, yojson, hash, compare] end type t = (Token_id.t, Other_fee_payer.Payload.t option) Poly.t let create - ({ second_starts_empty= _ - ; second_ends_empty= _ + ({ second_starts_empty = _ + ; second_ends_empty = _ ; token_id ; other_fee_payer_opt - ; one= _ - ; two= _ } : + ; one = _ + ; two = _ + } : Snapp_command.Payload.Two_proved.t) : t = - {token_id; other_fee_payer_opt} + { token_id; other_fee_payer_opt } module Checked = struct type t = @@ -224,7 +233,7 @@ module Complement = struct , (Boolean.var, Other_fee_payer.Payload.Checked.t) Flagged_option.t ) Poly.t - let complete ({token_id; other_fee_payer_opt} : t) ~(one : Checked.t) + let complete ({ token_id; other_fee_payer_opt } : t) ~(one : Checked.t) ~(two : Checked.t) : Snapp_command.Payload.Two_proved.Digested.Checked.t = let (_ : Pickles.Impls.Step.Field.t array) = @@ -234,14 +243,15 @@ module Complement = struct Checked.to_field_elements two in let ( ! 
) x = Set_once.get_exn (With_hash.hash x) [%here] in - { Snapp_command.Payload.Inner.second_starts_empty= Boolean.false_ - ; second_ends_empty= Boolean.false_ + { Snapp_command.Payload.Inner.second_starts_empty = Boolean.false_ + ; second_ends_empty = Boolean.false_ ; token_id ; other_fee_payer_opt - (* one.body2 = two.body1 - two.body2 = one.body1 *) - ; one= {predicate= !(one.predicate); body= !(one.body1)} - ; two= {predicate= !(two.predicate); body= !(one.body2)} } + (* one.body2 = two.body1 + two.body2 = one.body1 *) + ; one = { predicate = !(one.predicate); body = !(one.body1) } + ; two = { predicate = !(two.predicate); body = !(one.body2) } + } end let typ : (Checked.t, t) Typ.t = @@ -253,19 +263,22 @@ module Complement = struct ~there: (Flagged_option.of_option ~default:Other_fee_payer.Payload.dummy) - ~back:Flagged_option.to_option ] + ~back:Flagged_option.to_option + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist - let complete ({token_id; other_fee_payer_opt} : t) ~(one : Stable.Latest.t) - ~(two : Stable.Latest.t) : Snapp_command.Payload.Two_proved.t = - { Snapp_command.Payload.Inner.second_starts_empty= false - ; second_ends_empty= false + let complete ({ token_id; other_fee_payer_opt } : t) + ~(one : Stable.Latest.t) ~(two : Stable.Latest.t) : + Snapp_command.Payload.Two_proved.t = + { Snapp_command.Payload.Inner.second_starts_empty = false + ; second_ends_empty = false ; token_id ; other_fee_payer_opt - (* one.body2 = two.body1 - two.body2 = one.body1 *) - ; one= {predicate= one.predicate; body= one.body1} - ; two= {predicate= one.predicate; body= two.body1} } + (* one.body2 = two.body1 + two.body2 = one.body1 *) + ; one = { predicate = one.predicate; body = one.body1 } + ; two = { predicate = one.predicate; body = two.body1 } + } end end diff --git a/src/lib/mina_base/sok_message.ml b/src/lib/mina_base/sok_message.ml index e75994332ea..630150fb452 100644 --- a/src/lib/mina_base/sok_message.ml +++ 
b/src/lib/mina_base/sok_message.ml @@ -5,14 +5,16 @@ open Import module Stable = struct module V1 = struct type t = - {fee: Currency.Fee.Stable.V1.t; prover: Public_key.Compressed.Stable.V1.t} + { fee : Currency.Fee.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving sexp, yojson, equal, compare] let to_latest = Fn.id end end] -let create ~fee ~prover = Stable.Latest.{fee; prover} +let create ~fee ~prover = Stable.Latest.{ fee; prover } module Digest = struct let length_in_bytes = Blake2.digest_size_in_bytes @@ -55,8 +57,7 @@ module Digest = struct let to_input t = Random_oracle.Input.bitstring (Array.to_list t) end - [%%define_locally - Stable.Latest.(to_input, typ)] + [%%define_locally Stable.Latest.(to_input, typ)] let default = String.init length_in_bytes ~f:(fun _ -> '\000') end diff --git a/src/lib/mina_base/sok_message.mli b/src/lib/mina_base/sok_message.mli index 1f8d1b66b54..bd8b09ac5c3 100644 --- a/src/lib/mina_base/sok_message.mli +++ b/src/lib/mina_base/sok_message.mli @@ -9,7 +9,9 @@ open Import module Stable : sig module V1 : sig type t = - {fee: Currency.Fee.Stable.V1.t; prover: Public_key.Compressed.Stable.V1.t} + { fee : Currency.Fee.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving sexp, yojson, equal, compare] end end] @@ -17,7 +19,7 @@ end] [@@@warning "+32"] type t = Stable.Latest.t = - {fee: Currency.Fee.Stable.V1.t; prover: Public_key.Compressed.Stable.V1.t} + { fee : Currency.Fee.Stable.V1.t; prover : Public_key.Compressed.Stable.V1.t } [@@deriving sexp, yojson, equal, compare] val create : fee:Currency.Fee.t -> prover:Public_key.Compressed.t -> t diff --git a/src/lib/mina_base/sparse_ledger.ml b/src/lib/mina_base/sparse_ledger.ml index 74ed943272d..285154d5d84 100644 --- a/src/lib/mina_base/sparse_ledger.ml +++ b/src/lib/mina_base/sparse_ledger.ml @@ -32,7 +32,7 @@ end module M = Sparse_ledger_lib.Sparse_ledger.Make (Hash) (Token_id) (Account_id) (Account) -type account_state = [`Added | 
`Existed] [@@deriving equal] +type account_state = [ `Added | `Existed ] [@@deriving equal] module L = struct type t = M.t ref @@ -57,9 +57,10 @@ module L = struct let public_key = Account_id.public_key id in let account' : Account.t = { account with - delegate= Some public_key + delegate = Some public_key ; public_key - ; token_id= Account_id.token_id id } + ; token_id = Account_id.token_id id + } in set t loc account' ; (`Added, account', loc) ) @@ -76,7 +77,7 @@ module L = struct if Public_key.Compressed.(equal empty a.public_key) then ( set t loc to_set ; (`Added, loc) ) - else (`Existed, loc) ) + else (`Existed, loc)) let remove_accounts_exn : t -> Account_id.t list -> unit = fun _t _xs -> failwith "remove_accounts_exn: not implemented" @@ -90,7 +91,7 @@ module L = struct fun t -> M.next_available_token !t let set_next_available_token : t -> Token_id.t -> unit = - fun t token -> t := {!t with next_available_token= token} + fun t token -> t := { !t with next_available_token = token } end module T = Transaction_logic.Make (L) @@ -122,8 +123,7 @@ let of_any_ledger (ledger : Ledger.Any_ledger.witness) = ~init: (of_root ~depth:(Ledger.Any_ledger.M.depth ledger) - ~next_available_token: - (Ledger.Any_ledger.M.next_available_token ledger) + ~next_available_token:(Ledger.Any_ledger.M.next_available_token ledger) (Ledger.Any_ledger.M.merkle_root ledger)) ~f:(fun _addr sparse_ledger account -> let loc = @@ -134,7 +134,7 @@ let of_any_ledger (ledger : Ledger.Any_ledger.witness) = add_path sparse_ledger (Ledger.Any_ledger.M.merkle_path ledger loc) (Account.identifier account) - (Option.value_exn (Ledger.Any_ledger.M.get ledger loc)) ) + (Option.value_exn (Ledger.Any_ledger.M.get ledger loc))) let of_ledger_subset_exn (oledger : Ledger.t) keys = let ledger = Ledger.copy oledger in @@ -151,14 +151,13 @@ let of_ledger_subset_exn (oledger : Ledger.t) keys = |> Option.value_exn ?here:None ?error:None ?message:None ) ) | None -> let path, acct = Ledger.create_empty_exn ledger 
key in - (key :: new_keys, add_path sl path key acct) ) + (key :: new_keys, add_path sl path key acct)) ~init:([], of_ledger_root ledger) in Debug_assert.debug_assert (fun () -> [%test_eq: Ledger_hash.t] (Ledger.merkle_root ledger) - ((merkle_root sparse :> Random_oracle.Digest.t) |> Ledger_hash.of_hash) - ) ; + ((merkle_root sparse :> Random_oracle.Digest.t) |> Ledger_hash.of_hash)) ; sparse let of_ledger_index_subset_exn (ledger : Ledger.Any_ledger.witness) indexes = @@ -166,15 +165,14 @@ let of_ledger_index_subset_exn (ledger : Ledger.Any_ledger.witness) indexes = ~init: (of_root ~depth:(Ledger.Any_ledger.M.depth ledger) - ~next_available_token: - (Ledger.Any_ledger.M.next_available_token ledger) + ~next_available_token:(Ledger.Any_ledger.M.next_available_token ledger) (Ledger.Any_ledger.M.merkle_root ledger)) ~f:(fun acc i -> let account = Ledger.Any_ledger.M.get_at_index_exn ledger i in add_path acc (Ledger.Any_ledger.M.merkle_path_at_index_exn ledger i) (Account.identifier account) - account ) + account) let%test_unit "of_ledger_subset_exn with keys that don't exist works" = let keygen () = @@ -188,10 +186,10 @@ let%test_unit "of_ledger_subset_exn with keys that don't exist works" = let _, pub2 = keygen () in let aid1 = Account_id.create pub1 Token_id.default in let aid2 = Account_id.create pub2 Token_id.default in - let sl = of_ledger_subset_exn ledger [aid1; aid2] in + let sl = of_ledger_subset_exn ledger [ aid1; aid2 ] in [%test_eq: Ledger_hash.t] (Ledger.merkle_root ledger) - ((merkle_root sl :> Random_oracle.Digest.t) |> Ledger_hash.of_hash) ) + ((merkle_root sl :> Random_oracle.Digest.t) |> Ledger_hash.of_hash)) let get_or_initialize_exn account_id t idx = let account = get_exn t idx in @@ -206,7 +204,8 @@ let get_or_initialize_exn account_id t idx = , { account with delegate ; public_key - ; token_id= Account_id.token_id account_id } ) + ; token_id = Account_id.token_id account_id + } ) else (`Existed, account) let sub_account_creation_fee @@ -221,7 
+220,7 @@ let sub_account_creation_fee let apply_user_command_exn ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~txn_global_slot t - ({signer; payload; signature= _} as user_command : Signed_command.t) = + ({ signer; payload; signature = _ } as user_command : Signed_command.t) = let open Currency in let signer_pk = Public_key.compress signer in let current_global_slot = txn_global_slot in @@ -244,14 +243,15 @@ let apply_user_command_exn in ( idx , { account with - nonce= Account.Nonce.succ account.nonce - ; balance= + nonce = Account.Nonce.succ account.nonce + ; balance = Balance.sub_amount account.balance (Amount.of_fee fee) |> Option.value_exn ?here:None ?error:None ?message:None - ; receipt_chain_hash= + ; receipt_chain_hash = Receipt.Chain_hash.cons (Signed_command payload) account.receipt_chain_hash - ; timing } ) + ; timing + } ) in (* Charge the fee. *) let t = set_exn t fee_payer_idx fee_payer_account in @@ -265,13 +265,13 @@ let apply_user_command_exn (Balance.sub_amount account.balance (Amount.of_fee constraint_constants.account_creation_fee)) in - let account = {account with balance} in + let account = { account with balance } in let timing = Or_error.ok_exn (Transaction_logic.validate_timing ~txn_amount:Amount.zero ~txn_global_slot:current_global_slot ~account) in - {account with timing} + { account with timing } in let compute_updates () = (* Raise an exception if any of the invariants for the user command are not @@ -313,8 +313,7 @@ let apply_user_command_exn let receiver_account = get_exn t @@ find_index_exn t receiver in (* Check that receiver account exists. *) assert ( - not Public_key.Compressed.(equal empty receiver_account.public_key) - ) ; + not Public_key.Compressed.(equal empty receiver_account.public_key) ) ; let source_idx = find_index_exn t source in let source_account = get_exn t source_idx in (* Check that source account exists. 
*) @@ -330,11 +329,12 @@ let apply_user_command_exn ~txn_global_slot:current_global_slot ~account:source_account in { source_account with - delegate= Some (Account_id.public_key receiver) - ; timing } + delegate = Some (Account_id.public_key receiver) + ; timing + } in - [(source_idx, source_account)] - | Payment {amount; token_id= token; _} -> + [ (source_idx, source_account) ] + | Payment { amount; token_id = token; _ } -> let receiver_idx = find_index_exn t receiver in let action, receiver_account = get_or_initialize_exn receiver t receiver_idx @@ -348,9 +348,10 @@ let apply_user_command_exn in let receiver_account = { receiver_account with - balance= + balance = Balance.add_amount receiver_account.balance receiver_amount - |> Option.value_exn ?here:None ?error:None ?message:None } + |> Option.value_exn ?here:None ?error:None ?message:None + } in let source_idx = find_index_exn t source in let source_account = @@ -364,21 +365,22 @@ let apply_user_command_exn assert (not Public_key.Compressed.(equal empty account.public_key)) ; try { account with - balance= + balance = Balance.sub_amount account.balance amount |> Option.value_exn ?here:None ?error:None ?message:None - ; timing= + ; timing = Or_error.ok_exn @@ Transaction_logic.validate_timing ~txn_amount:amount - ~txn_global_slot:current_global_slot ~account } + ~txn_global_slot:current_global_slot ~account + } with exn when Account_id.equal fee_payer source -> (* Don't process transactions with insufficient balance from the fee-payer. *) raise (Reject exn) in - [(receiver_idx, receiver_account); (source_idx, source_account)] - | Create_new_token {disable_new_accounts; _} -> + [ (receiver_idx, receiver_account); (source_idx, source_account) ] + | Create_new_token { disable_new_accounts; _ } -> (* NOTE: source and receiver are definitionally equal here. 
*) let fee_payer_account = try charge_account_creation_fee_exn fee_payer_account @@ -395,11 +397,12 @@ let apply_user_command_exn "Token owner account for newly created token already exists")) ; let receiver_account = { receiver_account with - token_permissions= - Token_permissions.Token_owned {disable_new_accounts} } + token_permissions = + Token_permissions.Token_owned { disable_new_accounts } + } in - [(fee_payer_idx, fee_payer_account); (receiver_idx, receiver_account)] - | Create_token_account {account_disabled; _} -> + [ (fee_payer_idx, fee_payer_account); (receiver_idx, receiver_account) ] + | Create_token_account { account_disabled; _ } -> if account_disabled && Token_id.(equal default) (Account_id.token_id receiver) @@ -419,7 +422,7 @@ let apply_user_command_exn failwith "Attempted to create an account that already exists" ; let receiver_account = { receiver_account with - token_permissions= Token_permissions.Not_owned {account_disabled} + token_permissions = Token_permissions.Not_owned { account_disabled } } in let source_idx = find_index_exn t source in @@ -435,7 +438,7 @@ let apply_user_command_exn in let () = match source_account.token_permissions with - | Token_owned {disable_new_accounts} -> + | Token_owned { disable_new_accounts } -> if not ( Bool.equal account_disabled disable_new_accounts @@ -445,8 +448,7 @@ let apply_user_command_exn "The fee-payer is not authorised to create token accounts \ for this token" | Not_owned _ -> - if Token_id.(equal default) (Account_id.token_id receiver) then - () + if Token_id.(equal default) (Account_id.token_id receiver) then () else failwith "Token owner account does not own the token" in let source_account = @@ -455,16 +457,17 @@ let apply_user_command_exn @@ Transaction_logic.validate_timing ~txn_amount:Amount.zero ~txn_global_slot:current_global_slot ~account:source_account in - {source_account with timing} + { source_account with timing } in if Account_id.equal source receiver then (* For token_id= default, 
we allow this *) - [(fee_payer_idx, fee_payer_account); (source_idx, source_account)] + [ (fee_payer_idx, fee_payer_account); (source_idx, source_account) ] else [ (receiver_idx, receiver_account) ; (fee_payer_idx, fee_payer_account) - ; (source_idx, source_account) ] - | Mint_tokens {token_id= token; amount; _} -> + ; (source_idx, source_account) + ] + | Mint_tokens { token_id = token; amount; _ } -> assert (not (Token_id.(equal default) token)) ; let receiver_idx = find_index_exn t receiver in let action, receiver_account = @@ -473,9 +476,10 @@ let apply_user_command_exn assert (equal_account_state action `Existed) ; let receiver_account = { receiver_account with - balance= + balance = Balance.add_amount receiver_account.balance amount - |> Option.value_exn ?here:None ?error:None ?message:None } + |> Option.value_exn ?here:None ?error:None ?message:None + } in let source_idx = find_index_exn t source in let source_account = @@ -492,23 +496,24 @@ let apply_user_command_exn () | Not_owned _ -> failwithf - !"The claimed token owner %{sexp: Account_id.t} does not \ - own the token %{sexp: Token_id.t}" + !"The claimed token owner %{sexp: Account_id.t} does not own \ + the token %{sexp: Token_id.t}" source token () in { account with - timing= + timing = Or_error.ok_exn @@ Transaction_logic.validate_timing ~txn_amount:Amount.zero - ~txn_global_slot:current_global_slot ~account } + ~txn_global_slot:current_global_slot ~account + } in - [(receiver_idx, receiver_account); (source_idx, source_account)] + [ (receiver_idx, receiver_account); (source_idx, source_account) ] in try let indexed_accounts = compute_updates () in (* User command succeeded, update accounts in the ledger. 
*) List.fold ~init:t indexed_accounts ~f:(fun t (idx, account) -> - set_exn t idx account ) + set_exn t idx account) with | Reject exn -> (* TODO: These transactions should never reach this stage, this error @@ -551,7 +556,7 @@ let apply_fee_transfer_exn ~constraint_constants ~txn_global_slot = in Option.value_exn (Balance.add_amount account.balance amount') in - set_exn t index {account with balance; timing} + set_exn t index { account with balance; timing } in fun t transfer -> match Fee_transfer.to_singles transfer with @@ -563,7 +568,7 @@ let apply_fee_transfer_exn ~constraint_constants ~txn_global_slot = apply_single ~update_timing:true t' s2 let apply_coinbase_exn ~constraint_constants ~txn_global_slot t - ({receiver; fee_transfer; amount= coinbase_amount} : Coinbase.t) = + ({ receiver; fee_transfer; amount = coinbase_amount } : Coinbase.t) = let open Currency in let add_to_balance ~update_timing t pk amount = let idx = find_index_exn t pk in @@ -578,14 +583,14 @@ let apply_coinbase_exn ~constraint_constants ~txn_global_slot t in Option.value_exn (Balance.add_amount a.balance amount') in - set_exn t idx {a with balance; timing} + set_exn t idx { a with balance; timing } in (* Note: Updating coinbase receiver timing only if there is no fee transfer. This is so as to not add any extra constraints in transaction snark for checking "receiver" timings. 
This is OK because timing rules will not be violated when balance increases and will be checked whenever an amount is deducted from the account(#5973)*) let receiver_reward, t, update_coinbase_receiver_timing = match fee_transfer with | None -> (coinbase_amount, t, true) - | Some ({receiver_pk= _; fee} as ft) -> + | Some ({ receiver_pk = _; fee } as ft) -> let fee = Amount.of_fee fee in let reward = Amount.sub coinbase_amount fee @@ -618,8 +623,7 @@ let has_locked_tokens_exn ~global_slot ~account_id t = let _, account = get_or_initialize_exn account_id t idx in Account.has_locked_tokens ~global_slot account -let merkle_root t = - Ledger_hash.of_hash (merkle_root t :> Random_oracle.Digest.t) +let merkle_root t = Ledger_hash.of_hash (merkle_root t :> Random_oracle.Digest.t) let depth t = M.depth t @@ -628,7 +632,7 @@ let handler t = let path_exn idx = List.map (path_exn !ledger idx) ~f:(function `Left h -> h | `Right h -> h) in - stage (fun (With {request; respond}) -> + stage (fun (With { request; respond }) -> match request with | Ledger_hash.Get_element idx -> let elt = get_exn !ledger idx in @@ -644,7 +648,7 @@ let handler t = let index = find_index_exn !ledger pk in respond (Provide index) | _ -> - unhandled ) + unhandled) let snapp_accounts (ledger : t) (t : Transaction.t) = match t with @@ -656,7 +660,7 @@ let snapp_accounts (ledger : t) (t : Transaction.t) = Option.try_with (fun () -> ( find_index_exn ledger (Account_id.create pk token_id) |> get_exn ledger ) - .snapp ) + .snapp) |> Option.join in match Snapp_command.to_payload c with diff --git a/src/lib/mina_base/sparse_ledger.mli b/src/lib/mina_base/sparse_ledger.mli index c7b7d785272..9c9f2afbcd8 100644 --- a/src/lib/mina_base/sparse_ledger.mli +++ b/src/lib/mina_base/sparse_ledger.mli @@ -23,12 +23,11 @@ val next_available_token : t -> Token_id.t val get_exn : t -> int -> Account.t val path_exn : - t -> int -> [`Left of Ledger_hash.t | `Right of Ledger_hash.t] list + t -> int -> [ `Left of Ledger_hash.t 
| `Right of Ledger_hash.t ] list val find_index_exn : t -> Account_id.t -> int -val of_root : - depth:int -> next_available_token:Token_id.t -> Ledger_hash.t -> t +val of_root : depth:int -> next_available_token:Token_id.t -> Ledger_hash.t -> t val apply_user_command_exn : constraint_constants:Genesis_constants.Constraint_constants.t @@ -58,7 +57,4 @@ val snapp_accounts : t -> Transaction.t -> Snapp_account.t option * Snapp_account.t option val has_locked_tokens_exn : - global_slot:Mina_numbers.Global_slot.t - -> account_id:Account_id.t - -> t - -> bool + global_slot:Mina_numbers.Global_slot.t -> account_id:Account_id.t -> t -> bool diff --git a/src/lib/mina_base/staged_ledger_hash.ml b/src/lib/mina_base/staged_ledger_hash.ml index 0fb54b44421..61db4a3cc91 100644 --- a/src/lib/mina_base/staged_ledger_hash.ml +++ b/src/lib/mina_base/staged_ledger_hash.ml @@ -1,5 +1,4 @@ -[%%import -"../../config.mlh"] +[%%import "../../config.mlh"] open Core open Fold_lib @@ -28,20 +27,19 @@ module Aux_hash = struct let of_yojson = function | `String s -> ( - match Base58_check.decode s with - | Error e -> - Error - (sprintf "Aux_hash.of_yojson, bad Base58Check:%s" - (Error.to_string_hum e)) - | Ok x -> - Ok x ) + match Base58_check.decode s with + | Error e -> + Error + (sprintf "Aux_hash.of_yojson, bad Base58Check:%s" + (Error.to_string_hum e)) + | Ok x -> + Ok x ) | _ -> Error "Aux_hash.of_yojson expected `String" end end] - [%%define_locally - Stable.Latest.(to_yojson, of_yojson)] + [%%define_locally Stable.Latest.(to_yojson, of_yojson)] let of_bytes = Fn.id @@ -73,20 +71,19 @@ module Pending_coinbase_aux = struct let of_yojson = function | `String s -> ( - match Base58_check.decode s with - | Ok x -> - Ok x - | Error e -> - Error - (sprintf "Pending_coinbase_aux.of_yojson, bad Base58Check:%s" - (Error.to_string_hum e)) ) + match Base58_check.decode s with + | Ok x -> + Ok x + | Error e -> + Error + (sprintf "Pending_coinbase_aux.of_yojson, bad Base58Check:%s" + 
(Error.to_string_hum e)) ) | _ -> Error "Pending_coinbase_aux.of_yojson expected `String" end end] - [%%define_locally - Stable.Latest.(to_yojson, of_yojson)] + [%%define_locally Stable.Latest.(to_yojson, of_yojson)] let dummy : t = String.init length_in_bytes ~f:(fun _ -> '\000') end @@ -96,9 +93,10 @@ module Non_snark = struct module Stable = struct module V1 = struct type t = - { ledger_hash: Ledger_hash.Stable.V1.t - ; aux_hash: Aux_hash.Stable.V1.t - ; pending_coinbase_aux: Pending_coinbase_aux.Stable.V1.t } + { ledger_hash : Ledger_hash.Stable.V1.t + ; aux_hash : Aux_hash.Stable.V1.t + ; pending_coinbase_aux : Pending_coinbase_aux.Stable.V1.t + } [@@deriving sexp, equal, compare, hash, yojson] let to_latest = Fn.id @@ -109,20 +107,22 @@ module Non_snark = struct let dummy : t Lazy.t = lazy - { ledger_hash= Ledger_hash.empty_hash - ; aux_hash= Aux_hash.dummy - ; pending_coinbase_aux= Pending_coinbase_aux.dummy } + { ledger_hash = Ledger_hash.empty_hash + ; aux_hash = Aux_hash.dummy + ; pending_coinbase_aux = Pending_coinbase_aux.dummy + } let genesis ~genesis_ledger_hash : t = - { ledger_hash= genesis_ledger_hash - ; aux_hash= Aux_hash.dummy - ; pending_coinbase_aux= Pending_coinbase_aux.dummy } + { ledger_hash = genesis_ledger_hash + ; aux_hash = Aux_hash.dummy + ; pending_coinbase_aux = Pending_coinbase_aux.dummy + } type var = Boolean.var list let length_in_bits = 256 - let digest ({ledger_hash; aux_hash; pending_coinbase_aux} : t) = + let digest ({ ledger_hash; aux_hash; pending_coinbase_aux } : t) = let h = Digestif.SHA256.init () in let h = Digestif.SHA256.feed_string h (Ledger_hash.to_bytes ledger_hash) in let h = Digestif.SHA256.feed_string h aux_hash in @@ -133,21 +133,20 @@ module Non_snark = struct let to_input t = Random_oracle.Input.bitstring (Fold.to_list (fold t)) - let ledger_hash ({ledger_hash; _} : t) = ledger_hash + let ledger_hash ({ ledger_hash; _ } : t) = ledger_hash - let aux_hash ({aux_hash; _} : t) = aux_hash + let aux_hash ({ 
aux_hash; _ } : t) = aux_hash let of_ledger_aux_coinbase_hash aux_hash ledger_hash pending_coinbase_aux : t = - {aux_hash; ledger_hash; pending_coinbase_aux} + { aux_hash; ledger_hash; pending_coinbase_aux } let var_to_input = Random_oracle.Input.bitstring let var_of_t t : var = List.map (Fold.to_list @@ fold t) ~f:Boolean.var_of_value - [%%if - proof_level = "check"] + [%%if proof_level = "check"] let warn_improper_transport () = () @@ -162,10 +161,10 @@ module Non_snark = struct Typ.transport (Typ.list ~length:length_in_bits Boolean.typ) ~there:(Fn.compose Fold.to_list fold) ~back:(fun _ -> (* If we put a failwith here, we lose the ability to printf-inspect - * anything that uses staged-ledger-hashes from within Checked - * computations. It's useful when debugging to dump the protocol state - * and so we can just lie here instead. *) - warn_improper_transport () ; Lazy.force dummy ) + * anything that uses staged-ledger-hashes from within Checked + * computations. It's useful when debugging to dump the protocol state + * and so we can just lie here instead. 
*) + warn_improper_transport () ; Lazy.force dummy) end module Poly = struct @@ -173,7 +172,9 @@ module Poly = struct module Stable = struct module V1 = struct type ('non_snark, 'pending_coinbase_hash) t = - {non_snark: 'non_snark; pending_coinbase_hash: 'pending_coinbase_hash} + { non_snark : 'non_snark + ; pending_coinbase_hash : 'pending_coinbase_hash + } [@@deriving sexp, equal, compare, hash, yojson, hlist] end end] @@ -205,21 +206,22 @@ type var = (Non_snark.var, Pending_coinbase.Hash.var) t_ include Hashable.Make (Stable.Latest) -let ledger_hash ({non_snark; _} : t) = Non_snark.ledger_hash non_snark +let ledger_hash ({ non_snark; _ } : t) = Non_snark.ledger_hash non_snark -let aux_hash ({non_snark; _} : t) = Non_snark.aux_hash non_snark +let aux_hash ({ non_snark; _ } : t) = Non_snark.aux_hash non_snark -let pending_coinbase_hash ({pending_coinbase_hash; _} : t) = +let pending_coinbase_hash ({ pending_coinbase_hash; _ } : t) = pending_coinbase_hash -let pending_coinbase_hash_var ({pending_coinbase_hash; _} : var) = +let pending_coinbase_hash_var ({ pending_coinbase_hash; _ } : var) = pending_coinbase_hash let of_aux_ledger_and_coinbase_hash aux_hash ledger_hash pending_coinbase : t = - { non_snark= + { non_snark = Non_snark.of_ledger_aux_coinbase_hash aux_hash ledger_hash (Pending_coinbase.hash_extra pending_coinbase) - ; pending_coinbase_hash= Pending_coinbase.merkle_root pending_coinbase } + ; pending_coinbase_hash = Pending_coinbase.merkle_root pending_coinbase + } let genesis ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~genesis_ledger_hash : t = @@ -228,23 +230,24 @@ let genesis ~(constraint_constants : Genesis_constants.Constraint_constants.t) () |> Or_error.ok_exn in - { non_snark= Non_snark.genesis ~genesis_ledger_hash - ; pending_coinbase_hash= Pending_coinbase.merkle_root pending_coinbase } + { non_snark = Non_snark.genesis ~genesis_ledger_hash + ; pending_coinbase_hash = Pending_coinbase.merkle_root pending_coinbase + } -let 
var_of_t ({pending_coinbase_hash; non_snark} : t) : var = +let var_of_t ({ pending_coinbase_hash; non_snark } : t) : var = let non_snark = Non_snark.var_of_t non_snark in let pending_coinbase_hash = Pending_coinbase.Hash.var_of_t pending_coinbase_hash in - {non_snark; pending_coinbase_hash} + { non_snark; pending_coinbase_hash } -let to_input ({non_snark; pending_coinbase_hash} : t) = +let to_input ({ non_snark; pending_coinbase_hash } : t) = Random_oracle.Input.( append (Non_snark.to_input non_snark) (field (pending_coinbase_hash :> Field.t))) -let var_to_input ({non_snark; pending_coinbase_hash} : var) = +let var_to_input ({ non_snark; pending_coinbase_hash } : var) = Random_oracle.Input.( append (Non_snark.var_to_input non_snark) @@ -252,7 +255,7 @@ let var_to_input ({non_snark; pending_coinbase_hash} : var) = let data_spec = let open Data_spec in - [Non_snark.typ; Pending_coinbase.Hash.typ] + [ Non_snark.typ; Pending_coinbase.Hash.typ ] let typ : (var, t) Typ.t = Typ.of_hlistable data_spec ~var_to_hlist:Poly.to_hlist diff --git a/src/lib/mina_base/stake_delegation.ml b/src/lib/mina_base/stake_delegation.ml index d02a377f808..c12fc4729bb 100644 --- a/src/lib/mina_base/stake_delegation.ml +++ b/src/lib/mina_base/stake_delegation.ml @@ -1,12 +1,10 @@ (* stake_delegation.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Signature_lib @@ -22,35 +20,36 @@ module Stable = struct module V1 = struct type t = | Set_delegate of - { delegator: Public_key.Compressed.Stable.V1.t - ; new_delegate: Public_key.Compressed.Stable.V1.t } + { delegator : Public_key.Compressed.Stable.V1.t + ; new_delegate : Public_key.Compressed.Stable.V1.t + } [@@deriving compare, equal, sexp, hash, yojson] let to_latest = Fn.id end end] -let receiver_pk = function Set_delegate {new_delegate; _} -> new_delegate +let receiver_pk = function Set_delegate { new_delegate; _ } -> new_delegate let 
receiver = function - | Set_delegate {new_delegate; _} -> + | Set_delegate { new_delegate; _ } -> Account_id.create new_delegate Token_id.default -let source_pk = function Set_delegate {delegator; _} -> delegator +let source_pk = function Set_delegate { delegator; _ } -> delegator let source = function - | Set_delegate {delegator; _} -> + | Set_delegate { delegator; _ } -> Account_id.create delegator Token_id.default let gen_with_delegator delegator = Quickcheck.Generator.map Public_key.Compressed.gen ~f:(fun k -> - Set_delegate {delegator; new_delegate= k} ) + Set_delegate { delegator; new_delegate = k }) let gen = Quickcheck.Generator.bind ~f:gen_with_delegator Public_key.Compressed.gen let to_input = function - | Set_delegate {delegator; new_delegate} -> + | Set_delegate { delegator; new_delegate } -> Random_oracle.Input.append (Public_key.Compressed.to_input delegator) (Public_key.Compressed.to_input new_delegate) diff --git a/src/lib/mina_base/state_body_hash.ml b/src/lib/mina_base/state_body_hash.ml index 344098b4972..2d213ec99f4 100644 --- a/src/lib/mina_base/state_body_hash.ml +++ b/src/lib/mina_base/state_body_hash.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -31,15 +29,14 @@ module Stable = struct module V1 = struct module T = struct - type t = Field.t [@@deriving sexp, compare, hash, version {asserted}] + type t = Field.t [@@deriving sexp, compare, hash, version { asserted }] end include T let to_latest = Fn.id - [%%define_from_scope - to_yojson, of_yojson] + [%%define_from_scope to_yojson, of_yojson] include Comparable.Make (T) include Hashable.Make_binable (T) diff --git a/src/lib/mina_base/state_hash.ml b/src/lib/mina_base/state_hash.ml index 357dd31dd3f..04ce93839d4 100644 --- a/src/lib/mina_base/state_hash.ml +++ b/src/lib/mina_base/state_hash.ml @@ -1,10 +1,8 @@ (* state_hash.ml *) -[%%import -"/src/config.mlh"] 
+[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] include Data_hash_lib.State_hash diff --git a/src/lib/mina_base/token_id.ml b/src/lib/mina_base/token_id.ml index 48ce119294c..06e6d49b708 100644 --- a/src/lib/mina_base/token_id.ml +++ b/src/lib/mina_base/token_id.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] open Import @@ -28,8 +26,7 @@ let next = T.succ let invalid = T.of_uint64 Unsigned.UInt64.zero -[%%if -feature_tokens] +[%%if feature_tokens] [%%versioned module Stable = struct @@ -73,7 +70,7 @@ let gen_ge minimum = Quickcheck.Generator.map Int64.(gen_incl (min_value + minimum) max_value) ~f:(fun x -> - Int64.(x - min_value) |> Unsigned.UInt64.of_int64 |> T.of_uint64 ) + Int64.(x - min_value) |> Unsigned.UInt64.of_int64 |> T.of_uint64) let gen = gen_ge 1L @@ -86,8 +83,7 @@ let unpack = T.to_bits include Hashable.Make_binable (Stable.Latest) include Comparable.Make_binable (Stable.Latest) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = T.Checked.t @@ -134,6 +130,6 @@ let%test_unit "var_of_t preserves the underlying value" = [%test_eq: t] tid (Test_util.checked_to_unchecked Typ.unit typ (fun () -> Snark_params.Tick.Checked.return (var_of_t tid)) - ()) ) + ())) [%%endif] diff --git a/src/lib/mina_base/token_permissions.ml b/src/lib/mina_base/token_permissions.ml index f05743d96a5..7633db323e7 100644 --- a/src/lib/mina_base/token_permissions.ml +++ b/src/lib/mina_base/token_permissions.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] open Import @@ -14,70 +12,73 @@ open Import module Stable = struct module V1 = struct type t = - | Token_owned of {disable_new_accounts: bool} - | Not_owned of {account_disabled: bool} + | Token_owned of { disable_new_accounts : bool } + | 
Not_owned of { account_disabled : bool } [@@deriving compare, equal, sexp, hash, yojson] let to_latest = Fn.id end end] -let default = Not_owned {account_disabled= false} +let default = Not_owned { account_disabled = false } let to_input = function - | Token_owned {disable_new_accounts} -> - Random_oracle.Input.bitstring [true; disable_new_accounts] - | Not_owned {account_disabled} -> - Random_oracle.Input.bitstring [false; account_disabled] + | Token_owned { disable_new_accounts } -> + Random_oracle.Input.bitstring [ true; disable_new_accounts ] + | Not_owned { account_disabled } -> + Random_oracle.Input.bitstring [ false; account_disabled ] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick -type var = {token_owner: Boolean.var; token_locked: Boolean.var} +type var = { token_owner : Boolean.var; token_locked : Boolean.var } let var_of_t = function - | Token_owned {disable_new_accounts} -> - { token_owner= Boolean.true_ - ; token_locked= Boolean.var_of_value disable_new_accounts } - | Not_owned {account_disabled} -> - { token_owner= Boolean.false_ - ; token_locked= Boolean.var_of_value account_disabled } + | Token_owned { disable_new_accounts } -> + { token_owner = Boolean.true_ + ; token_locked = Boolean.var_of_value disable_new_accounts + } + | Not_owned { account_disabled } -> + { token_owner = Boolean.false_ + ; token_locked = Boolean.var_of_value account_disabled + } let typ : (var, t) Typ.t = let open Typ in - { alloc= + { alloc = Alloc.( let%bind token_owner = Boolean.typ.alloc in let%map token_locked = Boolean.typ.alloc in - {token_owner; token_locked}) - ; read= + { token_owner; token_locked }) + ; read = Read.( fun t -> let%bind token_owner = Boolean.typ.read t.token_owner in let%map token_locked = Boolean.typ.read t.token_locked in - if token_owner then Token_owned {disable_new_accounts= token_locked} - else Not_owned {account_disabled= token_locked}) - ; store= + if token_owner then + Token_owned { 
disable_new_accounts = token_locked } + else Not_owned { account_disabled = token_locked }) + ; store = Store.( function - | Token_owned {disable_new_accounts} -> + | Token_owned { disable_new_accounts } -> let%bind token_owner = Boolean.typ.store true in let%map token_locked = Boolean.typ.store disable_new_accounts in - {token_owner; token_locked} - | Not_owned {account_disabled} -> + { token_owner; token_locked } + | Not_owned { account_disabled } -> let%bind token_owner = Boolean.typ.store false in let%map token_locked = Boolean.typ.store account_disabled in - {token_owner; token_locked}) - ; check= + { token_owner; token_locked }) + ; check = Checked.( - fun {token_owner; token_locked} -> + fun { token_owner; token_locked } -> all_unit - [Boolean.typ.check token_owner; Boolean.typ.check token_locked]) } + [ Boolean.typ.check token_owner; Boolean.typ.check token_locked ]) + } -let var_to_input {token_owner; token_locked} = - Random_oracle.Input.bitstring [token_owner; token_locked] +let var_to_input { token_owner; token_locked } = + Random_oracle.Input.bitstring [ token_owner; token_locked ] [%%endif] @@ -85,5 +86,5 @@ let gen = let open Quickcheck.Generator.Let_syntax in let%bind token_owner = Quickcheck.Generator.bool in let%map token_locked = Quickcheck.Generator.bool in - if token_owner then Token_owned {disable_new_accounts= token_locked} - else Not_owned {account_disabled= token_locked} + if token_owner then Token_owned { disable_new_accounts = token_locked } + else Not_owned { account_disabled = token_locked } diff --git a/src/lib/mina_base/transaction.ml b/src/lib/mina_base/transaction.ml index 1f89d1b4053..9cb9f5e355d 100644 --- a/src/lib/mina_base/transaction.ml +++ b/src/lib/mina_base/transaction.ml @@ -70,7 +70,8 @@ let public_keys : t -> _ = function | Command (Signed_command cmd) -> [ Signed_command.fee_payer_pk cmd ; Signed_command.source_pk cmd - ; Signed_command.receiver_pk cmd ] + ; Signed_command.receiver_pk cmd + ] | Command (Snapp_command t) 
-> Snapp_command.(accounts_accessed (t :> t)) |> List.map ~f:Account_id.public_key diff --git a/src/lib/mina_base/transaction_hash.ml b/src/lib/mina_base/transaction_hash.ml index 165ef5f7bd3..074c4ba9c0d 100644 --- a/src/lib/mina_base/transaction_hash.ml +++ b/src/lib/mina_base/transaction_hash.ml @@ -22,8 +22,8 @@ let to_yojson t = `String (to_base58_check t) let of_yojson = function | `String str -> Result.map_error (of_base58_check str) ~f:(fun _ -> - "Transaction_hash.of_yojson: Error decoding string from \ - base58_check format" ) + "Transaction_hash.of_yojson: Error decoding string from base58_check \ + format") | _ -> Error "Transaction_hash.of_yojson: Expected a string" @@ -45,8 +45,10 @@ module User_command_with_valid_signature = struct module Stable = struct module V1 = struct type t = - ( (User_command.Valid.Stable.V1.t[@hash.ignore]) - , (T.Stable.V1.t[@to_yojson hash_to_yojson]) ) + ( (User_command.Valid.Stable.V1.t + [@hash.ignore]) + , (T.Stable.V1.t + [@to_yojson hash_to_yojson]) ) With_hash.Stable.V1.t [@@deriving sexp, hash, to_yojson] @@ -60,20 +62,20 @@ module User_command_with_valid_signature = struct end] let create (c : User_command.Valid.t) : t = - {data= c; hash= hash_command (User_command.forget_check c)} + { data = c; hash = hash_command (User_command.forget_check c) } - let data ({data; _} : t) = data + let data ({ data; _ } : t) = data - let command ({data; _} : t) = User_command.forget_check data + let command ({ data; _ } : t) = User_command.forget_check data - let hash ({hash; _} : t) = hash + let hash ({ hash; _ } : t) = hash - let forget_check ({data; hash} : t) = - {With_hash.data= User_command.forget_check data; hash} + let forget_check ({ data; hash } : t) = + { With_hash.data = User_command.forget_check data; hash } include Comparable.Make (Stable.Latest) - let make data hash : t = {data; hash} + let make data hash : t = { data; hash } end module User_command = struct @@ -87,8 +89,10 @@ module User_command = struct module 
Stable = struct module V1 = struct type t = - ( (User_command.Stable.V1.t[@hash.ignore]) - , (T.Stable.V1.t[@to_yojson hash_to_yojson]) ) + ( (User_command.Stable.V1.t + [@hash.ignore]) + , (T.Stable.V1.t + [@to_yojson hash_to_yojson]) ) With_hash.Stable.V1.t [@@deriving sexp, hash, to_yojson] @@ -101,16 +105,16 @@ module User_command = struct end end] - let create (c : User_command.t) : t = {data= c; hash= hash_command c} + let create (c : User_command.t) : t = { data = c; hash = hash_command c } - let data ({data; _} : t) = data + let data ({ data; _ } : t) = data - let command ({data; _} : t) = data + let command ({ data; _ } : t) = data - let hash ({hash; _} : t) = hash + let hash ({ hash; _ } : t) = hash - let of_checked ({data; hash} : User_command_with_valid_signature.t) : t = - {With_hash.data= User_command.forget_check data; hash} + let of_checked ({ data; hash } : User_command_with_valid_signature.t) : t = + { With_hash.data = User_command.forget_check data; hash } include Comparable.Make (Stable.Latest) end diff --git a/src/lib/mina_base/transaction_logic.ml b/src/lib/mina_base/transaction_logic.ml index ad5821cd7bd..c21c448e30f 100644 --- a/src/lib/mina_base/transaction_logic.ml +++ b/src/lib/mina_base/transaction_logic.ml @@ -3,7 +3,7 @@ open Currency open Signature_lib module Global_slot = Mina_numbers.Global_slot -type account_state = [`Added | `Existed] [@@deriving equal] +type account_state = [ `Added | `Existed ] [@@deriving equal] module type Ledger_intf = sig type t @@ -42,10 +42,11 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - { user_command: Signed_command.Stable.V1.t With_status.Stable.V1.t - ; previous_receipt_chain_hash: Receipt.Chain_hash.Stable.V1.t - ; fee_payer_timing: Account.Timing.Stable.V1.t - ; source_timing: Account.Timing.Stable.V1.t option } + { user_command : Signed_command.Stable.V1.t With_status.Stable.V1.t + ; previous_receipt_chain_hash : Receipt.Chain_hash.Stable.V1.t + ; 
fee_payer_timing : Account.Timing.Stable.V1.t + ; source_timing : Account.Timing.Stable.V1.t option + } [@@deriving sexp] let to_latest = Fn.id @@ -58,11 +59,11 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - | Payment of {previous_empty_accounts: Account_id.Stable.V1.t list} + | Payment of + { previous_empty_accounts : Account_id.Stable.V1.t list } | Stake_delegation of - { previous_delegate: Public_key.Compressed.Stable.V1.t option - } - | Create_new_token of {created_token: Token_id.Stable.V1.t} + { previous_delegate : Public_key.Compressed.Stable.V1.t option } + | Create_new_token of { created_token : Token_id.Stable.V1.t } | Create_token_account | Mint_tokens | Failed @@ -76,7 +77,7 @@ module Transaction_applied = struct [%%versioned module Stable = struct module V1 = struct - type t = {common: Common.Stable.V1.t; body: Body.Stable.V1.t} + type t = { common : Common.Stable.V1.t; body : Body.Stable.V1.t } [@@deriving sexp] let to_latest = Fn.id @@ -89,8 +90,10 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - { accounts: (Account_id.Stable.V1.t * Account.Stable.V1.t option) list - ; command: Snapp_command.Stable.V1.t With_status.Stable.V1.t } + { accounts : + (Account_id.Stable.V1.t * Account.Stable.V1.t option) list + ; command : Snapp_command.Stable.V1.t With_status.Stable.V1.t + } [@@deriving sexp] let to_latest = Fn.id @@ -117,10 +120,10 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - { fee_transfer: Fee_transfer.Stable.V1.t - ; previous_empty_accounts: Account_id.Stable.V1.t list - ; receiver_timing: Account.Timing.Stable.V1.t - ; balances: Transaction_status.Fee_transfer_balance_data.Stable.V1.t + { fee_transfer : Fee_transfer.Stable.V1.t + ; previous_empty_accounts : Account_id.Stable.V1.t list + ; receiver_timing : Account.Timing.Stable.V1.t + ; balances : Transaction_status.Fee_transfer_balance_data.Stable.V1.t } [@@deriving 
sexp] @@ -134,10 +137,11 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - { coinbase: Coinbase.Stable.V1.t - ; previous_empty_accounts: Account_id.Stable.V1.t list - ; receiver_timing: Account.Timing.Stable.V1.t - ; balances: Transaction_status.Coinbase_balance_data.Stable.V1.t } + { coinbase : Coinbase.Stable.V1.t + ; previous_empty_accounts : Account_id.Stable.V1.t list + ; receiver_timing : Account.Timing.Stable.V1.t + ; balances : Transaction_status.Coinbase_balance_data.Stable.V1.t + } [@@deriving sexp] let to_latest = Fn.id @@ -164,7 +168,9 @@ module Transaction_applied = struct module Stable = struct module V1 = struct type t = - {previous_hash: Ledger_hash.Stable.V1.t; varying: Varying.Stable.V1.t} + { previous_hash : Ledger_hash.Stable.V1.t + ; varying : Varying.Stable.V1.t + } [@@deriving sexp] let to_latest = Fn.id @@ -179,19 +185,20 @@ module type S = sig module Signed_command_applied : sig module Common : sig type t = Transaction_applied.Signed_command_applied.Common.t = - { user_command: Signed_command.t With_status.t - ; previous_receipt_chain_hash: Receipt.Chain_hash.t - ; fee_payer_timing: Account.Timing.t - ; source_timing: Account.Timing.t option } + { user_command : Signed_command.t With_status.t + ; previous_receipt_chain_hash : Receipt.Chain_hash.t + ; fee_payer_timing : Account.Timing.t + ; source_timing : Account.Timing.t option + } [@@deriving sexp] end module Body : sig type t = Transaction_applied.Signed_command_applied.Body.t = - | Payment of {previous_empty_accounts: Account_id.t list} + | Payment of { previous_empty_accounts : Account_id.t list } | Stake_delegation of - { previous_delegate: Public_key.Compressed.t option } - | Create_new_token of {created_token: Token_id.t} + { previous_delegate : Public_key.Compressed.t option } + | Create_new_token of { created_token : Token_id.t } | Create_token_account | Mint_tokens | Failed @@ -199,14 +206,15 @@ module type S = sig end type t = 
Transaction_applied.Signed_command_applied.t = - {common: Common.t; body: Body.t} + { common : Common.t; body : Body.t } [@@deriving sexp] end module Snapp_command_applied : sig type t = Transaction_applied.Snapp_command_applied.t = - { accounts: (Account_id.t * Account.t option) list - ; command: Snapp_command.t With_status.t } + { accounts : (Account_id.t * Account.t option) list + ; command : Snapp_command.t With_status.t + } [@@deriving sexp] end @@ -219,19 +227,21 @@ module type S = sig module Fee_transfer_applied : sig type t = Transaction_applied.Fee_transfer_applied.t = - { fee_transfer: Fee_transfer.t - ; previous_empty_accounts: Account_id.t list - ; receiver_timing: Account.Timing.t - ; balances: Transaction_status.Fee_transfer_balance_data.t } + { fee_transfer : Fee_transfer.t + ; previous_empty_accounts : Account_id.t list + ; receiver_timing : Account.Timing.t + ; balances : Transaction_status.Fee_transfer_balance_data.t + } [@@deriving sexp] end module Coinbase_applied : sig type t = Transaction_applied.Coinbase_applied.t = - { coinbase: Coinbase.t - ; previous_empty_accounts: Account_id.t list - ; receiver_timing: Account.Timing.t - ; balances: Transaction_status.Coinbase_balance_data.t } + { coinbase : Coinbase.t + ; previous_empty_accounts : Account_id.t list + ; receiver_timing : Account.Timing.t + ; balances : Transaction_status.Coinbase_balance_data.t + } [@@deriving sexp] end @@ -244,7 +254,7 @@ module type S = sig end type t = Transaction_applied.t = - {previous_hash: Ledger_hash.t; varying: Varying.t} + { previous_hash : Ledger_hash.t; varying : Varying.t } [@@deriving sexp] val transaction : t -> Transaction.t With_status.t @@ -285,14 +295,14 @@ module type S = sig -> txn_state_view:Snapp_predicate.Protocol_state.View.t -> ledger -> Snapp_command.Valid.t - -> Ledger_hash.t * [`Next_available_token of Token_id.t] + -> Ledger_hash.t * [ `Next_available_token of Token_id.t ] val merkle_root_after_user_command_exn : 
constraint_constants:Genesis_constants.Constraint_constants.t -> txn_global_slot:Global_slot.t -> ledger -> Signed_command.With_valid_signature.t - -> Ledger_hash.t * [`Next_available_token of Token_id.t] + -> Ledger_hash.t * [ `Next_available_token of Token_id.t ] val undo : constraint_constants:Genesis_constants.Constraint_constants.t @@ -311,7 +321,7 @@ module type S = sig account:Account.t -> txn_amount:Amount.t -> txn_global_slot:Global_slot.t - -> (Account.Timing.t * [> `Min_balance of Balance.t]) Or_error.t + -> (Account.Timing.t * [> `Min_balance of Balance.t ]) Or_error.t val validate_timing : account:Account.t @@ -347,7 +357,8 @@ let validate_timing_with_min_balance ~account ~txn_amount ~txn_global_slot = ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } -> + ; vesting_increment + } -> let open Or_error.Let_syntax in let%map curr_min_balance = let account_balance = account.balance in @@ -407,14 +418,14 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let get_with_location ledger account_id = match location_of_account ledger account_id with | Some location -> ( - match get ledger location with - | Some account -> - Ok (`Existing location, account) - | None -> - Or_error.errorf - !"Account %{sexp: Account_id.t} has a location in the ledger, but \ - is not present" - account_id ) + match get ledger location with + | Some account -> + Ok (`Existing location, account) + | None -> + Or_error.errorf + !"Account %{sexp: Account_id.t} has a location in the ledger, \ + but is not present" + account_id ) | None -> Ok (`New, Account.create account_id Balance.zero) @@ -453,8 +464,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Amount.(sub amount (of_fee fee)) else Ok amount - let check b = - ksprintf (fun s -> if b then Ok () else Or_error.error_string s) + let check b = ksprintf (fun s -> if b then Ok () else Or_error.error_string s) let validate_nonces txn_nonce account_nonce = check @@ -474,33 +484,37 
@@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct include Transaction_applied let transaction : t -> Transaction.t With_status.t = - fun {varying; _} -> + fun { varying; _ } -> match varying with | Command (Signed_command uc) -> With_status.map uc.common.user_command ~f:(fun cmd -> - Transaction.Command (User_command.Signed_command cmd) ) + Transaction.Command (User_command.Signed_command cmd)) | Command (Snapp_command s) -> With_status.map s.command ~f:(fun c -> - Transaction.Command (User_command.Snapp_command c) ) + Transaction.Command (User_command.Snapp_command c)) | Fee_transfer f -> - { data= Fee_transfer f.fee_transfer - ; status= + { data = Fee_transfer f.fee_transfer + ; status = Applied ( Transaction_status.Auxiliary_data.empty , Transaction_status.Fee_transfer_balance_data.to_balance_data - f.balances ) } + f.balances ) + } | Coinbase c -> - { data= Coinbase c.coinbase - ; status= + { data = Coinbase c.coinbase + ; status = Applied ( Transaction_status.Auxiliary_data.empty , Transaction_status.Coinbase_balance_data.to_balance_data - c.balances ) } + c.balances ) + } let user_command_status : t -> Transaction_status.t = - fun {varying; _} -> + fun { varying; _ } -> match varying with - | Command (Signed_command {common= {user_command= {status; _}; _}; _}) -> + | Command + (Signed_command { common = { user_command = { status; _ }; _ }; _ }) + -> status | Command (Snapp_command c) -> c.command.status @@ -517,7 +531,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct end let previous_empty_accounts action pk = - if equal_account_state action `Added then [pk] else [] + if equal_account_state action `Added then [ pk ] else [] let has_locked_tokens ~global_slot ~account_id ledger = let open Or_error.Let_syntax in @@ -544,7 +558,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let incr_balance (acct : Account.t) amt = match add_amount acct.balance amt with | Ok balance -> - Ok {acct with balance} + 
Ok { acct with balance } | Error _ -> Result.fail (failure Overflow) @@ -575,10 +589,11 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct , account , { account with balance - ; nonce= Account.Nonce.succ account.nonce - ; receipt_chain_hash= + ; nonce = Account.Nonce.succ account.nonce + ; receipt_chain_hash = Receipt.Chain_hash.cons command account.receipt_chain_hash - ; timing } ) + ; timing + } ) (* Helper function for [apply_user_command_unchecked] *) let pay_fee ~user_command ~signer_pk ~ledger ~current_global_slot = @@ -602,7 +617,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let%map () = (* TODO: Remove this check and update the transaction snark once we have - an exchange rate mechanism. See issue #4447. + an exchange rate mechanism. See issue #4447. *) if Token_id.equal fee_token Token_id.default then return () else @@ -618,25 +633,27 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ~ledger ~current_global_slot in let applied_common : Transaction_applied.Signed_command_applied.Common.t = - { user_command= - { data= user_command - ; status= + { user_command = + { data = user_command + ; status = Applied ( Transaction_status.Auxiliary_data.empty - , Transaction_status.Balance_data.empty ) } - ; previous_receipt_chain_hash= account.receipt_chain_hash - ; fee_payer_timing= account.timing - ; source_timing= None } + , Transaction_status.Balance_data.empty ) + } + ; previous_receipt_chain_hash = account.receipt_chain_hash + ; fee_payer_timing = account.timing + ; source_timing = None + } in (loc, account', applied_common) (* someday: It would probably be better if we didn't modify the receipt chain hash - in the case that the sender is equal to the receiver, but it complicates the SNARK, so - we don't for now. *) + in the case that the sender is equal to the receiver, but it complicates the SNARK, so + we don't for now. 
*) let apply_user_command_unchecked ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~txn_global_slot ledger - ({payload; signer; signature= _} as user_command : Signed_command.t) = + ({ payload; signer; signature = _ } as user_command : Signed_command.t) = let open Or_error.Let_syntax in let signer_pk = Public_key.compress signer in let current_global_slot = txn_global_slot in @@ -659,29 +676,22 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let next_available_token = next_available_token ledger in let source = Signed_command.source ~next_available_token user_command in - let receiver = - Signed_command.receiver ~next_available_token user_command - in + let receiver = Signed_command.receiver ~next_available_token user_command in let exception Reject of Error.t in - let ok_or_reject = function - | Ok x -> - x - | Error err -> - raise (Reject err) - in + let ok_or_reject = function Ok x -> x | Error err -> raise (Reject err) in let charge_account_creation_fee_exn (account : Account.t) = let balance = Option.value_exn (Balance.sub_amount account.balance (Amount.of_fee constraint_constants.account_creation_fee)) in - let account = {account with balance} in + let account = { account with balance } in let timing = Or_error.ok_exn (validate_timing ~txn_amount:Amount.zero ~txn_global_slot:current_global_slot ~account) in - {account with timing} + { account with timing } in let compute_updates () = let open Result.Let_syntax in @@ -744,15 +754,16 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let source_account = { source_account with - delegate= Some (Account_id.public_key receiver) - ; timing } + delegate = Some (Account_id.public_key receiver) + ; timing + } in - ( [(source_location, source_account)] + ( [ (source_location, source_account) ] , `Source_timing source_timing , Transaction_status.Auxiliary_data.empty , Transaction_applied.Signed_command_applied.Body.Stake_delegation - {previous_delegate} 
) - | Payment {amount; token_id= token; _} -> + { previous_delegate } ) + | Payment { amount; token_id = token; _ } -> let receiver_location, receiver_account = get_with_location ledger receiver |> ok_or_reject in @@ -767,7 +778,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct sub_account_creation_fee ~constraint_constants `Added amount |> Result.map_error ~f:(fun _ -> Transaction_status.Failure - .Amount_insufficient_to_create_account ) + .Amount_insufficient_to_create_account) else Result.fail Transaction_status.Failure.Cannot_pay_creation_fee_in_token @@ -802,9 +813,9 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let%map balance = Result.map_error (sub_amount account.balance amount) ~f:(fun _ -> - Transaction_status.Failure.Source_insufficient_balance ) + Transaction_status.Failure.Source_insufficient_balance) in - (location, source_timing, {account with timing; balance}) + (location, source_timing, { account with timing; balance }) in if Account_id.equal fee_payer source then (* Don't process transactions with insufficient balance from the @@ -825,24 +836,26 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct | `Existing _ -> ([], Transaction_status.Auxiliary_data.empty) | `New -> - ( [receiver] + ( [ receiver ] , { Transaction_status.Auxiliary_data.empty with - receiver_account_creation_fee_paid= + receiver_account_creation_fee_paid = Some (Amount.of_fee - constraint_constants.account_creation_fee) } ) + constraint_constants.account_creation_fee) + } ) in ( [ (receiver_location, receiver_account) - ; (source_location, source_account) ] + ; (source_location, source_account) + ] , `Source_timing source_timing , auxiliary_data , Transaction_applied.Signed_command_applied.Body.Payment - {previous_empty_accounts} ) - | Create_new_token {disable_new_accounts; _} -> + { previous_empty_accounts } ) + | Create_new_token { disable_new_accounts; _ } -> (* NOTE: source and receiver are definitionally equal 
here. *) let fee_payer_account = Or_error.try_with (fun () -> - charge_account_creation_fee_exn fee_payer_account ) + charge_account_creation_fee_exn fee_payer_account) |> ok_or_reject in let receiver_location, receiver_account = @@ -857,21 +870,23 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ) ; let receiver_account = { receiver_account with - token_permissions= - Token_permissions.Token_owned {disable_new_accounts} } + token_permissions = + Token_permissions.Token_owned { disable_new_accounts } + } in return ( [ (fee_payer_location, fee_payer_account) - ; (receiver_location, receiver_account) ] + ; (receiver_location, receiver_account) + ] , `Source_timing receiver_account.timing , { Transaction_status.Auxiliary_data.empty with - fee_payer_account_creation_fee_paid= - Some - (Amount.of_fee constraint_constants.account_creation_fee) - ; created_token= Some next_available_token } + fee_payer_account_creation_fee_paid = + Some (Amount.of_fee constraint_constants.account_creation_fee) + ; created_token = Some next_available_token + } , Transaction_applied.Signed_command_applied.Body.Create_new_token - {created_token= next_available_token} ) - | Create_token_account {account_disabled; _} -> + { created_token = next_available_token } ) + | Create_token_account { account_disabled; _ } -> if account_disabled && Token_id.(equal default) (Account_id.token_id receiver) @@ -882,7 +897,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct "Cannot open a disabled account in the default token")) ; let fee_payer_account = Or_error.try_with (fun () -> - charge_account_creation_fee_exn fee_payer_account ) + charge_account_creation_fee_exn fee_payer_account) |> ok_or_reject in let receiver_location, receiver_account = @@ -897,7 +912,8 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let receiver_account = { receiver_account with - token_permissions= Token_permissions.Not_owned {account_disabled} + token_permissions 
= + Token_permissions.Not_owned { account_disabled } } in let source_location, source_account = @@ -916,7 +932,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let%bind () = match source_account.token_permissions with - | Token_owned {disable_new_accounts} -> + | Token_owned { disable_new_accounts } -> if not ( Bool.equal account_disabled disable_new_accounts @@ -937,27 +953,29 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ~txn_global_slot:current_global_slot ~account:source_account |> Result.map_error ~f:timing_error_to_user_command_status in - {source_account with timing} + { source_account with timing } in let located_accounts = if Account_id.equal source receiver then (* For token_id= default, we allow this *) [ (fee_payer_location, fee_payer_account) - ; (source_location, source_account) ] + ; (source_location, source_account) + ] else [ (receiver_location, receiver_account) ; (fee_payer_location, fee_payer_account) - ; (source_location, source_account) ] + ; (source_location, source_account) + ] in ( located_accounts , `Source_timing source_timing , { Transaction_status.Auxiliary_data.empty with - fee_payer_account_creation_fee_paid= + fee_payer_account_creation_fee_paid = Some (Amount.of_fee constraint_constants.account_creation_fee) } - , Transaction_applied.Signed_command_applied.Body - .Create_token_account ) - | Mint_tokens {token_id= token; amount; _} -> + , Transaction_applied.Signed_command_applied.Body.Create_token_account + ) + | Mint_tokens { token_id = token; amount; _ } -> let%bind () = if Token_id.(equal default) token then Result.fail Transaction_status.Failure.Not_token_owner @@ -1000,10 +1018,11 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ~txn_global_slot:current_global_slot ~account |> Result.map_error ~f:timing_error_to_user_command_status in - (location, source_timing, {account with timing}) + (location, source_timing, { account with timing }) in ( [ 
(receiver_location, receiver_account) - ; (source_location, source_account) ] + ; (source_location, source_account) + ] , `Source_timing source_timing , Transaction_status.Auxiliary_data.empty , Transaction_applied.Signed_command_applied.Body.Mint_tokens ) @@ -1016,10 +1035,11 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct | _ -> None in - { Transaction_status.Balance_data.fee_payer_balance= + { Transaction_status.Balance_data.fee_payer_balance = compute_balance fee_payer - ; source_balance= compute_balance source - ; receiver_balance= compute_balance receiver } + ; source_balance = compute_balance source + ; receiver_balance = compute_balance receiver + } in match compute_updates () with | Ok @@ -1032,28 +1052,32 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct List.fold located_accounts ~init:(Ok ()) ~f:(fun acc (location, account) -> let%bind () = acc in - set_with_location ledger location account ) + set_with_location ledger location account) in let applied_common = { applied_common with - source_timing= Some source_timing - ; user_command= - { data= user_command - ; status= Applied (auxiliary_data, compute_balances ()) } } + source_timing = Some source_timing + ; user_command = + { data = user_command + ; status = Applied (auxiliary_data, compute_balances ()) + } + } in return - ( {common= applied_common; body= applied_body} + ( { common = applied_common; body = applied_body } : Transaction_applied.Signed_command_applied.t ) | Error failure -> (* Do not update the ledger. 
*) let applied_common = { applied_common with - user_command= - { data= user_command - ; status= Failed (failure, compute_balances ()) } } + user_command = + { data = user_command + ; status = Failed (failure, compute_balances ()) + } + } in return - ( {common= applied_common; body= Failed} + ( { common = applied_common; body = Failed } : Transaction_applied.Signed_command_applied.t ) | exception Reject err -> (* TODO: These transactions should never reach this stage, this error @@ -1083,9 +1107,10 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let apply_body ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~(state_view : Snapp_predicate.Protocol_state.View.t) ~check_auth ~is_new - ({ pk= _ - ; update= {app_state; delegate; verification_key; permissions} - ; delta } : + ({ pk = _ + ; update = { app_state; delegate; verification_key; permissions } + ; delta + } : Snapp_command.Party.Body.t) (a : Account.t) : (Account.t, _) Result.t = let open Snapp_basic in let open Result.Let_syntax in @@ -1132,7 +1157,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct if Token_id.(equal default) a.token_id then update a.permissions.set_delegate delegate a.delegate ~is_keep:Set_or_keep.is_keep ~update:(fun u x -> - match u with Keep -> x | Set y -> Some y ) + match u with Keep -> x | Set y -> Some y) else return a.delegate in let%bind snapp = @@ -1142,18 +1167,17 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ~update:(Vector.map2 ~f:Set_or_keep.set_or_keep) and verification_key = update a.permissions.set_verification_key verification_key - init.verification_key ~is_keep:Set_or_keep.is_keep - ~update:(fun u x -> - match (u, x) with Keep, _ -> x | Set x, _ -> Some x ) + init.verification_key ~is_keep:Set_or_keep.is_keep ~update:(fun u x -> + match (u, x) with Keep, _ -> x | Set x, _ -> Some x) in - let t : Snapp_account.t = {app_state; verification_key} in + let t : Snapp_account.t = { app_state; 
verification_key } in if Snapp_account.(equal default t) then None else Some t in let%bind permissions = update a.permissions.set_delegate permissions a.permissions ~is_keep:Set_or_keep.is_keep ~update:Set_or_keep.set_or_keep in - Ok {a with balance; snapp; delegate; permissions; timing} + Ok { a with balance; snapp; delegate; permissions; timing } let apply_snapp_command_unchecked ledger ~(constraint_constants : Genesis_constants.Constraint_constants.t) @@ -1162,7 +1186,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let open Snapp_command in let current_global_slot = state_view.global_slot_since_genesis in let open Result.Let_syntax in - with_return (fun ({return} : _ Result.t return) -> + with_return (fun ({ return } : _ Result.t return) -> let ok_or_reject = function | Ok x -> x @@ -1181,23 +1205,24 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct match res with | Error failure -> Ok - { Transaction_applied.Snapp_command_applied.command= - { data= c - ; status= + { Transaction_applied.Snapp_command_applied.command = + { data = c + ; status = Failed ( failure , (* TODO: This needs to contain the correct data when we update the archive db to handle snapp commands. 
*) - Transaction_status.Balance_data.empty ) } - ; accounts= [Set_once.get_exn fee_payer_account [%here]] } + Transaction_status.Balance_data.empty ) + } + ; accounts = [ Set_once.get_exn fee_payer_account [%here] ] + } | Ok (accts, applied) -> let%bind () = - List.fold accts ~init:(Ok ()) - ~f:(fun acc (location, account) -> + List.fold accts ~init:(Ok ()) ~f:(fun acc (location, account) -> let%bind () = acc in - set_with_location ledger location account ) + set_with_location ledger location account) in Ok applied in @@ -1211,32 +1236,36 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in let step (a : Account.t) = { a with - nonce= Account.Nonce.succ a.nonce - ; receipt_chain_hash= - Receipt.Chain_hash.cons payload a.receipt_chain_hash } + nonce = Account.Nonce.succ a.nonce + ; receipt_chain_hash = + Receipt.Chain_hash.cons payload a.receipt_chain_hash + } in let step_amount ~amount (a : Account.t) = let%map timing = validate_timing a amount in - {(step a) with timing} + { (step a) with timing } in let step_fee_payer a fee = let f = Amount.of_fee fee in { (Or_error.ok_exn (step_amount a ~amount:f)) with - balance= opt "Cannot pay fee" (Balance.sub_amount a.balance f) } + balance = opt "Cannot pay fee" (Balance.sub_amount a.balance f) + } in let applied accounts = - { Transaction_applied.Snapp_command_applied.command= - { data= c - ; status= + { Transaction_applied.Snapp_command_applied.command = + { data = c + ; status = Applied ( Transaction_status.Auxiliary_data.empty , (* TODO: This needs to contain the correct data when we update the archive db to handle snapp commands. 
*) - Transaction_status.Balance_data.empty ) } - ; accounts } + Transaction_status.Balance_data.empty ) + } + ; accounts + } in - let pay_fee ({pk; nonce; fee; _} : Other_fee_payer.Payload.t) = + let pay_fee ({ pk; nonce; fee; _ } : Other_fee_payer.Payload.t) = let ((loc, acct, acct') as info) = pay_fee' ~command:payload ~nonce ~fee_payer:(Account_id.create pk fee_token_id) @@ -1246,20 +1275,20 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Set_once.set_exn fee_payer_account [%here] (Account_id.create pk fee_token_id, Some acct) ; (* Charge the fee. This must happen, whether or not the command itself - succeeds, to ensure that the network is compensated for processing this - command. + succeeds, to ensure that the network is compensated for processing this + command. *) ok_or_reject @@ set_with_location ledger loc acct' ; info in let apply_body = apply_body ~constraint_constants ~state_view in let open Party in - let set_delta - (r : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t) delta = - {r with data= {r.data with body= {r.data.body with delta}}} + let set_delta (r : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t) + delta = + { r with data = { r.data with body = { r.data.body with delta } } } in - let get_delta - (r : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t) = + let get_delta (r : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t) + = r.data.body.delta in (* TODO: @@ -1267,11 +1296,11 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct this just burns it. Probably we should assert that if another fee payer is present, the excess is 0. 
*) let f - ({token_id; fee_payment; one; two} : + ({ token_id; fee_payment; one; two } : ( ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t , ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t option ) - Inner.t) ~check_predicate1 ~check_predicate2 - ~account2_should_step = + Inner.t) ~check_predicate1 ~check_predicate2 ~account2_should_step + = let account_id1 = Account_id.create one.data.body.pk token_id in let loc1, acct1 = get_with_location ledger account_id1 |> ok_or_reject @@ -1281,7 +1310,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Option.map two ~f:(fun two -> get_with_location ledger (Account_id.create two.data.body.pk token_id) - |> ok_or_reject ) + |> ok_or_reject) in (* Pay the fee, step both accounts. *) let acct1', acct2', one, two, fee_payer_info, fee_payer_pk = @@ -1291,7 +1320,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ok_or_reject @@ set_with_location ledger loc acct ; let delta = (* delta = delta_remaining + (-fee) - delta_remaining = delta + fee + delta_remaining = delta + fee *) opt "Transaction overflow" Amount.Signed.(add (get_delta p) (of_unsigned fee)) @@ -1300,38 +1329,38 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in match fee_payment with | None -> ( - (* TODO: Assert that the two have opposite signs *) - match one.data.body.delta.sgn with - | Neg -> - (* Account 1 is the sender. *) - Set_once.set_exn fee_payer_account [%here] - (account_id1, Some acct1) ; - let acct1, one = party_fee_payer loc1 acct1 one in - ( acct1 - , Option.map lacct2 ~f:(fun (_, a) -> - if account2_should_step then step a else a ) - , one - , two - , None - , one.data.body.pk ) - | Pos -> - (* Account 2 is the sender. 
*) - let err x = opt "account 2 is the sender, but is None" x in - let loc2, acct2 = err lacct2 in - Set_once.set_exn fee_payer_account [%here] - (Account_id.create acct2.public_key token_id, Some acct2) ; - let two = err two in - let acct2, two = party_fee_payer loc2 acct2 two in - ( step acct1 (* Account 1 always steps. *) - , Some acct2 - , one - , Some two - , None - , two.data.body.pk ) ) - | Some {payload; signature= _} -> + (* TODO: Assert that the two have opposite signs *) + match one.data.body.delta.sgn with + | Neg -> + (* Account 1 is the sender. *) + Set_once.set_exn fee_payer_account [%here] + (account_id1, Some acct1) ; + let acct1, one = party_fee_payer loc1 acct1 one in + ( acct1 + , Option.map lacct2 ~f:(fun (_, a) -> + if account2_should_step then step a else a) + , one + , two + , None + , one.data.body.pk ) + | Pos -> + (* Account 2 is the sender. *) + let err x = opt "account 2 is the sender, but is None" x in + let loc2, acct2 = err lacct2 in + Set_once.set_exn fee_payer_account [%here] + (Account_id.create acct2.public_key token_id, Some acct2) ; + let two = err two in + let acct2, two = party_fee_payer loc2 acct2 two in + ( step acct1 (* Account 1 always steps. *) + , Some acct2 + , one + , Some two + , None + , two.data.body.pk ) ) + | Some { payload; signature = _ } -> ( step acct1 , Option.map lacct2 ~f:(fun (_, a) -> - if account2_should_step then step a else a ) + if account2_should_step then step a else a) , one , two , Some (pay_fee payload) @@ -1343,14 +1372,13 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Ok None | ( Some (loc2, acct2) , Some acct2' - , Some {data= {body; predicate}; authorization} ) -> + , Some { data = { body; predicate }; authorization } ) -> (* TODO: Make sure that body.delta is positive. I think - the Snapp_command.check function does this. *) + the Snapp_command.check function does this. 
*) (* Check the predicate *) let%bind () = check_predicate2 predicate ~state_view ~self:acct2 - ~other_prev:(Some acct1) ~other_next:(Some ()) - ~fee_payer_pk + ~other_prev:(Some acct1) ~other_next:(Some ()) ~fee_payer_pk |> with_err Predicate in (* Update *) @@ -1398,21 +1426,21 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ( Account_id.create a.public_key fee_token_id , match loc with `New -> None | `Existing _ -> Some a ) in - ( ( [(loc1, acct1')] @ Option.to_list lacct2' + ( ( [ (loc1, acct1') ] @ Option.to_list lacct2' @ Option.( to_list (map fee_payer_info ~f:(fun (loc, _, fp_acct') -> - (loc, fp_acct') ))) ) + (loc, fp_acct')))) ) , applied ( (account_id1, Some acct1) :: Option.( to_list (map lacct2 ~f:(fun (loc, a) -> - applied_per_account loc a ))) + applied_per_account loc a))) @ Option.( to_list (map fee_payer_info ~f:(fun (loc, a, _) -> - applied_per_account loc a ))) ) ) + applied_per_account loc a))) ) ) in let check_nonce nonce ~state_view:_ ~(self : Account.t) ~other_prev:_ ~other_next:_ ~fee_payer_pk:_ = @@ -1422,35 +1450,41 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ~fee_payer_pk:_ = Ok () in - let wrap_two (r : _ Inner.t) = {r with two= Some r.two} in + let wrap_two (r : _ Inner.t) = { r with two = Some r.two } in let signed_one (r : (_ Party.Authorized.Poly.t, _) Inner.t) = { r with - one= - {r.one with authorization= Control.Signature r.one.authorization} + one = + { r.one with + authorization = Control.Signature r.one.authorization + } } in let signed_two (r : (_, _ Party.Authorized.Poly.t) Inner.t) = { r with - two= - {r.two with authorization= Control.Signature r.two.authorization} + two = + { r.two with + authorization = Control.Signature r.two.authorization + } } in ( match c with | Proved_empty r -> f { r with - two= + two = Option.map r.two ~f:(fun two -> - {two with authorization= Control.None_given} ) } + { two with authorization = Control.None_given }) + } 
~check_predicate1:Predicate.check ~check_predicate2:no_check ~account2_should_step:false | Signed_empty r -> f { r with - two= + two = Option.map r.two ~f:(fun two -> - {two with authorization= Control.None_given} ) - ; one= {r.one with authorization= Signature r.one.authorization} + { two with authorization = Control.None_given }) + ; one = + { r.one with authorization = Signature r.one.authorization } } ~check_predicate1:check_nonce ~check_predicate2:no_check ~account2_should_step:false @@ -1467,7 +1501,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct (wrap_two (signed_one (signed_two r))) ~check_predicate1:check_nonce ~check_predicate2:check_nonce ~account2_should_step:true ) - |> finish ) + |> finish) let update_timing_when_no_deduction ~txn_global_slot account = validate_timing ~txn_amount:Amount.zero ~txn_global_slot ~account @@ -1495,7 +1529,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let emptys = previous_empty_accounts action account_id in let%bind timing = modify_timing a in let%map balance = modify_balance action account_id a.balance ft.fee in - set t loc {a with balance; timing} ; + set t loc { a with balance; timing } ; (emptys, a.timing) | `Two (ft1, ft2) -> let account_id1 = Fee_transfer.Single.receiver ft1 in @@ -1509,10 +1543,8 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct if Account_id.equal account_id1 account_id2 then ( let%bind fee = error_opt "overflow" (Fee.add ft1.fee ft2.fee) in let%bind timing = modify_timing a1 in - let%map balance = - modify_balance action1 account_id1 a1.balance fee - in - set t l1 {a1 with balance; timing} ; + let%map balance = modify_balance action1 account_id1 a1.balance fee in + set t l1 { a1 with balance; timing } ; (emptys1, a1.timing) ) else (* TODO(#4496): Do not use get_or_create here; we should not create a @@ -1529,8 +1561,8 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let%map balance2 = modify_balance action2 
account_id2 a2.balance ft2.fee in - set t l1 {a1 with balance= balance1} ; - set t l2 {a2 with balance= balance2; timing= timing2} ; + set t l1 { a1 with balance = balance1 } ; + set t l2 { a2 with balance = balance2; timing = timing2 } ; (emptys1 @ emptys2, a2.timing) let apply_fee_transfer ~constraint_constants ~txn_global_slot t transfer = @@ -1542,9 +1574,9 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let amount = Amount.of_fee f in sub_account_creation_fee ~constraint_constants action amount in - add_amount b amount ) + add_amount b amount) ~modify_timing:(fun acc -> - update_timing_when_no_deduction ~txn_global_slot acc ) + update_timing_when_no_deduction ~txn_global_slot acc) in let compute_balance account_id = match get_user_account_with_location t account_id with @@ -1556,56 +1588,58 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let balances = match Fee_transfer.to_singles transfer with | `One ft -> - { Transaction_status.Fee_transfer_balance_data.receiver1_balance= + { Transaction_status.Fee_transfer_balance_data.receiver1_balance = Option.value_exn (compute_balance (Fee_transfer.Single.receiver ft)) - ; receiver2_balance= None } + ; receiver2_balance = None + } | `Two (ft1, ft2) -> - { Transaction_status.Fee_transfer_balance_data.receiver1_balance= + { Transaction_status.Fee_transfer_balance_data.receiver1_balance = Option.value_exn (compute_balance (Fee_transfer.Single.receiver ft1)) - ; receiver2_balance= - compute_balance (Fee_transfer.Single.receiver ft2) } + ; receiver2_balance = + compute_balance (Fee_transfer.Single.receiver ft2) + } in Transaction_applied.Fee_transfer_applied. 
- { fee_transfer= transfer + { fee_transfer = transfer ; previous_empty_accounts ; receiver_timing - ; balances } + ; balances + } let undo_fee_transfer ~constraint_constants t - ({previous_empty_accounts; fee_transfer; receiver_timing; balances= _} : + ({ previous_empty_accounts; fee_transfer; receiver_timing; balances = _ } : Transaction_applied.Fee_transfer_applied.t) = let open Or_error.Let_syntax in let%map _ = process_fee_transfer t fee_transfer ~modify_balance:(fun _ aid b f -> let action = - if List.mem ~equal:Account_id.equal previous_empty_accounts aid - then `Added + if List.mem ~equal:Account_id.equal previous_empty_accounts aid then + `Added else `Existed in let%bind amount = sub_account_creation_fee ~constraint_constants action (Amount.of_fee f) in - sub_amount b amount ) + sub_amount b amount) ~modify_timing:(fun _ -> Ok receiver_timing) in remove_accounts_exn t previous_empty_accounts let apply_coinbase ~constraint_constants ~txn_global_slot t (* TODO: Better system needed for making atomic changes. Could use a monad. 
*) - ({receiver; fee_transfer; amount= coinbase_amount} as cb : Coinbase.t) = + ({ receiver; fee_transfer; amount = coinbase_amount } as cb : + Coinbase.t) = let open Or_error.Let_syntax in - let%bind ( receiver_reward - , emptys1 - , transferee_update - , transferee_timing_prev ) = + let%bind receiver_reward, emptys1, transferee_update, transferee_timing_prev + = match fee_transfer with | None -> return (coinbase_amount, [], None, None) - | Some ({receiver_pk= transferee; fee} as ft) -> + | Some ({ receiver_pk = transferee; fee } as ft) -> assert (not @@ Public_key.Compressed.equal transferee receiver) ; let transferee_id = Coinbase.Fee_transfer.receiver ft in let fee = Amount.of_fee fee in @@ -1633,7 +1667,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ( receiver_reward , emptys , Some - (transferee_location, {transferee_account with balance; timing}) + (transferee_location, { transferee_account with balance; timing }) , Some transferee_account.timing ) in let receiver_id = Account_id.create receiver Token_id.default in @@ -1664,32 +1698,36 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in set t receiver_location { receiver_account with - balance= receiver_balance - ; timing= coinbase_receiver_timing } ; + balance = receiver_balance + ; timing = coinbase_receiver_timing + } ; Option.iter transferee_update ~f:(fun (l, a) -> set t l a) ; Transaction_applied.Coinbase_applied. 
- { coinbase= cb - ; previous_empty_accounts= emptys1 @ emptys2 - ; receiver_timing= receiver_timing_for_applied - ; balances= - { Transaction_status.Coinbase_balance_data.coinbase_receiver_balance= + { coinbase = cb + ; previous_empty_accounts = emptys1 @ emptys2 + ; receiver_timing = receiver_timing_for_applied + ; balances = + { Transaction_status.Coinbase_balance_data.coinbase_receiver_balance = receiver_balance - ; fee_transfer_receiver_balance= - Option.map transferee_update ~f:(fun (_, a) -> a.balance) } } + ; fee_transfer_receiver_balance = + Option.map transferee_update ~f:(fun (_, a) -> a.balance) + } + } (* Don't have to be atomic here because these should never fail. In fact, none of - the undo functions should ever return an error. This should be fixed in the types. *) + the undo functions should ever return an error. This should be fixed in the types. *) let undo_coinbase ~constraint_constants t Transaction_applied.Coinbase_applied. - { coinbase= {receiver; fee_transfer; amount= coinbase_amount} + { coinbase = { receiver; fee_transfer; amount = coinbase_amount } ; previous_empty_accounts ; receiver_timing - ; balances= _ } = + ; balances = _ + } = let receiver_reward, receiver_timing = match fee_transfer with | None -> (coinbase_amount, Some receiver_timing) - | Some ({receiver_pk= _; fee} as ft) -> + | Some ({ receiver_pk = _; fee } as ft) -> let fee = Amount.of_fee fee in let transferee_id = Coinbase.Fee_transfer.receiver ft in let transferee_location = @@ -1715,8 +1753,9 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct in set t transferee_location { transferee_account with - balance= transferee_balance - ; timing= receiver_timing } ; + balance = transferee_balance + ; timing = receiver_timing + } ; (Option.value_exn (Amount.sub coinbase_amount fee), None) in let receiver_id = Account_id.create receiver Token_id.default in @@ -1742,19 +1781,22 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Option.value 
~default:receiver_account.timing receiver_timing in set t receiver_location - {receiver_account with balance= receiver_balance; timing} ; + { receiver_account with balance = receiver_balance; timing } ; remove_accounts_exn t previous_empty_accounts let undo_user_command ~(constraint_constants : Genesis_constants.Constraint_constants.t) ledger - { Transaction_applied.Signed_command_applied.common= - { user_command= - { data= {payload; signer= _; signature= _} as user_command - ; status= _ } + { Transaction_applied.Signed_command_applied.common = + { user_command = + { data = { payload; signer = _; signature = _ } as user_command + ; status = _ + } ; previous_receipt_chain_hash ; fee_payer_timing - ; source_timing } - ; body } = + ; source_timing + } + ; body + } = let open Or_error.Let_syntax in (* Fee-payer information *) let fee_payer = Signed_command.fee_payer user_command in @@ -1773,14 +1815,15 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct { account with balance ; nonce - ; receipt_chain_hash= previous_receipt_chain_hash - ; timing= fee_payer_timing } + ; receipt_chain_hash = previous_receipt_chain_hash + ; timing = fee_payer_timing + } in (* Update the fee-payer's account. *) set ledger fee_payer_location fee_payer_account ; let next_available_token = match body with - | Create_new_token {created_token} -> + | Create_new_token { created_token } -> created_token | _ -> next_available_token ledger @@ -1798,7 +1841,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct | _, Failed -> (* The user command failed, only the fee was charged. 
*) return () - | Stake_delegation (Set_delegate _), Stake_delegation {previous_delegate} + | Stake_delegation (Set_delegate _), Stake_delegation { previous_delegate } -> let%bind source_location = location_of_account' ledger "source" source @@ -1806,17 +1849,15 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let%map source_account = get' ledger "source" source_location in set ledger source_location { source_account with - delegate= previous_delegate - ; timing= Option.value ~default:source_account.timing source_timing + delegate = previous_delegate + ; timing = Option.value ~default:source_account.timing source_timing } - | Payment {amount; _}, Payment {previous_empty_accounts} -> + | Payment { amount; _ }, Payment { previous_empty_accounts } -> let receiver = Signed_command.receiver ~next_available_token user_command in let%bind receiver_location, receiver_account = - let%bind location = - location_of_account' ledger "receiver" receiver - in + let%bind location = location_of_account' ledger "receiver" receiver in let%map account = get' ledger "receiver" location in let balance = (* NOTE: [sub_amount] is only [None] if the account creation fee @@ -1826,16 +1867,14 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Option.value ~default:Balance.zero (Balance.sub_amount account.balance amount) in - (location, {account with balance}) + (location, { account with balance }) in let%map source_location, source_account = let%bind location, account = if Account_id.equal source receiver then return (receiver_location, receiver_account) else - let%bind location = - location_of_account' ledger "source" source - in + let%bind location = location_of_account' ledger "source" source in let%map account = get' ledger "source" location in (location, account) in @@ -1843,7 +1882,8 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct ( location , { account with balance - ; timing= Option.value ~default:account.timing 
source_timing } ) + ; timing = Option.value ~default:account.timing source_timing + } ) in set ledger receiver_location receiver_account ; set ledger source_location source_account ; @@ -1859,7 +1899,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct (Balance.add_amount fee_payer_account.balance (Amount.of_fee constraint_constants.account_creation_fee)) in - {fee_payer_account with balance} + { fee_payer_account with balance } in let%bind source_location = location_of_account' ledger "source" source @@ -1874,41 +1914,38 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct set ledger fee_payer_location fee_payer_account ; set ledger source_location { source_account with - timing= Option.value ~default:source_account.timing source_timing + timing = Option.value ~default:source_account.timing source_timing } ; - remove_accounts_exn ledger [receiver] ; + remove_accounts_exn ledger [ receiver ] ; (* Restore to the previous [next_available_token]. This is a no-op if the [next_available_token] did not change. 
*) set_next_available_token ledger next_available_token - | Mint_tokens {amount; _}, Mint_tokens -> + | Mint_tokens { amount; _ }, Mint_tokens -> let receiver = Signed_command.receiver ~next_available_token user_command in let%bind receiver_location, receiver_account = - let%bind location = - location_of_account' ledger "receiver" receiver - in + let%bind location = location_of_account' ledger "receiver" receiver in let%map account = get' ledger "receiver" location in let balance = Option.value_exn (Balance.sub_amount account.balance amount) in - (location, {account with balance}) + (location, { account with balance }) in let%map source_location, source_account = let%map location, account = if Account_id.equal source receiver then return (receiver_location, receiver_account) else - let%bind location = - location_of_account' ledger "source" source - in + let%bind location = location_of_account' ledger "source" source in let%map account = get' ledger "source" location in (location, account) in ( location , { account with - timing= Option.value ~default:account.timing source_timing } ) + timing = Option.value ~default:account.timing source_timing + } ) in set ledger receiver_location receiver_account ; set ledger source_location source_account @@ -1916,10 +1953,10 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct failwith "Transaction_applied/command mismatch" let undo_snapp_command ~constraint_constants:_ ledger - {Transaction_applied.Snapp_command_applied.accounts; command= _} = + { Transaction_applied.Snapp_command_applied.accounts; command = _ } = let to_update, to_delete = List.partition_map accounts ~f:(fun (id, a) -> - match a with Some a -> `Fst (id, a) | None -> `Snd id ) + match a with Some a -> `Fst (id, a) | None -> `Snd id) in let to_update = List.dedup_and_sort @@ -1932,12 +1969,12 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct let%map loc = location_of_account' ledger (sprintf !"%{sexp:Account_id.t}" id) id in 
- (`Existing loc, a) ) + (`Existing loc, a)) |> Or_error.all in remove_accounts_exn ledger to_delete ; List.iter to_update ~f:(fun (location, account) -> - ignore @@ set_with_location ledger location account ) + ignore @@ set_with_location ledger location account) let undo : constraint_constants:Genesis_constants.Constraint_constants.t @@ -1959,7 +1996,7 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Ok () in Debug_assert.debug_assert (fun () -> - [%test_eq: Ledger_hash.t] applied.previous_hash (merkle_root ledger) ) ; + [%test_eq: Ledger_hash.t] applied.previous_hash (merkle_root ledger)) ; res let apply_transaction ~constraint_constants @@ -1974,25 +2011,23 @@ module Make (L : Ledger_intf) : S with type ledger := L.t = struct Or_error.map (apply_user_command_unchecked ~constraint_constants ~txn_global_slot ledger txn) ~f:(fun applied -> - Transaction_applied.Varying.Command (Signed_command applied) - ) + Transaction_applied.Varying.Command (Signed_command applied)) | Command (Snapp_command txn) -> Or_error.map (apply_snapp_command_unchecked ~state_view:txn_state_view ~constraint_constants ledger txn) ~f:(fun applied -> - Transaction_applied.Varying.Command (Snapp_command applied) - ) + Transaction_applied.Varying.Command (Snapp_command applied)) | Fee_transfer t -> Or_error.map (apply_fee_transfer ~constraint_constants ~txn_global_slot ledger t) ~f:(fun applied -> - Transaction_applied.Varying.Fee_transfer applied ) + Transaction_applied.Varying.Fee_transfer applied) | Coinbase t -> Or_error.map (apply_coinbase ~constraint_constants ~txn_global_slot ledger t) ~f:(fun applied -> Transaction_applied.Varying.Coinbase applied) ) - ~f:(fun varying -> {Transaction_applied.previous_hash; varying}) ) + ~f:(fun varying -> { Transaction_applied.previous_hash; varying })) let merkle_root_after_snapp_command_exn ~constraint_constants ~txn_state_view ledger payment = diff --git a/src/lib/mina_base/transaction_status.ml 
b/src/lib/mina_base/transaction_status.ml index 8d4a16dd225..38b7a3dcb38 100644 --- a/src/lib/mina_base/transaction_status.ml +++ b/src/lib/mina_base/transaction_status.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel @@ -123,8 +122,8 @@ module Failure = struct | Receiver_not_present -> "The receiver account does not exist" | Amount_insufficient_to_create_account -> - "Cannot create account: transaction amount is smaller than the \ - account creation fee" + "Cannot create account: transaction amount is smaller than the account \ + creation fee" | Cannot_pay_creation_fee_in_token -> "Cannot create account: account creation fees cannot be paid in \ non-default tokens" @@ -149,8 +148,7 @@ module Failure = struct | Incorrect_nonce -> "Incorrect nonce" - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] open Snark_params.Tick @@ -161,21 +159,22 @@ module Failure = struct module Poly = struct type 'bool t = - { predicate: 'bool - ; source_not_present: 'bool - ; receiver_not_present: 'bool - ; amount_insufficient_to_create_account: 'bool - ; cannot_pay_creation_fee_in_token: 'bool - ; source_insufficient_balance: 'bool - ; source_minimum_balance_violation: 'bool - ; receiver_already_exists: 'bool - ; not_token_owner: 'bool - ; mismatched_token_permissions: 'bool - ; overflow: 'bool - ; signed_command_on_snapp_account: 'bool - ; snapp_account_not_present: 'bool - ; update_not_permitted: 'bool - ; incorrect_nonce: 'bool } + { predicate : 'bool + ; source_not_present : 'bool + ; receiver_not_present : 'bool + ; amount_insufficient_to_create_account : 'bool + ; cannot_pay_creation_fee_in_token : 'bool + ; source_insufficient_balance : 'bool + ; source_minimum_balance_violation : 'bool + ; receiver_already_exists : 'bool + ; not_token_owner : 'bool + ; mismatched_token_permissions : 'bool + ; overflow : 'bool + ; signed_command_on_snapp_account : 'bool + ; snapp_account_not_present : 'bool + ; update_not_permitted : 
'bool + ; incorrect_nonce : 'bool + } [@@deriving hlist, equal, sexp, compare] let map ~f @@ -193,41 +192,44 @@ module Failure = struct ; signed_command_on_snapp_account ; snapp_account_not_present ; update_not_permitted - ; incorrect_nonce } = - { predicate= f predicate - ; source_not_present= f source_not_present - ; receiver_not_present= f receiver_not_present - ; amount_insufficient_to_create_account= + ; incorrect_nonce + } = + { predicate = f predicate + ; source_not_present = f source_not_present + ; receiver_not_present = f receiver_not_present + ; amount_insufficient_to_create_account = f amount_insufficient_to_create_account - ; cannot_pay_creation_fee_in_token= f cannot_pay_creation_fee_in_token - ; source_insufficient_balance= f source_insufficient_balance - ; source_minimum_balance_violation= f source_minimum_balance_violation - ; receiver_already_exists= f receiver_already_exists - ; not_token_owner= f not_token_owner - ; mismatched_token_permissions= f mismatched_token_permissions - ; overflow= f overflow - ; signed_command_on_snapp_account= f signed_command_on_snapp_account - ; snapp_account_not_present= f snapp_account_not_present - ; update_not_permitted= f update_not_permitted - ; incorrect_nonce= f incorrect_nonce } + ; cannot_pay_creation_fee_in_token = f cannot_pay_creation_fee_in_token + ; source_insufficient_balance = f source_insufficient_balance + ; source_minimum_balance_violation = f source_minimum_balance_violation + ; receiver_already_exists = f receiver_already_exists + ; not_token_owner = f not_token_owner + ; mismatched_token_permissions = f mismatched_token_permissions + ; overflow = f overflow + ; signed_command_on_snapp_account = f signed_command_on_snapp_account + ; snapp_account_not_present = f snapp_account_not_present + ; update_not_permitted = f update_not_permitted + ; incorrect_nonce = f incorrect_nonce + } end type 'bool poly = 'bool Poly.t = - { predicate: 'bool - ; source_not_present: 'bool - ; receiver_not_present: 
'bool - ; amount_insufficient_to_create_account: 'bool - ; cannot_pay_creation_fee_in_token: 'bool - ; source_insufficient_balance: 'bool - ; source_minimum_balance_violation: 'bool - ; receiver_already_exists: 'bool - ; not_token_owner: 'bool - ; mismatched_token_permissions: 'bool - ; overflow: 'bool - ; signed_command_on_snapp_account: 'bool - ; snapp_account_not_present: 'bool - ; update_not_permitted: 'bool - ; incorrect_nonce: 'bool } + { predicate : 'bool + ; source_not_present : 'bool + ; receiver_not_present : 'bool + ; amount_insufficient_to_create_account : 'bool + ; cannot_pay_creation_fee_in_token : 'bool + ; source_insufficient_balance : 'bool + ; source_minimum_balance_violation : 'bool + ; receiver_already_exists : 'bool + ; not_token_owner : 'bool + ; mismatched_token_permissions : 'bool + ; overflow : 'bool + ; signed_command_on_snapp_account : 'bool + ; snapp_account_not_present : 'bool + ; update_not_permitted : 'bool + ; incorrect_nonce : 'bool + } [@@deriving equal, sexp, compare] type t = bool poly [@@deriving equal, sexp, compare] @@ -283,7 +285,8 @@ module Failure = struct ; signed_command_on_snapp_account ; snapp_account_not_present ; update_not_permitted - ; incorrect_nonce } = + ; incorrect_nonce + } = let bool_to_int b = if b then 1 else 0 in let failures = bool_to_int predicate @@ -307,93 +310,95 @@ module Failure = struct let typ : (var, t) Typ.t = let bt = Boolean.typ in Typ.of_hlistable - [bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt] + [ bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt; bt ] ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist let none = - { predicate= false - ; source_not_present= false - ; receiver_not_present= false - ; amount_insufficient_to_create_account= false - ; cannot_pay_creation_fee_in_token= false - ; source_insufficient_balance= false - ; source_minimum_balance_violation= false - ; receiver_already_exists= false - ; 
not_token_owner= false - ; mismatched_token_permissions= false - ; overflow= false - ; signed_command_on_snapp_account= false - ; snapp_account_not_present= false - ; update_not_permitted= false - ; incorrect_nonce= false } - - let predicate = {none with predicate= true} - - let source_not_present = {none with source_not_present= true} - - let receiver_not_present = {none with receiver_not_present= true} + { predicate = false + ; source_not_present = false + ; receiver_not_present = false + ; amount_insufficient_to_create_account = false + ; cannot_pay_creation_fee_in_token = false + ; source_insufficient_balance = false + ; source_minimum_balance_violation = false + ; receiver_already_exists = false + ; not_token_owner = false + ; mismatched_token_permissions = false + ; overflow = false + ; signed_command_on_snapp_account = false + ; snapp_account_not_present = false + ; update_not_permitted = false + ; incorrect_nonce = false + } + + let predicate = { none with predicate = true } + + let source_not_present = { none with source_not_present = true } + + let receiver_not_present = { none with receiver_not_present = true } let amount_insufficient_to_create_account = - {none with amount_insufficient_to_create_account= true} + { none with amount_insufficient_to_create_account = true } let cannot_pay_creation_fee_in_token = - {none with cannot_pay_creation_fee_in_token= true} + { none with cannot_pay_creation_fee_in_token = true } let source_insufficient_balance = - {none with source_insufficient_balance= true} + { none with source_insufficient_balance = true } let source_minimum_balance_violation = - {none with source_minimum_balance_violation= true} + { none with source_minimum_balance_violation = true } - let receiver_already_exists = {none with receiver_already_exists= true} + let receiver_already_exists = { none with receiver_already_exists = true } - let not_token_owner = {none with not_token_owner= true} + let not_token_owner = { none with not_token_owner = true 
} let mismatched_token_permissions = - {none with mismatched_token_permissions= true} + { none with mismatched_token_permissions = true } - let overflow = {none with overflow= true} + let overflow = { none with overflow = true } let signed_command_on_snapp_account = - {none with signed_command_on_snapp_account= true} + { none with signed_command_on_snapp_account = true } - let snapp_account_not_present = {none with snapp_account_not_present= true} + let snapp_account_not_present = + { none with snapp_account_not_present = true } - let update_not_permitted = {none with update_not_permitted= true} + let update_not_permitted = { none with update_not_permitted = true } - let incorrect_nonce = {none with incorrect_nonce= true} + let incorrect_nonce = { none with incorrect_nonce = true } let to_enum = function - | {predicate= true; _} -> + | { predicate = true; _ } -> to_enum Predicate - | {source_not_present= true; _} -> + | { source_not_present = true; _ } -> to_enum Source_not_present - | {receiver_not_present= true; _} -> + | { receiver_not_present = true; _ } -> to_enum Receiver_not_present - | {amount_insufficient_to_create_account= true; _} -> + | { amount_insufficient_to_create_account = true; _ } -> to_enum Amount_insufficient_to_create_account - | {cannot_pay_creation_fee_in_token= true; _} -> + | { cannot_pay_creation_fee_in_token = true; _ } -> to_enum Cannot_pay_creation_fee_in_token - | {source_insufficient_balance= true; _} -> + | { source_insufficient_balance = true; _ } -> to_enum Source_insufficient_balance - | {source_minimum_balance_violation= true; _} -> + | { source_minimum_balance_violation = true; _ } -> to_enum Source_minimum_balance_violation - | {receiver_already_exists= true; _} -> + | { receiver_already_exists = true; _ } -> to_enum Receiver_already_exists - | {not_token_owner= true; _} -> + | { not_token_owner = true; _ } -> to_enum Not_token_owner - | {mismatched_token_permissions= true; _} -> + | { mismatched_token_permissions = true; _ } 
-> to_enum Mismatched_token_permissions - | {overflow= true; _} -> + | { overflow = true; _ } -> to_enum Overflow - | {signed_command_on_snapp_account= true; _} -> + | { signed_command_on_snapp_account = true; _ } -> to_enum Signed_command_on_snapp_account - | {snapp_account_not_present= true; _} -> + | { snapp_account_not_present = true; _ } -> to_enum Snapp_account_not_present - | {update_not_permitted= true; _} -> + | { update_not_permitted = true; _ } -> to_enum Update_not_permitted - | {incorrect_nonce= true; _} -> + | { incorrect_nonce = true; _ } -> to_enum Incorrect_nonce | _ -> 0 @@ -403,42 +408,42 @@ module Failure = struct | 0 -> Some none | _ -> ( - match of_enum enum with - | Some failure -> - Some - ( match failure with - | Predicate -> - predicate - | Source_not_present -> - source_not_present - | Receiver_not_present -> - receiver_not_present - | Amount_insufficient_to_create_account -> - amount_insufficient_to_create_account - | Cannot_pay_creation_fee_in_token -> - cannot_pay_creation_fee_in_token - | Source_insufficient_balance -> - source_insufficient_balance - | Source_minimum_balance_violation -> - source_minimum_balance_violation - | Receiver_already_exists -> - receiver_already_exists - | Not_token_owner -> - not_token_owner - | Mismatched_token_permissions -> - mismatched_token_permissions - | Overflow -> - overflow - | Signed_command_on_snapp_account -> - signed_command_on_snapp_account - | Snapp_account_not_present -> - snapp_account_not_present - | Update_not_permitted -> - update_not_permitted - | Incorrect_nonce -> - incorrect_nonce ) - | None -> - None ) + match of_enum enum with + | Some failure -> + Some + ( match failure with + | Predicate -> + predicate + | Source_not_present -> + source_not_present + | Receiver_not_present -> + receiver_not_present + | Amount_insufficient_to_create_account -> + amount_insufficient_to_create_account + | Cannot_pay_creation_fee_in_token -> + cannot_pay_creation_fee_in_token + | 
Source_insufficient_balance -> + source_insufficient_balance + | Source_minimum_balance_violation -> + source_minimum_balance_violation + | Receiver_already_exists -> + receiver_already_exists + | Not_token_owner -> + not_token_owner + | Mismatched_token_permissions -> + mismatched_token_permissions + | Overflow -> + overflow + | Signed_command_on_snapp_account -> + signed_command_on_snapp_account + | Snapp_account_not_present -> + snapp_account_not_present + | Update_not_permitted -> + update_not_permitted + | Incorrect_nonce -> + incorrect_nonce ) + | None -> + None ) let min = 0 @@ -452,7 +457,7 @@ module Failure = struct module Var : sig module Accumulators : sig - type t = private {user_command_failure: Boolean.var} + type t = private { user_command_failure : Boolean.var } end (** Canonical representation for user command failures in snarky. @@ -461,7 +466,7 @@ module Failure = struct enable us to do a cheap checking operation. The type is private to ensure that the invariants of this check are always satisfied. 
*) - type t = private {data: As_record.var; accumulators: Accumulators.t} + type t = private { data : As_record.var; accumulators : Accumulators.t } val min : int @@ -507,7 +512,7 @@ module Failure = struct end = struct module Accumulators = struct (* TODO: receiver, source accumulators *) - type t = {user_command_failure: Boolean.var} + type t = { user_command_failure : Boolean.var } let make_unsafe ({ predicate @@ -524,7 +529,8 @@ module Failure = struct ; signed_command_on_snapp_account ; snapp_account_not_present ; update_not_permitted - ; incorrect_nonce } : + ; incorrect_nonce + } : As_record.var) : t = let user_command_failure = Boolean.Unsafe.of_cvar @@ -543,29 +549,32 @@ module Failure = struct ; (signed_command_on_snapp_account :> Field.Var.t) ; (snapp_account_not_present :> Field.Var.t) ; (update_not_permitted :> Field.Var.t) - ; (incorrect_nonce :> Field.Var.t) ]) + ; (incorrect_nonce :> Field.Var.t) + ]) in - {user_command_failure} + { user_command_failure } - let check {user_command_failure} = + let check { user_command_failure } = Checked.ignore_m - @@ Checked.all [Boolean.of_field (user_command_failure :> Field.Var.t)] + @@ Checked.all + [ Boolean.of_field (user_command_failure :> Field.Var.t) ] end - type t = {data: As_record.var; accumulators: Accumulators.t} + type t = { data : As_record.var; accumulators : Accumulators.t } - let of_record data = {data; accumulators= Accumulators.make_unsafe data} + let of_record data = { data; accumulators = Accumulators.make_unsafe data } let typ : (t, As_record.t) Typ.t = let typ = As_record.typ in - { store= (fun data -> Typ.Store.map ~f:of_record (typ.store data)) - ; read= (fun {data; _} -> typ.read data) - ; alloc= Typ.Alloc.map ~f:of_record typ.alloc - ; check= + { store = (fun data -> Typ.Store.map ~f:of_record (typ.store data)) + ; read = (fun { data; _ } -> typ.read data) + ; alloc = Typ.Alloc.map ~f:of_record typ.alloc + ; check = Checked.( - fun {data; accumulators} -> + fun { data; accumulators } -> 
let%bind () = typ.check data in - Accumulators.check accumulators) } + Accumulators.check accumulators) + } let mk_var = Fn.compose of_record As_record.var_of_t @@ -607,7 +616,7 @@ module Failure = struct let incorrect_nonce = mk_var As_record.incorrect_nonce - let get {data; _} failure = As_record.get data failure + let get { data; _ } failure = As_record.get data failure let min = As_record.min @@ -664,7 +673,7 @@ module Failure = struct let typ : (var, t) Typ.t = Typ.transport Var.typ ~there:to_record ~back:(fun x -> - Option.value_exn (of_record_opt x) ) + Option.value_exn (of_record_opt x)) let typ_opt : (var, t option) Typ.t = Typ.transport Var.typ ~there:to_record_opt ~back:of_record_opt @@ -681,9 +690,10 @@ module Balance_data = struct module Stable = struct module V1 = struct type t = - { fee_payer_balance: Currency.Balance.Stable.V1.t option - ; source_balance: Currency.Balance.Stable.V1.t option - ; receiver_balance: Currency.Balance.Stable.V1.t option } + { fee_payer_balance : Currency.Balance.Stable.V1.t option + ; source_balance : Currency.Balance.Stable.V1.t option + ; receiver_balance : Currency.Balance.Stable.V1.t option + } [@@deriving sexp, yojson, equal, compare] let to_latest = Fn.id @@ -691,7 +701,7 @@ module Balance_data = struct end] let empty = - {fee_payer_balance= None; source_balance= None; receiver_balance= None} + { fee_payer_balance = None; source_balance = None; receiver_balance = None } end module Coinbase_balance_data = struct @@ -699,8 +709,9 @@ module Coinbase_balance_data = struct module Stable = struct module V1 = struct type t = - { coinbase_receiver_balance: Currency.Balance.Stable.V1.t - ; fee_transfer_receiver_balance: Currency.Balance.Stable.V1.t option } + { coinbase_receiver_balance : Currency.Balance.Stable.V1.t + ; fee_transfer_receiver_balance : Currency.Balance.Stable.V1.t option + } [@@deriving sexp, yojson, equal, compare] let to_latest = Fn.id @@ -708,7 +719,7 @@ module Coinbase_balance_data = struct end] let 
of_balance_data_exn - {Balance_data.fee_payer_balance; source_balance; receiver_balance} = + { Balance_data.fee_payer_balance; source_balance; receiver_balance } = ( match source_balance with | Some _ -> failwith @@ -724,13 +735,16 @@ module Coinbase_balance_data = struct "Missing fee-payer balance for \ Coinbase_balance_data.of_balance_data" in - {coinbase_receiver_balance; fee_transfer_receiver_balance= receiver_balance} - - let to_balance_data {coinbase_receiver_balance; fee_transfer_receiver_balance} - = - { Balance_data.fee_payer_balance= Some coinbase_receiver_balance - ; source_balance= None - ; receiver_balance= fee_transfer_receiver_balance } + { coinbase_receiver_balance + ; fee_transfer_receiver_balance = receiver_balance + } + + let to_balance_data + { coinbase_receiver_balance; fee_transfer_receiver_balance } = + { Balance_data.fee_payer_balance = Some coinbase_receiver_balance + ; source_balance = None + ; receiver_balance = fee_transfer_receiver_balance + } end module Fee_transfer_balance_data = struct @@ -738,8 +752,9 @@ module Fee_transfer_balance_data = struct module Stable = struct module V1 = struct type t = - { receiver1_balance: Currency.Balance.Stable.V1.t - ; receiver2_balance: Currency.Balance.Stable.V1.t option } + { receiver1_balance : Currency.Balance.Stable.V1.t + ; receiver2_balance : Currency.Balance.Stable.V1.t option + } [@@deriving sexp, yojson, equal, compare] let to_latest = Fn.id @@ -747,7 +762,7 @@ module Fee_transfer_balance_data = struct end] let of_balance_data_exn - {Balance_data.fee_payer_balance; source_balance; receiver_balance} = + { Balance_data.fee_payer_balance; source_balance; receiver_balance } = ( match source_balance with | Some _ -> failwith @@ -764,12 +779,13 @@ module Fee_transfer_balance_data = struct "Missing fee-payer balance for \ Fee_transfer_balance_data.of_balance_data" in - {receiver1_balance; receiver2_balance= receiver_balance} + { receiver1_balance; receiver2_balance = receiver_balance } - let 
to_balance_data {receiver1_balance; receiver2_balance} = - { Balance_data.fee_payer_balance= Some receiver1_balance - ; source_balance= None - ; receiver_balance= receiver2_balance } + let to_balance_data { receiver1_balance; receiver2_balance } = + { Balance_data.fee_payer_balance = Some receiver1_balance + ; source_balance = None + ; receiver_balance = receiver2_balance + } end module Internal_command_balance_data = struct @@ -791,11 +807,12 @@ module Auxiliary_data = struct module Stable = struct module V1 = struct type t = - { fee_payer_account_creation_fee_paid: + { fee_payer_account_creation_fee_paid : Currency.Amount.Stable.V1.t option - ; receiver_account_creation_fee_paid: + ; receiver_account_creation_fee_paid : Currency.Amount.Stable.V1.t option - ; created_token: Token_id.Stable.V1.t option } + ; created_token : Token_id.Stable.V1.t option + } [@@deriving sexp, yojson, equal, compare] let to_latest = Fn.id @@ -803,9 +820,10 @@ module Auxiliary_data = struct end] let empty = - { fee_payer_account_creation_fee_paid= None - ; receiver_account_creation_fee_paid= None - ; created_token= None } + { fee_payer_account_creation_fee_paid = None + ; receiver_account_creation_fee_paid = None + ; created_token = None + } end [%%versioned diff --git a/src/lib/mina_base/transaction_union.ml b/src/lib/mina_base/transaction_union.ml index 5d35ff8d166..1a90ce36262 100644 --- a/src/lib/mina_base/transaction_union.ml +++ b/src/lib/mina_base/transaction_union.ml @@ -6,7 +6,7 @@ module Tag = Transaction_union_tag module Payload = Transaction_union_payload type ('payload, 'pk, 'signature) t_ = - {payload: 'payload; signer: 'pk; signature: 'signature} + { payload : 'payload; signer : 'pk; signature : 'signature } [@@deriving equal, sexp, hash, hlist] type t = (Payload.t, Public_key.t, Signature.t) t_ @@ -14,7 +14,7 @@ type t = (Payload.t, Public_key.t, Signature.t) t_ type var = (Payload.var, Public_key.var, Signature.var) t_ let typ : (var, t) Typ.t = - let spec = 
Data_spec.[Payload.typ; Public_key.typ; Schnorr.Signature.typ] in + let spec = Data_spec.[ Payload.typ; Public_key.typ; Schnorr.Signature.typ ] in Typ.of_hlistable spec ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist @@ -29,59 +29,69 @@ let typ : (var, t) Typ.t = *) let of_transaction : Signed_command.t Transaction.Poly.t -> t = function | Command cmd -> - let Signed_command.Poly.{payload; signer; signature} = + let Signed_command.Poly.{ payload; signer; signature } = (cmd :> Signed_command.t) in - { payload= Transaction_union_payload.of_user_command_payload payload + { payload = Transaction_union_payload.of_user_command_payload payload ; signer - ; signature } - | Coinbase {receiver; fee_transfer; amount} -> - let {Coinbase.Fee_transfer.receiver_pk= other_pk; fee= other_amount} = + ; signature + } + | Coinbase { receiver; fee_transfer; amount } -> + let { Coinbase.Fee_transfer.receiver_pk = other_pk; fee = other_amount } = Option.value ~default: (Coinbase.Fee_transfer.create ~receiver_pk:receiver ~fee:Fee.zero) fee_transfer in - { payload= - { common= - { fee= other_amount - ; fee_token= Token_id.default - ; fee_payer_pk= other_pk - ; nonce= Account.Nonce.zero - ; valid_until= Mina_numbers.Global_slot.max_value - ; memo= Signed_command_memo.empty } - ; body= - { source_pk= other_pk - ; receiver_pk= receiver - ; token_id= Token_id.default + { payload = + { common = + { fee = other_amount + ; fee_token = Token_id.default + ; fee_payer_pk = other_pk + ; nonce = Account.Nonce.zero + ; valid_until = Mina_numbers.Global_slot.max_value + ; memo = Signed_command_memo.empty + } + ; body = + { source_pk = other_pk + ; receiver_pk = receiver + ; token_id = Token_id.default ; amount - ; tag= Tag.Coinbase - ; token_locked= false } } - ; signer= Public_key.decompress_exn other_pk - ; signature= Signature.dummy } + ; tag = Tag.Coinbase + ; token_locked = false + } + } + ; signer = Public_key.decompress_exn other_pk + ; 
signature = Signature.dummy + } | Fee_transfer tr -> ( - let two {Fee_transfer.receiver_pk= pk1; fee= fee1; fee_token} - {Fee_transfer.receiver_pk= pk2; fee= fee2; fee_token= token_id} : t = - { payload= - { common= - { fee= fee2 + let two { Fee_transfer.receiver_pk = pk1; fee = fee1; fee_token } + { Fee_transfer.receiver_pk = pk2; fee = fee2; fee_token = token_id } : + t = + { payload = + { common = + { fee = fee2 ; fee_token - ; fee_payer_pk= pk2 - ; nonce= Account.Nonce.zero - ; valid_until= Mina_numbers.Global_slot.max_value - ; memo= Signed_command_memo.empty } - ; body= - { source_pk= pk2 - ; receiver_pk= pk1 + ; fee_payer_pk = pk2 + ; nonce = Account.Nonce.zero + ; valid_until = Mina_numbers.Global_slot.max_value + ; memo = Signed_command_memo.empty + } + ; body = + { source_pk = pk2 + ; receiver_pk = pk1 ; token_id - ; amount= Amount.of_fee fee1 - ; tag= Tag.Fee_transfer - ; token_locked= false } } - ; signer= Public_key.decompress_exn pk2 - ; signature= Signature.dummy } + ; amount = Amount.of_fee fee1 + ; tag = Tag.Fee_transfer + ; token_locked = false + } + } + ; signer = Public_key.decompress_exn pk2 + ; signature = Signature.dummy + } in match Fee_transfer.to_singles tr with - | `One ({receiver_pk; fee= _; fee_token} as t) -> + | `One ({ receiver_pk; fee = _; fee_token } as t) -> two t (Fee_transfer.Single.create ~receiver_pk ~fee:Fee.zero ~fee_token) | `Two (t1, t2) -> diff --git a/src/lib/mina_base/transaction_union_payload.ml b/src/lib/mina_base/transaction_union_payload.ml index c66d83a3596..574935e6862 100644 --- a/src/lib/mina_base/transaction_union_payload.ml +++ b/src/lib/mina_base/transaction_union_payload.ml @@ -1,12 +1,10 @@ (* transaction_union_payload.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick open Signature_lib @@ -25,12 +23,13 @@ module Tag = Transaction_union_tag module Body = struct type ('tag, 'public_key, 
'token_id, 'amount, 'bool) t_ = - { tag: 'tag - ; source_pk: 'public_key - ; receiver_pk: 'public_key - ; token_id: 'token_id - ; amount: 'amount - ; token_locked: 'bool } + { tag : 'tag + ; source_pk : 'public_key + ; receiver_pk : 'public_key + ; token_id : 'token_id + ; amount : 'amount + ; token_locked : 'bool + } [@@deriving sexp, hlist] type t = @@ -39,42 +38,47 @@ module Body = struct let of_user_command_payload_body = function | Signed_command_payload.Body.Payment - {source_pk; receiver_pk; token_id; amount} -> - { tag= Tag.Payment + { source_pk; receiver_pk; token_id; amount } -> + { tag = Tag.Payment ; source_pk ; receiver_pk ; token_id ; amount - ; token_locked= false } - | Stake_delegation (Set_delegate {delegator; new_delegate}) -> - { tag= Tag.Stake_delegation - ; source_pk= delegator - ; receiver_pk= new_delegate - ; token_id= Token_id.default - ; amount= Currency.Amount.zero - ; token_locked= false } - | Create_new_token {token_owner_pk; disable_new_accounts} -> - { tag= Tag.Create_account - ; source_pk= token_owner_pk - ; receiver_pk= token_owner_pk - ; token_id= Token_id.invalid - ; amount= Currency.Amount.zero - ; token_locked= disable_new_accounts } + ; token_locked = false + } + | Stake_delegation (Set_delegate { delegator; new_delegate }) -> + { tag = Tag.Stake_delegation + ; source_pk = delegator + ; receiver_pk = new_delegate + ; token_id = Token_id.default + ; amount = Currency.Amount.zero + ; token_locked = false + } + | Create_new_token { token_owner_pk; disable_new_accounts } -> + { tag = Tag.Create_account + ; source_pk = token_owner_pk + ; receiver_pk = token_owner_pk + ; token_id = Token_id.invalid + ; amount = Currency.Amount.zero + ; token_locked = disable_new_accounts + } | Create_token_account - {token_id; token_owner_pk; receiver_pk; account_disabled} -> - { tag= Tag.Create_account - ; source_pk= token_owner_pk + { token_id; token_owner_pk; receiver_pk; account_disabled } -> + { tag = Tag.Create_account + ; source_pk = 
token_owner_pk ; receiver_pk ; token_id - ; amount= Currency.Amount.zero - ; token_locked= account_disabled } - | Mint_tokens {token_id; token_owner_pk; receiver_pk; amount} -> - { tag= Tag.Mint_tokens - ; source_pk= token_owner_pk + ; amount = Currency.Amount.zero + ; token_locked = account_disabled + } + | Mint_tokens { token_id; token_owner_pk; receiver_pk; amount } -> + { tag = Tag.Mint_tokens + ; source_pk = token_owner_pk ; receiver_pk ; token_id ; amount - ; token_locked= false } + ; token_locked = false + } let gen ~fee = let open Quickcheck.Generator.Let_syntax in @@ -96,8 +100,8 @@ module Body = struct (Amount.zero, max_amount_without_overflow) | Coinbase -> (* In this case, - amount - fee should be defined. In other words, - amount >= fee *) + amount - fee should be defined. In other words, + amount >= fee *) (Amount.of_fee fee, Amount.max_int) | Mint_tokens -> (Amount.zero, Amount.max_int) @@ -134,10 +138,9 @@ module Body = struct | Coinbase -> return Token_id.default in - {tag; source_pk; receiver_pk; token_id; amount; token_locked} + { tag; source_pk; receiver_pk; token_id; amount; token_locked } - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = ( Tag.Unpacked.var @@ -154,7 +157,8 @@ module Body = struct ; Public_key.Compressed.typ ; Token_id.typ ; Currency.Amount.typ - ; Boolean.typ ] + ; Boolean.typ + ] let typ = Typ.of_hlistable spec ~var_to_hlist:t__to_hlist ~value_to_hlist:t__to_hlist @@ -162,16 +166,17 @@ module Body = struct module Checked = struct let constant - ({tag; source_pk; receiver_pk; token_id; amount; token_locked} : t) : + ({ tag; source_pk; receiver_pk; token_id; amount; token_locked } : t) : var = - { tag= Tag.unpacked_of_t tag - ; source_pk= Public_key.Compressed.var_of_t source_pk - ; receiver_pk= Public_key.Compressed.var_of_t receiver_pk - ; token_id= Token_id.var_of_t token_id - ; amount= Currency.Amount.var_of_t amount - ; token_locked= Boolean.var_of_value token_locked } - - let to_input {tag; 
source_pk; receiver_pk; token_id; amount; token_locked} + { tag = Tag.unpacked_of_t tag + ; source_pk = Public_key.Compressed.var_of_t source_pk + ; receiver_pk = Public_key.Compressed.var_of_t receiver_pk + ; token_id = Token_id.var_of_t token_id + ; amount = Currency.Amount.var_of_t amount + ; token_locked = Boolean.var_of_value token_locked + } + + let to_input { tag; source_pk; receiver_pk; token_id; amount; token_locked } = let%map token_id = Token_id.Checked.to_input token_id in Array.reduce_exn ~f:Random_oracle.Input.append @@ -180,29 +185,30 @@ module Body = struct ; Public_key.Compressed.Checked.to_input receiver_pk ; token_id ; Currency.Amount.var_to_input amount - ; Random_oracle.Input.bitstring [token_locked] |] + ; Random_oracle.Input.bitstring [ token_locked ] + |] end [%%endif] - let to_input {tag; source_pk; receiver_pk; token_id; amount; token_locked} = + let to_input { tag; source_pk; receiver_pk; token_id; amount; token_locked } = Array.reduce_exn ~f:Random_oracle.Input.append [| Tag.to_input tag ; Public_key.Compressed.to_input source_pk ; Public_key.Compressed.to_input receiver_pk ; Token_id.to_input token_id ; Currency.Amount.to_input amount - ; Random_oracle.Input.bitstring [token_locked] |] + ; Random_oracle.Input.bitstring [ token_locked ] + |] end -type t = - (Signed_command_payload.Common.t, Body.t) Signed_command_payload.Poly.t +type t = (Signed_command_payload.Common.t, Body.t) Signed_command_payload.Poly.t [@@deriving sexp] type payload = t [@@deriving sexp] -let of_user_command_payload ({common; body} : Signed_command_payload.t) : t = - {common; body= Body.of_user_command_payload_body body} +let of_user_command_payload ({ common; body } : Signed_command_payload.t) : t = + { common; body = Body.of_user_command_payload_body body } let gen = let open Quickcheck.Generator.Let_syntax in @@ -210,10 +216,9 @@ let gen = Signed_command_payload.Common.gen ~fee_token_id:Token_id.default () in let%map body = Body.gen ~fee:common.fee in - 
Signed_command_payload.Poly.{common; body} + Signed_command_payload.Poly.{ common; body } -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = (Signed_command_payload.Common.var, Body.var) Signed_command_payload.Poly.t @@ -224,26 +229,27 @@ let typ : (var, t) Typ.t = let to_hlist = Signed_command_payload.Poly.to_hlist in let of_hlist = Signed_command_payload.Poly.of_hlist in Typ.of_hlistable - [Signed_command_payload.Common.typ; Body.typ] + [ Signed_command_payload.Common.typ; Body.typ ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let payload_typ = typ module Checked = struct - let to_input ({common; body} : var) = + let to_input ({ common; body } : var) = let%map common = Signed_command_payload.Common.Checked.to_input common and body = Body.Checked.to_input body in Random_oracle.Input.append common body - let constant ({common; body} : t) : var = - { common= Signed_command_payload.Common.Checked.constant common - ; body= Body.Checked.constant body } + let constant ({ common; body } : t) : var = + { common = Signed_command_payload.Common.Checked.constant common + ; body = Body.Checked.constant body + } end [%%endif] -let to_input ({common; body} : t) = +let to_input ({ common; body } : t) = Random_oracle.Input.append (Signed_command_payload.Common.to_input common) (Body.to_input body) @@ -262,7 +268,8 @@ let excess (payload : t) : Amount.Signed.t = | Coinbase -> Amount.Signed.zero -let fee_excess ({body= {tag; amount; _}; common= {fee_token; fee; _}} : t) = +let fee_excess + ({ body = { tag; amount; _ }; common = { fee_token; fee; _ } } : t) = match tag with | Payment | Stake_delegation | Create_account | Mint_tokens -> (* For all user commands, the fee excess is just the fee. 
*) diff --git a/src/lib/mina_base/transaction_union_tag.ml b/src/lib/mina_base/transaction_union_tag.ml index 947df321cca..19faea026a8 100644 --- a/src/lib/mina_base/transaction_union_tag.ml +++ b/src/lib/mina_base/transaction_union_tag.ml @@ -1,12 +1,10 @@ (* transaction_union_tag.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -41,7 +39,7 @@ let to_string = function let gen = Quickcheck.Generator.map (Int.gen_incl min max) ~f:(fun i -> - Option.value_exn (of_enum i) ) + Option.value_exn (of_enum i)) module Bits = struct type t = bool * bool * bool [@@deriving equal] @@ -64,12 +62,11 @@ module Bits = struct let coinbase = of_t Coinbase - let to_bits (b1, b2, b3) = [b1; b2; b3] + let to_bits (b1, b2, b3) = [ b1; b2; b3 ] let to_input t = Random_oracle.Input.bitstring (to_bits t) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = Boolean.var * Boolean.var * Boolean.var @@ -85,21 +82,21 @@ module Unpacked = struct (* Invariant: exactly one of the tag identifiers must be true. 
*) module Poly = struct type 'bool t = - { is_payment: 'bool - ; is_stake_delegation: 'bool - ; is_create_account: 'bool - ; is_mint_tokens: 'bool - ; is_fee_transfer: 'bool - ; is_coinbase: 'bool - ; is_user_command: 'bool } + { is_payment : 'bool + ; is_stake_delegation : 'bool + ; is_create_account : 'bool + ; is_mint_tokens : 'bool + ; is_fee_transfer : 'bool + ; is_coinbase : 'bool + ; is_user_command : 'bool + } [@@deriving equal, hlist] - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let typ (bool : ('bool_var, 'bool) Typ.t) : ('bool_var t, 'bool t) Typ.t = Typ.of_hlistable - [bool; bool; bool; bool; bool; bool; bool] + [ bool; bool; bool; bool; bool; bool; bool ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -110,27 +107,29 @@ module Unpacked = struct (* An invalid value with all types empty. Do not use directly. *) let empty : t = - { is_payment= false - ; is_stake_delegation= false - ; is_create_account= false - ; is_mint_tokens= false - ; is_fee_transfer= false - ; is_coinbase= false - ; is_user_command= false } + { is_payment = false + ; is_stake_delegation = false + ; is_create_account = false + ; is_mint_tokens = false + ; is_fee_transfer = false + ; is_coinbase = false + ; is_user_command = false + } - let payment = {empty with is_payment= true; is_user_command= true} + let payment = { empty with is_payment = true; is_user_command = true } let stake_delegation = - {empty with is_stake_delegation= true; is_user_command= true} + { empty with is_stake_delegation = true; is_user_command = true } let create_account = - {empty with is_create_account= true; is_user_command= true} + { empty with is_create_account = true; is_user_command = true } - let mint_tokens = {empty with is_mint_tokens= true; is_user_command= true} + let mint_tokens = { empty with is_mint_tokens = true; is_user_command = true } - let fee_transfer = {empty with is_fee_transfer= true; is_user_command= false} + let 
fee_transfer = + { empty with is_fee_transfer = true; is_user_command = false } - let coinbase = {empty with is_coinbase= true; is_user_command= false} + let coinbase = { empty with is_coinbase = true; is_user_command = false } let of_bits_t (bits : Bits.t) : t = match @@ -140,7 +139,8 @@ module Unpacked = struct ; (Bits.create_account, create_account) ; (Bits.mint_tokens, mint_tokens) ; (Bits.fee_transfer, fee_transfer) - ; (Bits.coinbase, coinbase) ] + ; (Bits.coinbase, coinbase) + ] bits with | Some t -> @@ -156,7 +156,8 @@ module Unpacked = struct ; (create_account, Bits.create_account) ; (mint_tokens, Bits.mint_tokens) ; (fee_transfer, Bits.fee_transfer) - ; (coinbase, Bits.coinbase) ] + ; (coinbase, Bits.coinbase) + ] t with | Some bits -> @@ -164,8 +165,7 @@ module Unpacked = struct | None -> raise (Invalid_argument "Transaction_union_tag.Unpacked.to_bits_t") - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = Boolean.var Poly.t @@ -176,7 +176,8 @@ module Unpacked = struct ; is_mint_tokens ; is_fee_transfer ; is_coinbase - ; is_user_command= _ } : + ; is_user_command = _ + } : var) = (* For each bit, compute the sum of all the tags for which that bit is true in its bit representation. 
@@ -193,27 +194,28 @@ module Unpacked = struct ; (Bits.create_account, is_create_account) ; (Bits.mint_tokens, is_mint_tokens) ; (Bits.fee_transfer, is_fee_transfer) - ; (Bits.coinbase, is_coinbase) ] + ; (Bits.coinbase, is_coinbase) + ] ~f:(fun (acc1, acc2, acc3) ((bit1, bit2, bit3), bool_var) -> let add_if_true bit acc = if bit then Field.Var.add acc (bool_var :> Field.Var.t) else acc in - (add_if_true bit1 acc1, add_if_true bit2 acc2, add_if_true bit3 acc3) - ) + (add_if_true bit1 acc1, add_if_true bit2 acc2, add_if_true bit3 acc3)) in Boolean.Unsafe.(of_cvar b1, of_cvar b2, of_cvar b3) let typ : (var, t) Typ.t = let base_typ = Poly.typ Boolean.typ in { base_typ with - check= + check = (fun ( { is_payment ; is_stake_delegation ; is_create_account ; is_mint_tokens ; is_fee_transfer ; is_coinbase - ; is_user_command } as t ) -> + ; is_user_command + } as t ) -> let open Checked.Let_syntax in let%bind () = base_typ.check t in let%bind () = @@ -224,11 +226,13 @@ module Unpacked = struct ; is_create_account ; is_mint_tokens ; is_fee_transfer - ; is_coinbase ]) + ; is_coinbase + ]) in [%with_label "User command flag is correctly set"] (Boolean.Assert.exactly_one - [is_user_command; is_fee_transfer; is_coinbase]) ) } + [ is_user_command; is_fee_transfer; is_coinbase ])) + } let constant ({ is_payment @@ -237,30 +241,32 @@ module Unpacked = struct ; is_mint_tokens ; is_fee_transfer ; is_coinbase - ; is_user_command } : + ; is_user_command + } : t) : var = - { is_payment= Boolean.var_of_value is_payment - ; is_stake_delegation= Boolean.var_of_value is_stake_delegation - ; is_create_account= Boolean.var_of_value is_create_account - ; is_mint_tokens= Boolean.var_of_value is_mint_tokens - ; is_fee_transfer= Boolean.var_of_value is_fee_transfer - ; is_coinbase= Boolean.var_of_value is_coinbase - ; is_user_command= Boolean.var_of_value is_user_command } - - let is_payment ({is_payment; _} : var) = is_payment - - let is_stake_delegation ({is_stake_delegation; _} : var) = + { 
is_payment = Boolean.var_of_value is_payment + ; is_stake_delegation = Boolean.var_of_value is_stake_delegation + ; is_create_account = Boolean.var_of_value is_create_account + ; is_mint_tokens = Boolean.var_of_value is_mint_tokens + ; is_fee_transfer = Boolean.var_of_value is_fee_transfer + ; is_coinbase = Boolean.var_of_value is_coinbase + ; is_user_command = Boolean.var_of_value is_user_command + } + + let is_payment ({ is_payment; _ } : var) = is_payment + + let is_stake_delegation ({ is_stake_delegation; _ } : var) = is_stake_delegation - let is_create_account ({is_create_account; _} : var) = is_create_account + let is_create_account ({ is_create_account; _ } : var) = is_create_account - let is_mint_tokens ({is_mint_tokens; _} : var) = is_mint_tokens + let is_mint_tokens ({ is_mint_tokens; _ } : var) = is_mint_tokens - let is_fee_transfer ({is_fee_transfer; _} : var) = is_fee_transfer + let is_fee_transfer ({ is_fee_transfer; _ } : var) = is_fee_transfer - let is_coinbase ({is_coinbase; _} : var) = is_coinbase + let is_coinbase ({ is_coinbase; _ } : var) = is_coinbase - let is_user_command ({is_user_command; _} : var) = is_user_command + let is_user_command ({ is_user_command; _ } : var) = is_user_command let to_bits t = Bits.to_bits (to_bits_var t) @@ -287,8 +293,7 @@ let to_bits tag = Bits.to_bits (Unpacked.to_bits_t (unpacked_t_of_t tag)) let to_input tag = Random_oracle.Input.bitstring (to_bits tag) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let t_of_unpacked_t (unpacked : Unpacked.t) : t = match @@ -298,7 +303,8 @@ let t_of_unpacked_t (unpacked : Unpacked.t) : t = ; (Unpacked.create_account, Create_account) ; (Unpacked.mint_tokens, Mint_tokens) ; (Unpacked.fee_transfer, Fee_transfer) - ; (Unpacked.coinbase, Coinbase) ] + ; (Unpacked.coinbase, Coinbase) + ] unpacked with | Some t -> @@ -350,12 +356,12 @@ let%test_module "predicates" = let%test_unit "is_user_command" = test_predicate Unpacked.is_user_command - (one_of [Payment; 
Stake_delegation; Create_account; Mint_tokens]) + (one_of [ Payment; Stake_delegation; Create_account; Mint_tokens ]) let%test_unit "not_user_command" = test_predicate (fun x -> Boolean.not (Unpacked.is_user_command x)) - (one_of [Fee_transfer; Coinbase]) + (one_of [ Fee_transfer; Coinbase ]) let%test_unit "bit_representation" = for i = min to max do diff --git a/src/lib/mina_base/transaction_validator.ml b/src/lib/mina_base/transaction_validator.ml index 5d13c32a889..1f36865724b 100644 --- a/src/lib/mina_base/transaction_validator.ml +++ b/src/lib/mina_base/transaction_validator.ml @@ -2,9 +2,10 @@ open Base module Hashless_ledger = struct type t = - { base: Ledger.t - ; overlay: (Account.Identifier.t, Account.t) Hashtbl.t - ; mutable next_available_token: Token_id.t } + { base : Ledger.t + ; overlay : (Account.Identifier.t, Account.t) Hashtbl.t + ; mutable next_available_token : Token_id.t + } type location = Ours of Account.Identifier.t | Theirs of Ledger.Location.t @@ -16,15 +17,15 @@ module Hashless_ledger = struct | Ours key -> Hashtbl.find t.overlay key | Theirs loc -> ( - match Ledger.get t.base loc with - | Some a -> ( - match Hashtbl.find t.overlay (Account.identifier a) with + match Ledger.get t.base loc with + | Some a -> ( + match Hashtbl.find t.overlay (Account.identifier a) with + | None -> + Some a + | s -> + s ) | None -> - Some a - | s -> - s ) - | None -> - failwith (msg "get") ) + failwith (msg "get") ) let location_of_account t key = match Hashtbl.find t.overlay key with @@ -42,11 +43,11 @@ module Hashless_ledger = struct | Ours key -> Hashtbl.set t.overlay ~key ~data:acct | Theirs loc -> ( - match Ledger.get t.base loc with - | Some a -> - Hashtbl.set t.overlay ~key:(Account.identifier a) ~data:acct - | None -> - failwith (msg "set") ) + match Ledger.get t.base loc with + | Some a -> + Hashtbl.set t.overlay ~key:(Account.identifier a) ~data:acct + | None -> + failwith (msg "set") ) let get_or_create_account t key account = match 
location_of_account t key with @@ -72,16 +73,17 @@ module Hashless_ledger = struct let merkle_root _t = Ledger_hash.empty_hash let create l = - { base= l - ; overlay= Hashtbl.create (module Account_id) - ; next_available_token= Ledger.next_available_token l } + { base = l + ; overlay = Hashtbl.create (module Account_id) + ; next_available_token = Ledger.next_available_token l + } let with_ledger ~depth ~f = Ledger.with_ledger ~depth ~f:(fun l -> let t = create l in - f t ) + f t) - let next_available_token {next_available_token; _} = next_available_token + let next_available_token { next_available_token; _ } = next_available_token let set_next_available_token t tid = t.next_available_token <- tid end @@ -93,9 +95,8 @@ let create = Hashless_ledger.create let apply_user_command ~constraint_constants ~txn_global_slot l uc = Result.map ~f:(fun applied_txn -> - applied_txn.Transaction_applied.Signed_command_applied.common - .user_command - .status ) + applied_txn.Transaction_applied.Signed_command_applied.common.user_command + .status) (apply_user_command l ~constraint_constants ~txn_global_slot uc) let apply_transaction ~constraint_constants ~txn_state_view l txn = diff --git a/src/lib/mina_base/transaction_validator.mli b/src/lib/mina_base/transaction_validator.mli index 219ba1ef62e..a4481ba0224 100644 --- a/src/lib/mina_base/transaction_validator.mli +++ b/src/lib/mina_base/transaction_validator.mli @@ -32,7 +32,7 @@ module For_tests : sig account:Account.t -> txn_amount:Amount.t -> txn_global_slot:Global_slot.t - -> (Account.Timing.t * [> `Min_balance of Balance.t]) Or_error.t + -> (Account.Timing.t * [> `Min_balance of Balance.t ]) Or_error.t val validate_timing : account:Account.t diff --git a/src/lib/mina_base/user_command.ml b/src/lib/mina_base/user_command.ml index 7fb92a2faf5..8fc40c5677f 100644 --- a/src/lib/mina_base/user_command.ml +++ b/src/lib/mina_base/user_command.ml @@ -104,7 +104,7 @@ module Zero_one_or_two = struct [%%versioned module Stable = struct 
module V1 = struct - type 'a t = [`Zero | `One of 'a | `Two of 'a * 'a] + type 'a t = [ `Zero | `One of 'a | `Two of 'a * 'a ] [@@deriving sexp, compare, equal, hash, yojson] end end] @@ -138,9 +138,9 @@ let to_verifiable_exn (t : t) ~ledger ~get ~location_of_account = let of_list = function | [] -> `Zero - | [x] -> + | [ x ] -> `One x - | [x; y] -> + | [ x; y ] -> `Two (x, y) | _ -> failwith "of_list" @@ -152,11 +152,11 @@ let to_verifiable_exn (t : t) ~ledger ~get ~location_of_account = let pks = match c with | Proved_proved r -> - [r.one.data.body.pk; r.two.data.body.pk] + [ r.one.data.body.pk; r.two.data.body.pk ] | Proved_empty r -> - [r.one.data.body.pk] + [ r.one.data.body.pk ] | Proved_signed r -> - [r.one.data.body.pk] + [ r.one.data.body.pk ] | Signed_signed _ | Signed_empty _ -> [] in @@ -164,7 +164,7 @@ let to_verifiable_exn (t : t) ~ledger ~get ~location_of_account = let to_verifiable t ~ledger ~get ~location_of_account = Option.try_with (fun () -> - to_verifiable_exn t ~ledger ~get ~location_of_account ) + to_verifiable_exn t ~ledger ~get ~location_of_account) let fee_exn : t -> Currency.Fee.t = function | Signed_command x -> @@ -254,4 +254,4 @@ let filter_by_participant (commands : t list) public_key = ~f: (Fn.compose (Signature_lib.Public_key.Compressed.equal public_key) - Account_id.public_key) ) + Account_id.public_key)) diff --git a/src/lib/mina_base/with_status.ml b/src/lib/mina_base/with_status.ml index a9887e273c7..4d942ba47e0 100644 --- a/src/lib/mina_base/with_status.ml +++ b/src/lib/mina_base/with_status.ml @@ -3,15 +3,15 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type 'a t = {data: 'a; status: Transaction_status.Stable.V1.t} + type 'a t = { data : 'a; status : Transaction_status.Stable.V1.t } [@@deriving sexp, yojson, equal, compare, fields] end end] -let map ~f {data; status} = {data= f data; status} +let map ~f { data; status } = { data = f data; status } -let map_opt ~f {data; status} = - Option.map (f 
data) ~f:(fun data -> {data; status}) +let map_opt ~f { data; status } = + Option.map (f data) ~f:(fun data -> { data; status }) -let map_result ~f {data; status} = - Result.map (f data) ~f:(fun data -> {data; status}) +let map_result ~f { data; status } = + Result.map (f data) ~f:(fun data -> { data; status }) diff --git a/src/lib/mina_commands/mina_commands.ml b/src/lib/mina_commands/mina_commands.ml index d187bb57003..e2b57df7cb1 100644 --- a/src/lib/mina_commands/mina_commands.ml +++ b/src/lib/mina_commands/mina_commands.ml @@ -34,7 +34,7 @@ let get_keys_with_details t = List.map accounts ~f:(fun account -> ( string_of_public_key account , account.Account.Poly.balance |> Currency.Balance.to_int - , account.Account.Poly.nonce |> Account.Nonce.to_int ) ) + , account.Account.Poly.nonce |> Account.Nonce.to_int )) let get_nonce t (addr : Account_id.t) = let open Participating_state.Option.Let_syntax in @@ -68,7 +68,7 @@ let replace_block_production_keys keys pks = let%map kps = Mina_lib.wallets keys |> Secrets.Wallets.find_unlocked ~needle:pk in - (kps, pk) ) + (kps, pk)) in Mina_lib.replace_block_production_keypairs keys (Keypair.And_compressed_pk.Set.of_list kps) ; @@ -80,19 +80,19 @@ let setup_and_submit_user_command t (user_command_input : User_command_input.t) (* hack to get types to work out *) let%map () = return () in let open Deferred.Let_syntax in - let%map result = Mina_lib.add_transactions t [user_command_input] in + let%map result = Mina_lib.add_transactions t [ user_command_input ] in txn_count := !txn_count + 1 ; match result with - | Ok ([], [failed_txn]) -> + | Ok ([], [ failed_txn ]) -> Error (Error.of_string (sprintf !"%s" ( Network_pool.Transaction_pool.Resource_pool.Diff.Diff_error .to_yojson (snd failed_txn) |> Yojson.Safe.to_string ))) - | Ok ([Signed_command txn], []) -> + | Ok ([ Signed_command txn ], []) -> [%log' info (Mina_lib.top_level_logger t)] - ~metadata:[("command", User_command.to_yojson (Signed_command txn))] + ~metadata:[ 
("command", User_command.to_yojson (Signed_command txn)) ] "Scheduled payment $command" ; Ok txn | Ok (valid_commands, invalid_commands) -> @@ -108,7 +108,8 @@ let setup_and_submit_user_command t (user_command_input : User_command_input.t) Network_pool.Transaction_pool.Resource_pool.Diff .Diff_error .to_yojson snd) - invalid_commands) ) ] + invalid_commands) ) + ] "Invalid result from scheduling a payment" ; Error (Error.of_string "Internal error while scheduling a payment") | Error e -> @@ -120,7 +121,7 @@ let setup_and_submit_user_commands t user_command_list = [%log' warn (Mina_lib.top_level_logger t)] "batch-send-payments does not yet report errors" ~metadata: - [("mina_command", `String "scheduling a batch of user transactions")] ; + [ ("mina_command", `String "scheduling a batch of user transactions") ] ; Mina_lib.add_transactions t user_command_list module Receipt_chain_verifier = Merkle_list_verifier.Make (struct @@ -166,8 +167,8 @@ let verify_payment t (addr : Account_id.t) (verifying_txn : User_command.t) (Receipt_chain_verifier.verify ~init:init_receipt proof resulting_receipt) ~error:(Error.createf "Merkle list proof of payment is invalid") in - if List.exists proof ~f:(fun txn -> User_command.equal verifying_txn txn) - then Ok () + if List.exists proof ~f:(fun txn -> User_command.equal verifying_txn txn) then + Ok () else Or_error.errorf !"Merkle list proof does not contain payment %{sexp:User_command.t}" @@ -176,12 +177,13 @@ let verify_payment t (addr : Account_id.t) (verifying_txn : User_command.t) let start_time = Time_ns.now () type active_state_fields = - { num_accounts: int option - ; blockchain_length: int option - ; ledger_merkle_root: string option - ; state_hash: string option - ; consensus_time_best_tip: Consensus.Data.Consensus_time.t option - ; global_slot_since_genesis_best_tip: int option } + { num_accounts : int option + ; blockchain_length : int option + ; ledger_merkle_root : string option + ; state_hash : string option + ; 
consensus_time_best_tip : Consensus.Data.Consensus_time.t option + ; global_slot_since_genesis_best_tip : int option + } let max_block_height = ref 1 @@ -236,34 +238,41 @@ let get_status ~flag t = | `Performance -> let rpc_timings = let open Daemon_rpcs.Types.Status.Rpc_timings in - { get_staged_ledger_aux= - { Rpc_pair.dispatch= r ~name:"rpc_dispatch_get_staged_ledger_aux" - ; impl= r ~name:"rpc_impl_get_staged_ledger_aux" } - ; answer_sync_ledger_query= - { Rpc_pair.dispatch= + { get_staged_ledger_aux = + { Rpc_pair.dispatch = r ~name:"rpc_dispatch_get_staged_ledger_aux" + ; impl = r ~name:"rpc_impl_get_staged_ledger_aux" + } + ; answer_sync_ledger_query = + { Rpc_pair.dispatch = r ~name:"rpc_dispatch_answer_sync_ledger_query" - ; impl= r ~name:"rpc_impl_answer_sync_ledger_query" } - ; get_ancestry= - { Rpc_pair.dispatch= r ~name:"rpc_dispatch_get_ancestry" - ; impl= r ~name:"rpc_impl_get_ancestry" } - ; get_transition_chain_proof= - { Rpc_pair.dispatch= + ; impl = r ~name:"rpc_impl_answer_sync_ledger_query" + } + ; get_ancestry = + { Rpc_pair.dispatch = r ~name:"rpc_dispatch_get_ancestry" + ; impl = r ~name:"rpc_impl_get_ancestry" + } + ; get_transition_chain_proof = + { Rpc_pair.dispatch = r ~name:"rpc_dispatch_get_transition_chain_proof" - ; impl= r ~name:"rpc_impl_get_transition_chain_proof" } - ; get_transition_chain= - { Rpc_pair.dispatch= r ~name:"rpc_dispatch_get_transition_chain" - ; impl= r ~name:"rpc_impl_get_transition_chain" } } + ; impl = r ~name:"rpc_impl_get_transition_chain_proof" + } + ; get_transition_chain = + { Rpc_pair.dispatch = r ~name:"rpc_dispatch_get_transition_chain" + ; impl = r ~name:"rpc_impl_get_transition_chain" + } + } in Some { Daemon_rpcs.Types.Status.Histograms.rpc_timings - ; external_transition_latency= r ~name:"external_transition_latency" - ; accepted_transition_local_latency= + ; external_transition_latency = r ~name:"external_transition_latency" + ; accepted_transition_local_latency = r 
~name:"accepted_transition_local_latency" - ; accepted_transition_remote_latency= + ; accepted_transition_remote_latency = r ~name:"accepted_transition_remote_latency" - ; snark_worker_transition_time= + ; snark_worker_transition_time = r ~name:"snark_worker_transition_time" - ; snark_worker_merge_time= r ~name:"snark_worker_merge_time" } + ; snark_worker_merge_time = r ~name:"snark_worker_merge_time" + } | `None -> None in @@ -319,12 +328,13 @@ let get_status ~flag t = consensus_state in ( sync_status - , { num_accounts= Some num_accounts - ; blockchain_length= Some blockchain_length - ; ledger_merkle_root= Some ledger_merkle_root - ; state_hash= Some state_hash - ; consensus_time_best_tip= Some consensus_time_best_tip - ; global_slot_since_genesis_best_tip= Some global_slot_since_genesis } ) + , { num_accounts = Some num_accounts + ; blockchain_length = Some blockchain_length + ; ledger_merkle_root = Some ledger_merkle_root + ; state_hash = Some state_hash + ; consensus_time_best_tip = Some consensus_time_best_tip + ; global_slot_since_genesis_best_tip = Some global_slot_since_genesis + } ) in let ( sync_status , { num_accounts @@ -332,18 +342,20 @@ let get_status ~flag t = ; ledger_merkle_root ; state_hash ; consensus_time_best_tip - ; global_slot_since_genesis_best_tip } ) = + ; global_slot_since_genesis_best_tip + } ) = match active_status () with | `Active result -> result | `Bootstrapping -> ( `Bootstrap - , { num_accounts= None - ; blockchain_length= None - ; ledger_merkle_root= None - ; state_hash= None - ; consensus_time_best_tip= None - ; global_slot_since_genesis_best_tip= None } ) + , { num_accounts = None + ; blockchain_length = None + ; ledger_merkle_root = None + ; state_hash = None + ; consensus_time_best_tip = None + ; global_slot_since_genesis_best_tip = None + } ) in let next_block_production = Mina_lib.next_producer_timing t in let addrs_and_ports = @@ -364,15 +376,15 @@ let get_status ~flag t = ; sync_status ; catchup_status ; 
blockchain_length - ; highest_block_length_received= + ; highest_block_length_received = (*if this function is not called until after catchup max_block_height will be 1 and most_recent_valid_transition pipe might have the genesis block as the latest transition in which case return the best tip length*) max (Option.value ~default:1 blockchain_length) !max_block_height - ; highest_unvalidated_block_length_received= + ; highest_unvalidated_block_length_received = !Mina_metrics.Transition_frontier.max_unvalidated_blocklength_observed ; uptime_secs ; ledger_merkle_root ; state_hash - ; chain_id= config.chain_id + ; chain_id = config.chain_id ; consensus_time_best_tip ; global_slot_since_genesis_best_tip ; commit_id @@ -381,17 +393,18 @@ let get_status ~flag t = ; user_commands_sent ; snark_worker ; snark_work_fee - ; block_production_keys= + ; block_production_keys = Public_key.Compressed.Set.to_list block_production_keys |> List.map ~f:Public_key.Compressed.to_base58_check - ; coinbase_receiver= + ; coinbase_receiver = Option.map ~f:Public_key.Compressed.to_base58_check coinbase_receiver ; histograms ; next_block_production ; consensus_time_now ; consensus_mechanism ; consensus_configuration - ; addrs_and_ports } + ; addrs_and_ports + } let clear_hist_status ~flag t = Perf_histograms.wipe () ; get_status ~flag t diff --git a/src/lib/mina_compile_config/mina_compile_config.ml b/src/lib/mina_compile_config/mina_compile_config.ml index ebb9a0fd686..b673f9ed991 100644 --- a/src/lib/mina_compile_config/mina_compile_config.ml +++ b/src/lib/mina_compile_config/mina_compile_config.ml @@ -1,8 +1,6 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Currency = Currency_nonconsensus.Currency @@ -14,30 +12,23 @@ module Currency = Currency_nonconsensus.Currency tests and production. 
*) -[%%inject -"curve_size", curve_size] +[%%inject "curve_size", curve_size] -[%%inject -"genesis_ledger", genesis_ledger] +[%%inject "genesis_ledger", genesis_ledger] -[%%inject -"default_transaction_fee_string", default_transaction_fee] +[%%inject "default_transaction_fee_string", default_transaction_fee] -[%%inject -"default_snark_worker_fee_string", default_snark_worker_fee] +[%%inject "default_snark_worker_fee_string", default_snark_worker_fee] -[%%inject -"minimum_user_command_fee_string", minimum_user_command_fee] +[%%inject "minimum_user_command_fee_string", minimum_user_command_fee] -[%%ifndef -compaction_interval] +[%%ifndef compaction_interval] let compaction_interval_ms = None [%%else] -[%%inject -"compaction_interval", compaction_interval] +[%%inject "compaction_interval", compaction_interval] let compaction_interval_ms = Some compaction_interval @@ -52,8 +43,7 @@ let default_transaction_fee = let default_snark_worker_fee = Currency.Fee.of_formatted_string default_snark_worker_fee_string -[%%inject -"block_window_duration_ms", block_window_duration] +[%%inject "block_window_duration_ms", block_window_duration] let rpc_handshake_timeout_sec = 60.0 @@ -61,5 +51,4 @@ let rpc_heartbeat_timeout_sec = 60.0 let rpc_heartbeat_send_every_sec = 10.0 (*same as the default*) -[%%inject -"generate_genesis_proof", generate_genesis_proof] +[%%inject "generate_genesis_proof", generate_genesis_proof] diff --git a/src/lib/mina_graphql/mina_graphql.ml b/src/lib/mina_graphql/mina_graphql.ml index 4ace7d0765e..9b169518210 100644 --- a/src/lib/mina_graphql/mina_graphql.ml +++ b/src/lib/mina_graphql/mina_graphql.ml @@ -38,21 +38,21 @@ let result_of_or_error ?error v = | None -> str_error | Some error -> - sprintf "%s (%s)" error str_error ) + sprintf "%s (%s)" error str_error) let result_field_no_inputs ~resolve = Schema.io_field ~resolve:(fun resolve_info src -> - Deferred.return @@ resolve resolve_info src ) + Deferred.return @@ resolve resolve_info src) (* one input *) 
let result_field ~resolve = Schema.io_field ~resolve:(fun resolve_info src inputs -> - Deferred.return @@ resolve resolve_info src inputs ) + Deferred.return @@ resolve resolve_info src inputs) (* two inputs *) let result_field2 ~resolve = Schema.io_field ~resolve:(fun resolve_info src input1 input2 -> - Deferred.return @@ resolve resolve_info src input1 input2 ) + Deferred.return @@ resolve resolve_info src input1 input2) module Doc = struct let date ?(extra = "") s = @@ -71,7 +71,7 @@ module Reflection = struct let underToCamel s = Re2.replace_exn (Lazy.force regex) s ~f:(fun m -> let s = Re2.Match.get_exn ~sub:(`Index 1) m in - String.capitalize s ) + String.capitalize s) (** When Fields.folding, create graphql fields via reflection *) let reflect f ~typ acc x = @@ -125,7 +125,7 @@ module Reflection = struct | To_build_breadcrumb, _ -> "to_build_breadcrumb" | Root, _ -> - "root" )) ) + "root"))) ~typ:(list (non_null string)) a x @@ -166,7 +166,7 @@ module Types = struct (List.map Sync_status.all ~f:(fun status -> enum_value (String.map ~f:Char.uppercase @@ Sync_status.to_string status) - ~value:status )) + ~value:status)) let transaction_status : ('context, Transaction_inclusion_status.State.t option) typ = @@ -182,7 +182,8 @@ module Types = struct ; enum_value "UNKNOWN" ~value:Unknown ~doc: "The transaction has either been snarked, reached finality \ - through consensus or has been dropped" ] + through consensus or has been dropped" + ] let consensus_time = let module C = Consensus.Data.Consensus_time in @@ -196,21 +197,22 @@ module Types = struct ; field "globalSlot" ~typ:(non_null uint32) ~args:Arg.[] ~resolve:(fun _ (global_slot : Consensus.Data.Consensus_time.t) -> - C.to_uint32 global_slot ) + C.to_uint32 global_slot) ; field "startTime" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} global_slot -> + ~resolve:(fun { ctx = coda; _ } global_slot -> let constants = (Mina_lib.config coda).precomputed_values.consensus_constants in - 
Block_time.to_string @@ C.start_time ~constants global_slot ) + Block_time.to_string @@ C.start_time ~constants global_slot) ; field "endTime" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} global_slot -> + ~resolve:(fun { ctx = coda; _ } global_slot -> let constants = (Mina_lib.config coda).precomputed_values.consensus_constants in - Block_time.to_string @@ C.end_time ~constants global_slot ) ] ) + Block_time.to_string @@ C.end_time ~constants global_slot) + ]) let consensus_time_with_global_slot_since_genesis = obj "ConsensusTimeGlobalSlot" @@ -218,14 +220,15 @@ module Types = struct ~fields:(fun _ -> [ field "consensusTime" ~typ:(non_null consensus_time) ~doc: - "Time in terms of slot number in an epoch, start and end time \ - of the slot since UTC epoch" + "Time in terms of slot number in an epoch, start and end time of \ + the slot since UTC epoch" ~args:Arg.[] ~resolve:(fun _ (time, _) -> time) ; field "globalSlotSinceGenesis" ~args:Arg.[] ~typ:(non_null uint32) - ~resolve:(fun _ (_, slot) -> slot) ] ) + ~resolve:(fun _ (_, slot) -> slot) + ]) let block_producer_timing : (_, Daemon_rpcs.Types.Status.Next_producer_timing.t option) typ = @@ -239,8 +242,8 @@ module Types = struct ~doc:"Next block production time" ~args:Arg.[] ~resolve: - (fun {ctx= coda; _} - {Daemon_rpcs.Types.Status.Next_producer_timing.timing; _} -> + (fun { ctx = coda; _ } + { Daemon_rpcs.Types.Status.Next_producer_timing.timing; _ } -> let consensus_constants = (Mina_lib.config coda).precomputed_values.consensus_constants in @@ -248,40 +251,42 @@ module Types = struct | Daemon_rpcs.Types.Status.Next_producer_timing.Check_again _ -> [] | Produce info -> - [of_time info.time ~consensus_constants] + [ of_time info.time ~consensus_constants ] | Produce_now info -> - [of_time ~consensus_constants info.time] ) + [ of_time ~consensus_constants info.time ]) ; field "globalSlotSinceGenesis" ~typ:(non_null @@ list @@ non_null uint32) ~doc:"Next block production 
global-slot-since-genesis " ~args:Arg.[] ~resolve: - (fun _ {Daemon_rpcs.Types.Status.Next_producer_timing.timing; _} -> + (fun _ { Daemon_rpcs.Types.Status.Next_producer_timing.timing; _ } -> match timing with | Daemon_rpcs.Types.Status.Next_producer_timing.Check_again _ -> [] | Produce info -> - [info.for_slot.global_slot_since_genesis] + [ info.for_slot.global_slot_since_genesis ] | Produce_now info -> - [info.for_slot.global_slot_since_genesis] ) + [ info.for_slot.global_slot_since_genesis ]) ; field "generatedFromConsensusAt" ~typ:(non_null consensus_time_with_global_slot_since_genesis) ~doc: - "Consensus time of the block that was used to determine the \ - next block production time" + "Consensus time of the block that was used to determine the next \ + block production time" ~args:Arg.[] ~resolve: - (fun {ctx= coda; _} + (fun { ctx = coda; _ } { Daemon_rpcs.Types.Status.Next_producer_timing - .generated_from_consensus_at= - {slot; global_slot_since_genesis} - ; _ } -> + .generated_from_consensus_at = + { slot; global_slot_since_genesis } + ; _ + } -> let consensus_constants = (Mina_lib.config coda).precomputed_values.consensus_constants in ( Consensus.Data.Consensus_time.of_global_slot ~constants:consensus_constants slot - , global_slot_since_genesis ) ) ] ) + , global_slot_since_genesis )) + ]) module DaemonStatus = struct type t = Daemon_rpcs.Types.Status.t @@ -291,12 +296,12 @@ module Types = struct [ field "start" ~typ:(non_null string) ~args:Arg.[] ~resolve:(fun _ (start, _) -> - Time.Span.to_ms start |> Int64.of_float |> Int64.to_string ) + Time.Span.to_ms start |> Int64.of_float |> Int64.to_string) ; field "stop" ~typ:(non_null string) ~args:Arg.[] ~resolve:(fun _ (_, end_) -> - Time.Span.to_ms end_ |> Int64.of_float |> Int64.to_string ) ] - ) + Time.Span.to_ms end_ |> Int64.of_float |> Int64.to_string) + ]) let histogram : (_, Perf_histograms.Report.t option) typ = obj "Histogram" ~fields:(fun _ -> @@ -305,7 +310,7 @@ module Types = struct @@ 
Perf_histograms.Report.Fields.fold ~init:[] ~values:(id ~typ:Schema.(non_null (list (non_null int)))) ~intervals:(id ~typ:(non_null (list (non_null interval)))) - ~underflow:nn_int ~overflow:nn_int ) + ~underflow:nn_int ~overflow:nn_int) module Rpc_timings = Daemon_rpcs.Types.Status.Rpc_timings module Rpc_pair = Rpc_timings.Rpc_pair @@ -313,7 +318,7 @@ module Types = struct let rpc_pair : (_, Perf_histograms.Report.t option Rpc_pair.t option) typ = let h = Reflection.Shorthand.id ~typ:histogram in obj "RpcPair" ~fields:(fun _ -> - List.rev @@ Rpc_pair.Fields.fold ~init:[] ~dispatch:h ~impl:h ) + List.rev @@ Rpc_pair.Fields.fold ~init:[] ~dispatch:h ~impl:h) let rpc_timings : (_, Rpc_timings.t option) typ = let fd = Reflection.Shorthand.id ~typ:(non_null rpc_pair) in @@ -321,7 +326,7 @@ module Types = struct List.rev @@ Rpc_timings.Fields.fold ~init:[] ~get_staged_ledger_aux:fd ~answer_sync_ledger_query:fd ~get_ancestry:fd - ~get_transition_chain_proof:fd ~get_transition_chain:fd ) + ~get_transition_chain_proof:fd ~get_transition_chain:fd) module Histograms = Daemon_rpcs.Types.Status.Histograms @@ -335,7 +340,7 @@ module Types = struct ~external_transition_latency:h ~accepted_transition_local_latency:h ~accepted_transition_remote_latency:h - ~snark_worker_transition_time:h ~snark_worker_merge_time:h ) + ~snark_worker_transition_time:h ~snark_worker_merge_time:h) let consensus_configuration : (_, Consensus.Configuration.t option) typ = obj "ConsensusConfiguration" ~fields:(fun _ -> @@ -344,14 +349,14 @@ module Types = struct @@ Consensus.Configuration.Fields.fold ~init:[] ~delta:nn_int ~k:nn_int ~slots_per_epoch:nn_int ~slot_duration:nn_int ~epoch_duration:nn_int ~acceptable_network_delay:nn_int - ~genesis_state_timestamp:nn_time ) + ~genesis_state_timestamp:nn_time) let peer : (_, Network_peer.Peer.Display.t option) typ = obj "Peer" ~fields:(fun _ -> let open Reflection.Shorthand in List.rev @@ Network_peer.Peer.Display.Fields.fold ~init:[] ~host:nn_string - 
~libp2p_port:nn_int ~peer_id:nn_string ) + ~libp2p_port:nn_int ~peer_id:nn_string) let addrs_and_ports : (_, Node_addrs_and_ports.Display.t option) typ = obj "AddrsAndPorts" ~fields:(fun _ -> @@ -359,7 +364,7 @@ module Types = struct List.rev @@ Node_addrs_and_ports.Display.Fields.fold ~init:[] ~external_ip:nn_string ~bind_ip:nn_string ~client_port:nn_int - ~libp2p_port:nn_int ~peer:(id ~typ:peer) ) + ~libp2p_port:nn_int ~peer:(id ~typ:peer)) let t : (_, Daemon_rpcs.Types.Status.t option) typ = obj "DaemonStatus" ~fields:(fun _ -> @@ -387,7 +392,7 @@ module Types = struct ~consensus_configuration: (id ~typ:(non_null consensus_configuration)) ~highest_block_length_received:nn_int - ~highest_unvalidated_block_length_received:nn_int ) + ~highest_unvalidated_block_length_received:nn_int) end let fee_transfer = @@ -396,26 +401,27 @@ module Types = struct ~args:Arg.[] ~doc:"Public key of fee transfer recipient" ~typ:(non_null public_key) - ~resolve:(fun _ ({Fee_transfer.receiver_pk= pk; _}, _) -> pk) + ~resolve:(fun _ ({ Fee_transfer.receiver_pk = pk; _ }, _) -> pk) ; field "fee" ~typ:(non_null uint64) ~args:Arg.[] ~doc:"Amount that the recipient is paid in this fee transfer" - ~resolve:(fun _ ({Fee_transfer.fee; _}, _) -> - Currency.Fee.to_uint64 fee ) + ~resolve:(fun _ ({ Fee_transfer.fee; _ }, _) -> + Currency.Fee.to_uint64 fee) ; field "type" ~typ:(non_null string) ~args:Arg.[] ~doc: "Fee_transfer|Fee_transfer_via_coinbase Snark worker fees \ deducted from the coinbase amount are of type \ - 'Fee_transfer_via_coinbase', rest are deducted from \ - transaction fees" + 'Fee_transfer_via_coinbase', rest are deducted from transaction \ + fees" ~resolve:(fun _ (_, transfer_type) -> match transfer_type with | Filtered_external_transition.Fee_transfer_type .Fee_transfer_via_coinbase -> "Fee_transfer_via_coinbase" | Fee_transfer -> - "Fee_transfer" ) ] ) + "Fee_transfer") + ]) let account_timing : (Mina_lib.t, Account_timing.t option) typ = obj "AccountTiming" ~fields:(fun _ 
-> @@ -427,8 +433,7 @@ module Types = struct | Account_timing.Untimed -> None | Timed timing_info -> - Some (Balance.to_uint64 timing_info.initial_minimum_balance) - ) + Some (Balance.to_uint64 timing_info.initial_minimum_balance)) ; field "cliff_time" ~typ:uint32 ~doc:"The cliff time for a time-locked account" ~args:Arg.[] @@ -437,7 +442,7 @@ module Types = struct | Account_timing.Untimed -> None | Timed timing_info -> - Some timing_info.cliff_time ) + Some timing_info.cliff_time) ; field "cliff_amount" ~typ:uint64 ~doc:"The cliff amount for a time-locked account" ~args:Arg.[] @@ -446,7 +451,7 @@ module Types = struct | Account_timing.Untimed -> None | Timed timing_info -> - Some (Currency.Amount.to_uint64 timing_info.cliff_amount) ) + Some (Currency.Amount.to_uint64 timing_info.cliff_amount)) ; field "vesting_period" ~typ:uint32 ~doc:"The vesting period for a time-locked account" ~args:Arg.[] @@ -455,7 +460,7 @@ module Types = struct | Account_timing.Untimed -> None | Timed timing_info -> - Some timing_info.vesting_period ) + Some timing_info.vesting_period) ; field "vesting_increment" ~typ:uint64 ~doc:"The vesting increment for a time-locked account" ~args:Arg.[] @@ -464,31 +469,31 @@ module Types = struct | Account_timing.Untimed -> None | Timed timing_info -> - Some - (Currency.Amount.to_uint64 timing_info.vesting_increment) - ) ] ) + Some (Currency.Amount.to_uint64 timing_info.vesting_increment)) + ]) let completed_work = obj "CompletedWork" ~doc:"Completed snark works" ~fields:(fun _ -> [ field "prover" ~args:Arg.[] ~doc:"Public key of the prover" ~typ:(non_null public_key) - ~resolve:(fun _ {Transaction_snark_work.Info.prover; _} -> prover) + ~resolve:(fun _ { Transaction_snark_work.Info.prover; _ } -> prover) ; field "fee" ~typ:(non_null uint64) ~args:Arg.[] ~doc:"Amount the prover is paid for the snark work" - ~resolve:(fun _ {Transaction_snark_work.Info.fee; _} -> - Currency.Fee.to_uint64 fee ) + ~resolve:(fun _ { Transaction_snark_work.Info.fee; _ } -> 
+ Currency.Fee.to_uint64 fee) ; field "workIds" ~doc:"Unique identifier for the snark work purchased" ~typ:(non_null @@ list @@ non_null int) ~args:Arg.[] - ~resolve:(fun _ {Transaction_snark_work.Info.work_ids; _} -> - One_or_two.to_list work_ids ) ] ) + ~resolve:(fun _ { Transaction_snark_work.Info.work_ids; _ } -> + One_or_two.to_list work_ids) + ]) let sign = enum "sign" ~values: - [enum_value "PLUS" ~value:Sgn.Pos; enum_value "MINUS" ~value:Sgn.Neg] + [ enum_value "PLUS" ~value:Sgn.Pos; enum_value "MINUS" ~value:Sgn.Neg ] let signed_fee = obj "SignedFee" ~doc:"Signed fee" ~fields:(fun _ -> @@ -498,7 +503,8 @@ module Types = struct ; field "feeMagnitude" ~typ:(non_null uint64) ~doc:"Fee" ~args:Arg.[] ~resolve:(fun _ fee -> - Currency.Amount.(to_uint64 (Signed.magnitude fee)) ) ] ) + Currency.Amount.(to_uint64 (Signed.magnitude fee))) + ]) let work_statement = obj "WorkDescription" @@ -508,13 +514,13 @@ module Types = struct [ field "sourceLedgerHash" ~typ:(non_null string) ~doc:"Base58Check-encoded hash of the source ledger" ~args:Arg.[] - ~resolve:(fun _ {Transaction_snark.Statement.source; _} -> - Frozen_ledger_hash.to_string source ) + ~resolve:(fun _ { Transaction_snark.Statement.source; _ } -> + Frozen_ledger_hash.to_string source) ; field "targetLedgerHash" ~typ:(non_null string) ~doc:"Base58Check-encoded hash of the target ledger" ~args:Arg.[] - ~resolve:(fun _ {Transaction_snark.Statement.target; _} -> - Frozen_ledger_hash.to_string target ) + ~resolve:(fun _ { Transaction_snark.Statement.target; _ } -> + Frozen_ledger_hash.to_string target) ; field "feeExcess" ~typ:(non_null signed_fee) ~doc: "Total transaction fee that is not accounted for in the \ @@ -522,21 +528,23 @@ module Types = struct ~args:Arg.[] ~resolve: (fun _ - ({fee_excess= {fee_excess_l; _}; _} : + ({ fee_excess = { fee_excess_l; _ }; _ } : Transaction_snark.Statement.t) -> (* TODO: Expose full fee excess data. 
*) { fee_excess_l with - magnitude= Currency.Amount.of_fee fee_excess_l.magnitude } ) + magnitude = Currency.Amount.of_fee fee_excess_l.magnitude + }) ; field "supplyIncrease" ~typ:(non_null uint64) ~doc:"Increase in total coinbase reward " ~args:Arg.[] ~resolve: - (fun _ ({supply_increase; _} : Transaction_snark.Statement.t) -> - Currency.Amount.to_uint64 supply_increase ) + (fun _ ({ supply_increase; _ } : Transaction_snark.Statement.t) -> + Currency.Amount.to_uint64 supply_increase) ; field "workId" ~doc:"Unique identifier for a snark work" ~typ:(non_null int) ~args:Arg.[] - ~resolve:(fun _ w -> Transaction_snark.Statement.hash w) ] ) + ~resolve:(fun _ w -> Transaction_snark.Statement.hash w) + ]) let pending_work = obj "PendingSnarkWork" @@ -546,7 +554,8 @@ module Types = struct ~args:Arg.[] ~doc:"Work bundle with one or two snark work" ~typ:(non_null @@ list @@ non_null work_statement) - ~resolve:(fun _ w -> One_or_two.to_list w) ] ) + ~resolve:(fun _ w -> One_or_two.to_list w) + ]) let blockchain_state : ( 'context @@ -560,7 +569,7 @@ module Types = struct let timestamp = Mina_state.Blockchain_state.timestamp blockchain_state in - Block_time.to_string timestamp ) + Block_time.to_string timestamp) ; field "utcDate" ~typ:(non_null string) ~doc: (Doc.date @@ -569,24 +578,23 @@ module Types = struct time instead of genesis time." 
"utcDate") ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} t -> + ~resolve:(fun { ctx = coda; _ } t -> let blockchain_state, _ = t in let timestamp = Mina_state.Blockchain_state.timestamp blockchain_state in Block_time.to_string_system_time (Mina_lib.time_controller coda) - timestamp ) + timestamp) ; field "snarkedLedgerHash" ~typ:(non_null string) ~doc:"Base58Check-encoded hash of the snarked ledger" ~args:Arg.[] ~resolve:(fun _ t -> let blockchain_state, _ = t in let snarked_ledger_hash = - Mina_state.Blockchain_state.snarked_ledger_hash - blockchain_state + Mina_state.Blockchain_state.snarked_ledger_hash blockchain_state in - Frozen_ledger_hash.to_string snarked_ledger_hash ) + Frozen_ledger_hash.to_string snarked_ledger_hash) ; field "stagedLedgerHash" ~typ:(non_null string) ~doc:"Base58Check-encoded hash of the staged ledger" ~args:Arg.[] @@ -596,15 +604,14 @@ module Types = struct Mina_state.Blockchain_state.staged_ledger_hash blockchain_state in Mina_base.Ledger_hash.to_string - @@ Staged_ledger_hash.ledger_hash staged_ledger_hash ) + @@ Staged_ledger_hash.ledger_hash staged_ledger_hash) ; field "stagedLedgerProofEmitted" ~typ:bool ~doc: - "Block finished a staged ledger, and a proof was emitted from \ - it and included into this block's proof. If there is no \ - transition frontier available or no block found, this will \ - return null." + "Block finished a staged ledger, and a proof was emitted from it \ + and included into this block's proof. If there is no transition \ + frontier available or no block found, this will return null." 
~args:Arg.[] - ~resolve:(fun {ctx= coda; _} t -> + ~resolve:(fun { ctx = coda; _ } t -> let open Option.Let_syntax in let _, hash = t in let%bind frontier = @@ -615,8 +622,8 @@ module Types = struct | None -> None | Some b -> - Some (Transition_frontier.Breadcrumb.just_emitted_a_proof b) - ) ] ) + Some (Transition_frontier.Breadcrumb.just_emitted_a_proof b)) + ]) let protocol_state : ( 'context @@ -630,14 +637,14 @@ module Types = struct ~args:Arg.[] ~resolve:(fun _ t -> let protocol_state, _ = t in - State_hash.to_base58_check protocol_state.previous_state_hash ) + State_hash.to_base58_check protocol_state.previous_state_hash) ; field "blockchainState" ~doc:"State which is agnostic of a particular consensus algorithm" ~typ:(non_null blockchain_state) ~args:Arg.[] ~resolve:(fun _ t -> let protocol_state, state_hash = t in - (protocol_state.blockchain_state, state_hash) ) + (protocol_state.blockchain_state, state_hash)) ; field "consensusState" ~doc: "State specific to the Codaboros Proof of Stake consensus \ @@ -646,36 +653,39 @@ module Types = struct ~args:Arg.[] ~resolve:(fun _ t -> let protocol_state, _ = t in - protocol_state.consensus_state ) ] ) + protocol_state.consensus_state) + ]) - let chain_reorganization_status : ('contxt, [`Changed] option) typ = + let chain_reorganization_status : ('contxt, [ `Changed ] option) typ = enum "ChainReorganizationStatus" ~doc:"Status for whenever the blockchain is reorganized" - ~values:[enum_value "CHANGED" ~value:`Changed] + ~values:[ enum_value "CHANGED" ~value:`Changed ] let genesis_constants = obj "GenesisConstants" ~fields:(fun _ -> [ field "accountCreationFee" ~typ:(non_null uint64) ~doc:"The fee charged to create a new account" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> (Mina_lib.config coda).precomputed_values.constraint_constants - .account_creation_fee |> Currency.Fee.to_uint64 ) + .account_creation_fee |> Currency.Fee.to_uint64) ; field "coinbase" ~typ:(non_null 
uint64) ~doc: "The amount received as a coinbase reward for producing a block" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> (Mina_lib.config coda).precomputed_values.constraint_constants - .coinbase_amount |> Currency.Amount.to_uint64 ) ] ) + .coinbase_amount |> Currency.Amount.to_uint64) + ]) module AccountObj = struct module AnnotatedBalance = struct type t = - { total: Balance.t - ; unknown: Balance.t - ; timing: Mina_base.Account_timing.t - ; breadcrumb: Transition_frontier.Breadcrumb.t option } + { total : Balance.t + ; unknown : Balance.t + ; timing : Mina_base.Account_timing.t + ; breadcrumb : Transition_frontier.Breadcrumb.t option + } let min_balance (b : t) = match (b.timing, b.breadcrumb) with @@ -727,11 +737,10 @@ module Types = struct let total_balance : uint64 = Balance.to_uint64 b.total in let min_balance_uint64 = Balance.to_uint64 min_balance in if - Unsigned.UInt64.compare total_balance - min_balance_uint64 + Unsigned.UInt64.compare total_balance min_balance_uint64 > 0 then Unsigned.UInt64.sub total_balance min_balance_uint64 - else Unsigned.UInt64.zero ) ) + else Unsigned.UInt64.zero)) ; field "locked" ~typ:uint64 ~doc: "The amount of mina owned by the account which is currently \ @@ -739,7 +748,7 @@ module Types = struct ~deprecated:(Deprecated None) ~args:Arg.[] ~resolve:(fun _ (b : t) -> - Option.map (min_balance b) ~f:Balance.to_uint64 ) + Option.map (min_balance b) ~f:Balance.to_uint64) ; field "blockHeight" ~typ:(non_null uint32) ~doc:"Block height at which balance was measured" ~args:Arg.[] @@ -748,21 +757,20 @@ module Types = struct | None -> Unsigned.UInt32.zero | Some crumb -> - Transition_frontier.Breadcrumb.blockchain_length crumb ) + Transition_frontier.Breadcrumb.blockchain_length crumb) (* TODO: Mutually recurse with "block" instead -- #5396 *) ; field "stateHash" ~typ:string ~doc: - "Hash of block at which balance was measured. Can be null \ - if bootstrapping. 
Guaranteed to be non-null for direct \ - account lookup queries when not bootstrapping. Can also be \ - null when accessed as nested properties (eg. via \ - delegators). " + "Hash of block at which balance was measured. Can be null if \ + bootstrapping. Guaranteed to be non-null for direct account \ + lookup queries when not bootstrapping. Can also be null \ + when accessed as nested properties (eg. via delegators). " ~args:Arg.[] ~resolve:(fun _ (b : t) -> Option.map b.breadcrumb ~f:(fun crumb -> State_hash.to_base58_check - @@ Transition_frontier.Breadcrumb.state_hash crumb ) ) ] - ) + @@ Transition_frontier.Breadcrumb.state_hash crumb)) + ]) end module Partial_account = struct @@ -777,7 +785,8 @@ module Types = struct ; voting_for ; timing ; permissions - ; snapp } = + ; snapp + } = let open Option.Let_syntax in let%bind public_key = public_key in let%bind token_permissions = token_permissions in @@ -798,7 +807,8 @@ module Types = struct ; voting_for ; timing ; permissions - ; snapp } + ; snapp + } let of_full_account ?breadcrumb { Account.Poly.public_key @@ -811,22 +821,25 @@ module Types = struct ; voting_for ; timing ; permissions - ; snapp } = + ; snapp + } = { Account.Poly.public_key ; token_id - ; token_permissions= Some token_permissions - ; nonce= Some nonce - ; balance= - { AnnotatedBalance.total= balance - ; unknown= balance + ; token_permissions = Some token_permissions + ; nonce = Some nonce + ; balance = + { AnnotatedBalance.total = balance + ; unknown = balance ; timing - ; breadcrumb } - ; receipt_chain_hash= Some receipt_chain_hash + ; breadcrumb + } + ; receipt_chain_hash = Some receipt_chain_hash ; delegate - ; voting_for= Some voting_for + ; voting_for = Some voting_for ; timing - ; permissions= Some permissions - ; snapp } + ; permissions = Some permissions + ; snapp + } let of_account_id coda account_id = let account = @@ -838,35 +851,37 @@ module Types = struct in Ledger.location_of_account ledger account_id |> Option.bind ~f:(Ledger.get 
ledger) - |> Option.map ~f:(fun account -> (account, tip)) ) + |> Option.map ~f:(fun account -> (account, tip))) in match account with | Some (account, breadcrumb) -> of_full_account ~breadcrumb account | None -> Account. - { Poly.public_key= Account_id.public_key account_id - ; token_id= Account_id.token_id account_id - ; token_permissions= None - ; nonce= None - ; delegate= None - ; balance= - { AnnotatedBalance.total= Balance.zero - ; unknown= Balance.zero - ; timing= Timing.Untimed - ; breadcrumb= None } - ; receipt_chain_hash= None - ; voting_for= None - ; timing= Timing.Untimed - ; permissions= None - ; snapp= None } + { Poly.public_key = Account_id.public_key account_id + ; token_id = Account_id.token_id account_id + ; token_permissions = None + ; nonce = None + ; delegate = None + ; balance = + { AnnotatedBalance.total = Balance.zero + ; unknown = Balance.zero + ; timing = Timing.Untimed + ; breadcrumb = None + } + ; receipt_chain_hash = None + ; voting_for = None + ; timing = Timing.Untimed + ; permissions = None + ; snapp = None + } let of_pk coda pk = of_account_id coda (Account_id.create pk Token_id.default) end type t = - { account: + { account : ( Public_key.Compressed.t , Token_id.t , Token_permissions.t option @@ -879,31 +894,33 @@ module Types = struct , Permissions.t option , Snapp_account.t option ) Account.Poly.t - ; locked: bool option - ; is_actively_staking: bool - ; path: string - ; index: Account.Index.t option } + ; locked : bool option + ; is_actively_staking : bool + ; path : string + ; index : Account.Index.t option + } let lift coda pk account = let block_production_pubkeys = Mina_lib.block_production_pubkeys coda in let accounts = Mina_lib.wallets coda in let best_tip_ledger = Mina_lib.best_ledger coda in { account - ; locked= Secrets.Wallets.check_locked accounts ~needle:pk - ; is_actively_staking= + ; locked = Secrets.Wallets.check_locked accounts ~needle:pk + ; is_actively_staking = ( if Token_id.(equal default) account.token_id 
then Public_key.Compressed.Set.mem block_production_pubkeys pk else (* Non-default token accounts cannot stake. *) false ) - ; path= Secrets.Wallets.get_path accounts pk - ; index= + ; path = Secrets.Wallets.get_path accounts pk + ; index = ( match best_tip_ledger with | `Active ledger -> Option.try_with (fun () -> Ledger.index_of_account_exn ledger - (Account_id.create account.public_key account.token_id) ) + (Account_id.create account.public_key account.token_id)) | _ -> - None ) } + None ) + } let get_best_ledger_account coda aid = lift coda @@ -913,7 +930,7 @@ module Types = struct let get_best_ledger_account_pk coda pk = lift coda pk (Partial_account.of_pk coda pk) - let account_id {Account.Poly.public_key; token_id; _} = + let account_id { Account.Poly.public_key; token_id; _ } = Account_id.create public_key token_id let rec account = @@ -923,36 +940,37 @@ module Types = struct [ field "publicKey" ~typ:(non_null public_key) ~doc:"The public identity of the account" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> - account.Account.Poly.public_key ) + ~resolve:(fun _ { account; _ } -> + account.Account.Poly.public_key) ; field "token" ~typ:(non_null token_id) ~doc:"The token associated with this account" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> account.Account.Poly.token_id) + ~resolve:(fun _ { account; _ } -> + account.Account.Poly.token_id) ; field "timing" ~typ:(non_null account_timing) ~doc:"The timing associated with this account" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> account.Account.Poly.timing) + ~resolve:(fun _ { account; _ } -> account.Account.Poly.timing) ; field "balance" ~typ:(non_null AnnotatedBalance.obj) ~doc:"The amount of mina owned by the account" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> account.Account.Poly.balance) + ~resolve:(fun _ { account; _ } -> account.Account.Poly.balance) ; field "nonce" ~typ:string ~doc: "A natural number that increases with each transaction \ (stringified uint32)" ~args:Arg.[] - ~resolve:(fun _ 
{account; _} -> + ~resolve:(fun _ { account; _ } -> Option.map ~f:Account.Nonce.to_string - account.Account.Poly.nonce ) + account.Account.Poly.nonce) ; field "inferredNonce" ~typ:string ~doc: "Like the `nonce` field, except it includes the scheduled \ transactions (transactions not yet included in a block) \ (stringified uint32)" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {account; _} -> + ~resolve:(fun { ctx = coda; _ } { account; _ } -> let account_id = account_id account in match Mina_lib @@ -962,56 +980,57 @@ module Types = struct | `Active (Some nonce) -> Some (Account.Nonce.to_string nonce) | `Active None | `Bootstrapping -> - None ) + None) ; field "epochDelegateAccount" ~typ:(Lazy.force account) ~doc: "The account that you delegated on the staking ledger of \ the current block's epoch" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {account; _} -> + ~resolve:(fun { ctx = coda; _ } { account; _ } -> let open Option.Let_syntax in let account_id = account_id account in match%bind Mina_lib.staking_ledger coda with | Genesis_epoch_ledger staking_ledger -> ( - match - let open Option.Let_syntax in - account_id - |> Ledger.location_of_account staking_ledger - >>= Ledger.get staking_ledger - with - | Some delegate_account -> + match + let open Option.Let_syntax in + account_id + |> Ledger.location_of_account staking_ledger + >>= Ledger.get staking_ledger + with + | Some delegate_account -> + let delegate_key = delegate_account.public_key in + Some (get_best_ledger_account_pk coda delegate_key) + | None -> + [%log' warn (Mina_lib.top_level_logger coda)] + "Could not retrieve delegate account from the \ + genesis ledger. The account was not present in \ + the ledger." 
; + None ) + | Ledger_db staking_ledger -> ( + try + let index = + Mina_base.Ledger.Db.index_of_account_exn + staking_ledger account_id + in + let delegate_account = + Mina_base.Ledger.Db.get_at_index_exn staking_ledger + index + in let delegate_key = delegate_account.public_key in Some (get_best_ledger_account_pk coda delegate_key) - | None -> + with e -> [%log' warn (Mina_lib.top_level_logger coda)] - "Could not retrieve delegate account from the \ - genesis ledger. The account was not present in \ - the ledger." ; - None ) - | Ledger_db staking_ledger -> ( - try - let index = - Mina_base.Ledger.Db.index_of_account_exn - staking_ledger account_id - in - let delegate_account = - Mina_base.Ledger.Db.get_at_index_exn staking_ledger - index - in - let delegate_key = delegate_account.public_key in - Some (get_best_ledger_account_pk coda delegate_key) - with e -> - [%log' warn (Mina_lib.top_level_logger coda)] - ~metadata:[("error", `String (Exn.to_string e))] - "Could not retrieve delegate account from sparse \ - ledger. The account may not be in the ledger: $error" ; - None ) ) + ~metadata:[ ("error", `String (Exn.to_string e)) ] + "Could not retrieve delegate account from sparse \ + ledger. 
The account may not be in the ledger: \ + $error" ; + None )) ; field "receiptChainHash" ~typ:string ~doc:"Top hash of the receipt chain merkle-list" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> + ~resolve:(fun _ { account; _ } -> Option.map ~f:Receipt.Chain_hash.to_string - account.Account.Poly.receipt_chain_hash ) + account.Account.Poly.receipt_chain_hash) ; field "delegate" ~typ:public_key ~doc: "The public key to which you are delegating - if you are \ @@ -1019,16 +1038,17 @@ module Types = struct key" ~args:Arg.[] ~deprecated:(Deprecated (Some "use delegateAccount instead")) - ~resolve:(fun _ {account; _} -> account.Account.Poly.delegate) + ~resolve:(fun _ { account; _ } -> + account.Account.Poly.delegate) ; field "delegateAccount" ~typ:(Lazy.force account) ~doc: "The account to which you are delegating - if you are not \ delegating to anybody, this would return your public key" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {account; _} -> + ~resolve:(fun { ctx = coda; _ } { account; _ } -> Option.map ~f:(get_best_ledger_account_pk coda) - account.Account.Poly.delegate ) + account.Account.Poly.delegate) ; field "delegators" ~typ:(list @@ non_null @@ Lazy.force account) ~doc: @@ -1036,7 +1056,7 @@ module Types = struct that the info is recorded in the last epoch so it might \ not be up to date with the current account status)" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {account; _} -> + ~resolve:(fun { ctx = coda; _ } { account; _ } -> let open Option.Let_syntax in let pk = account.Account.Poly.public_key in let%map delegators = @@ -1045,28 +1065,29 @@ module Types = struct let best_tip_ledger = Mina_lib.best_ledger coda in List.map ~f:(fun a -> - { account= Partial_account.of_full_account a - ; locked= None - ; is_actively_staking= true - ; path= "" - ; index= + { account = Partial_account.of_full_account a + ; locked = None + ; is_actively_staking = true + ; path = "" + ; index = ( match best_tip_ledger with | `Active ledger -> Option.try_with (fun () -> 
Ledger.index_of_account_exn ledger - (Account.identifier a) ) + (Account.identifier a)) | _ -> - None ) } ) - delegators ) + None ) + }) + delegators) ; field "lastEpochDelegators" ~typ:(list @@ non_null @@ Lazy.force account) ~doc: "The list of accounts which are delegating to you in the \ last epoch (note that the info is recorded in the one \ - before last epoch epoch so it might not be up to date \ - with the current account status)" + before last epoch epoch so it might not be up to date with \ + the current account status)" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {account; _} -> + ~resolve:(fun { ctx = coda; _ } { account; _ } -> let open Option.Let_syntax in let pk = account.Account.Poly.public_key in let%map delegators = @@ -1075,72 +1096,74 @@ module Types = struct let best_tip_ledger = Mina_lib.best_ledger coda in List.map ~f:(fun a -> - { account= Partial_account.of_full_account a - ; locked= None - ; is_actively_staking= true - ; path= "" - ; index= + { account = Partial_account.of_full_account a + ; locked = None + ; is_actively_staking = true + ; path = "" + ; index = ( match best_tip_ledger with | `Active ledger -> Option.try_with (fun () -> Ledger.index_of_account_exn ledger - (Account.identifier a) ) + (Account.identifier a)) | _ -> - None ) } ) - delegators ) + None ) + }) + delegators) ; field "votingFor" ~typ:string ~doc: "The previous epoch lock hash of the chain which you are \ voting for" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> + ~resolve:(fun _ { account; _ } -> Option.map ~f:Mina_base.State_hash.to_base58_check - account.Account.Poly.voting_for ) + account.Account.Poly.voting_for) ; field "stakingActive" ~typ:(non_null bool) ~doc: "True if you are actively staking with this account on the \ - current daemon - this may not yet have been updated if \ - the staking key was changed recently" + current daemon - this may not yet have been updated if the \ + staking key was changed recently" ~args:Arg.[] - ~resolve:(fun _ 
{is_actively_staking; _} -> - is_actively_staking ) + ~resolve:(fun _ { is_actively_staking; _ } -> + is_actively_staking) ; field "privateKeyPath" ~typ:(non_null string) ~doc:"Path of the private key file for this account" ~args:Arg.[] - ~resolve:(fun _ {path; _} -> path) + ~resolve:(fun _ { path; _ } -> path) ; field "locked" ~typ:bool ~doc: "True if locked, false if unlocked, null if the account \ isn't tracked by the queried daemon" ~args:Arg.[] - ~resolve:(fun _ {locked; _} -> locked) + ~resolve:(fun _ { locked; _ } -> locked) ; field "isTokenOwner" ~typ:bool ~doc:"True if this account owns its associated token" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> + ~resolve:(fun _ { account; _ } -> match%map.Option account.token_permissions with | Token_owned _ -> true | Not_owned _ -> - false ) + false) ; field "isDisabled" ~typ:bool ~doc: - "True if this account has been disabled by the owner of \ - the associated token" + "True if this account has been disabled by the owner of the \ + associated token" ~args:Arg.[] - ~resolve:(fun _ {account; _} -> + ~resolve:(fun _ { account; _ } -> match%map.Option account.token_permissions with | Token_owned _ -> false - | Not_owned {account_disabled} -> - account_disabled ) + | Not_owned { account_disabled } -> + account_disabled) ; field "index" ~typ:int ~doc: "The index of this account in the ledger, or null if this \ - account does not yet have a known position in the best \ - tip ledger" + account does not yet have a known position in the best tip \ + ledger" ~args:Arg.[] - ~resolve:(fun _ {index; _} -> index) ] )) + ~resolve:(fun _ { index; _ } -> index) + ])) let account = Lazy.force account end @@ -1155,8 +1178,7 @@ module Types = struct | `Mint_tokens ] option ) typ = - scalar "UserCommandKind" ~doc:"The kind of user command" - ~coerce:(function + scalar "UserCommandKind" ~doc:"The kind of user command" ~coerce:(function | `Payment -> `String "PAYMENT" | `Stake_delegation -> @@ -1166,7 +1188,7 @@ module Types = 
struct | `Create_token_account -> `String "CREATE_TOKEN_ACCOUNT" | `Mint_tokens -> - `String "MINT_TOKENS" ) + `String "MINT_TOKENS") let to_kind (t : Signed_command.t) = match Signed_command.payload t |> Signed_command_payload.body with @@ -1239,7 +1261,8 @@ module Types = struct ~args:[] ~doc:"Account of the receiver" ~deprecated:(Deprecated (Some "use receiver field instead")) ; abstract_field "failureReason" ~typ:string ~args:[] - ~doc:"null is no failure, reason for failure otherwise." ] ) + ~doc:"null is no failure, reason for failure otherwise." + ]) module Status = struct type t = @@ -1249,14 +1272,14 @@ module Types = struct end module With_status = struct - type 'a t = {data: 'a; status: Status.t} + type 'a t = { data : 'a; status : Status.t } - let map t ~f = {t with data= f t.data} + let map t ~f = { t with data = f t.data } end let field_no_status ?doc ?deprecated lab ~typ ~args ~resolve = field ?doc ?deprecated lab ~typ ~args ~resolve:(fun c uc -> - resolve c uc.With_status.data ) + resolve c uc.With_status.data) let user_command_shared_fields : ( Mina_lib.t @@ -1265,10 +1288,10 @@ module Types = struct list = [ field_no_status "id" ~typ:(non_null guid) ~args:[] ~resolve:(fun _ user_command -> - Signed_command.to_base58_check user_command.With_hash.data ) + Signed_command.to_base58_check user_command.With_hash.data) ; field_no_status "hash" ~typ:(non_null string) ~args:[] ~resolve:(fun _ user_command -> - Transaction_hash.to_base58_check user_command.With_hash.hash ) + Transaction_hash.to_base58_check user_command.With_hash.hash) ; field_no_status "kind" ~typ:(non_null kind) ~args:[] ~doc:"String describing the kind of user command" ~resolve:(fun _ cmd -> to_kind cmd.With_hash.data) @@ -1277,27 +1300,27 @@ module Types = struct ~resolve:(fun _ payment -> Signed_command_payload.nonce @@ Signed_command.payload payment.With_hash.data - |> Account.Nonce.to_int ) + |> Account.Nonce.to_int) ; field_no_status "source" ~typ:(non_null AccountObj.account) 
~args:[] ~doc:"Account that the command is sent from" - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.source ~next_available_token:Token_id.invalid - cmd.With_hash.data) ) + cmd.With_hash.data)) ; field_no_status "receiver" ~typ:(non_null AccountObj.account) ~args:[] ~doc:"Account that the command applies to" - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.receiver ~next_available_token:Token_id.invalid - cmd.With_hash.data) ) + cmd.With_hash.data)) ; field_no_status "feePayer" ~typ:(non_null AccountObj.account) ~args:[] ~doc:"Account that pays the fees for the command" - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda - (Signed_command.fee_payer cmd.With_hash.data) ) + (Signed_command.fee_payer cmd.With_hash.data)) ; field_no_status "token" ~typ:(non_null token_id) ~args:[] ~doc:"Token used for the transaction" ~resolve:(fun _ cmd -> - Signed_command.token cmd.With_hash.data ) + Signed_command.token cmd.With_hash.data) ; field_no_status "amount" ~typ:(non_null uint64) ~args:[] ~doc: "Amount that the source is sending to receiver; 0 for commands \ @@ -1306,15 +1329,15 @@ module Types = struct | Some amount -> Currency.Amount.to_uint64 amount | None -> - Unsigned.UInt64.zero ) + Unsigned.UInt64.zero) ; field_no_status "feeToken" ~typ:(non_null token_id) ~args:[] ~doc:"Token used to pay the fee" ~resolve:(fun _ cmd -> - Signed_command.fee_token cmd.With_hash.data ) + Signed_command.fee_token cmd.With_hash.data) ; field_no_status "fee" ~typ:(non_null uint64) ~args:[] ~doc: "Fee that the fee-payer is willing to pay for making the \ transaction" ~resolve:(fun _ cmd -> - Signed_command.fee cmd.With_hash.data |> Currency.Fee.to_uint64 ) + Signed_command.fee cmd.With_hash.data |> Currency.Fee.to_uint64) ; field_no_status "memo" 
~typ:(non_null string) ~args:[] ~doc: (sprintf @@ -1325,7 +1348,7 @@ module Types = struct ~resolve:(fun _ payment -> Signed_command_payload.memo @@ Signed_command.payload payment.With_hash.data - |> Signed_command_memo.to_string ) + |> Signed_command_memo.to_string) ; field_no_status "isDelegation" ~typ:(non_null bool) ~args:[] ~doc:"If true, this command represents a delegation of stake" ~deprecated:(Deprecated (Some "use kind field instead")) @@ -1337,7 +1360,7 @@ module Types = struct | Stake_delegation _ -> true | _ -> - false ) + false) ; field_no_status "from" ~typ:(non_null public_key) ~args:[] ~doc:"Public key of the sender" ~deprecated:(Deprecated (Some "use feePayer field instead")) @@ -1345,9 +1368,9 @@ module Types = struct ; field_no_status "fromAccount" ~typ:(non_null AccountObj.account) ~args:[] ~doc:"Account of the sender" ~deprecated:(Deprecated (Some "use feePayer field instead")) - ~resolve:(fun {ctx= coda; _} payment -> + ~resolve:(fun { ctx = coda; _ } payment -> AccountObj.get_best_ledger_account coda - @@ Signed_command.fee_payer payment.With_hash.data ) + @@ Signed_command.fee_payer payment.With_hash.data) ; field_no_status "to" ~typ:(non_null public_key) ~args:[] ~doc:"Public key of the receiver" ~deprecated:(Deprecated (Some "use receiver field instead")) @@ -1357,10 +1380,10 @@ module Types = struct ~doc:"Account of the receiver" ~deprecated:(Deprecated (Some "use receiver field instead")) ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda @@ Signed_command.receiver ~next_available_token:Token_id.invalid - cmd.With_hash.data ) + cmd.With_hash.data) ; field "failureReason" ~typ:string ~args:[] ~doc: "null is no failure or status unknown, reason for failure \ @@ -1369,7 +1392,8 @@ module Types = struct | Applied | Unknown -> None | Included_but_failed failure -> - Some (Transaction_status.Failure.to_string failure) ) ] + Some 
(Transaction_status.Failure.to_string failure)) + ] let payment = obj "UserCommandPayment" ~fields:(fun _ -> user_command_shared_fields) @@ -1379,17 +1403,16 @@ module Types = struct let stake_delegation = obj "UserCommandDelegation" ~fields:(fun _ -> field_no_status "delegator" ~typ:(non_null AccountObj.account) - ~args:[] ~resolve:(fun {ctx= coda; _} cmd -> + ~args:[] ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.source ~next_available_token:Token_id.invalid - cmd.With_hash.data) ) + cmd.With_hash.data)) :: field_no_status "delegatee" ~typ:(non_null AccountObj.account) - ~args:[] ~resolve:(fun {ctx= coda; _} cmd -> + ~args:[] ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.receiver - ~next_available_token:Token_id.invalid cmd.With_hash.data) - ) - :: user_command_shared_fields ) + ~next_available_token:Token_id.invalid cmd.With_hash.data)) + :: user_command_shared_fields) let mk_stake_delegation = add_type user_command_interface stake_delegation @@ -1398,22 +1421,20 @@ module Types = struct field_no_status "tokenOwner" ~typ:(non_null public_key) ~args:[] ~doc:"Public key to set as the owner of the new token" ~resolve:(fun _ cmd -> Signed_command.source_pk cmd.With_hash.data) - :: field_no_status "newAccountsDisabled" ~typ:(non_null bool) - ~args:[] + :: field_no_status "newAccountsDisabled" ~typ:(non_null bool) ~args:[] ~doc:"Whether new accounts created in this token are disabled" ~resolve:(fun _ cmd -> match Signed_command_payload.body @@ Signed_command.payload cmd.With_hash.data with - | Create_new_token {disable_new_accounts; _} -> + | Create_new_token { disable_new_accounts; _ } -> disable_new_accounts | _ -> (* We cannot exclude this at the type level. 
*) failwith - "Type error: Expected a Create_new_token user command" - ) - :: user_command_shared_fields ) + "Type error: Expected a Create_new_token user command") + :: user_command_shared_fields) let mk_create_new_token = add_type user_command_interface create_new_token @@ -1421,10 +1442,10 @@ module Types = struct obj "UserCommandNewAccount" ~fields:(fun _ -> field_no_status "tokenOwner" ~typ:(non_null AccountObj.account) ~args:[] ~doc:"The account that owns the token for the new account" - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.source ~next_available_token:Token_id.invalid - cmd.With_hash.data) ) + cmd.With_hash.data)) :: field_no_status "disabled" ~typ:(non_null bool) ~args:[] ~doc: "Whether this account should be disabled upon creation. If \ @@ -1435,14 +1456,13 @@ module Types = struct Signed_command_payload.body @@ Signed_command.payload cmd.With_hash.data with - | Create_token_account {account_disabled; _} -> + | Create_token_account { account_disabled; _ } -> account_disabled | _ -> (* We cannot exclude this at the type level. 
*) failwith - "Type error: Expected a Create_new_token user command" - ) - :: user_command_shared_fields ) + "Type error: Expected a Create_new_token user command") + :: user_command_shared_fields) let mk_create_token_account = add_type user_command_interface create_token_account @@ -1451,17 +1471,17 @@ module Types = struct obj "UserCommandMintTokens" ~fields:(fun _ -> field_no_status "tokenOwner" ~typ:(non_null AccountObj.account) ~args:[] ~doc:"The account that owns the token to mint" - ~resolve:(fun {ctx= coda; _} cmd -> + ~resolve:(fun { ctx = coda; _ } cmd -> AccountObj.get_best_ledger_account coda (Signed_command.source ~next_available_token:Token_id.invalid - cmd.With_hash.data) ) - :: user_command_shared_fields ) + cmd.With_hash.data)) + :: user_command_shared_fields) let mk_mint_tokens = add_type user_command_interface mint_tokens let mk_user_command - (cmd : - (Signed_command.t, Transaction_hash.t) With_hash.t With_status.t) = + (cmd : (Signed_command.t, Transaction_hash.t) With_hash.t With_status.t) + = match Signed_command_payload.body @@ Signed_command.payload cmd.data.data with @@ -1485,11 +1505,11 @@ module Types = struct ~fields:(fun _ -> [ field "userCommands" ~doc: - "List of user commands (payments and stake delegations) \ - included in this block" + "List of user commands (payments and stake delegations) included \ + in this block" ~typ:(non_null @@ list @@ non_null user_command) ~args:Arg.[] - ~resolve:(fun _ {commands; _} -> + ~resolve:(fun _ { commands; _ } -> List.filter_map commands ~f:(fun t -> match t.data.data with | Signed_command c -> @@ -1502,26 +1522,28 @@ module Types = struct in Some (UserCommand.mk_user_command - {status; data= {t.data with data= c}}) + { status; data = { t.data with data = c } }) | Snapp_command _ -> (* TODO: This should be supported in some graph QL query *) - None ) ) + None)) ; field "feeTransfer" ~doc:"List of fee transfers included in this block" ~typ:(non_null @@ list @@ non_null fee_transfer) ~args:Arg.[] 
- ~resolve:(fun _ {fee_transfers; _} -> fee_transfers) + ~resolve:(fun _ { fee_transfers; _ } -> fee_transfers) ; field "coinbase" ~typ:(non_null uint64) ~doc:"Amount of mina granted to the producer of this block" ~args:Arg.[] - ~resolve:(fun _ {coinbase; _} -> Currency.Amount.to_uint64 coinbase) + ~resolve:(fun _ { coinbase; _ } -> + Currency.Amount.to_uint64 coinbase) ; field "coinbaseReceiverAccount" ~typ:AccountObj.account ~doc:"Account to which the coinbase for this block was granted" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {coinbase_receiver; _} -> + ~resolve:(fun { ctx = coda; _ } { coinbase_receiver; _ } -> Option.map ~f:(AccountObj.get_best_ledger_account_pk coda) - coinbase_receiver ) ] ) + coinbase_receiver) + ]) let protocol_state_proof : (Mina_lib.t, Proof.t option) typ = obj "protocolStateProof" ~fields:(fun _ -> @@ -1531,7 +1553,8 @@ module Types = struct (* Use the precomputed block proof encoding, for consistency. *) Some (Mina_transition.External_transition.Precomputed_block.Proof - .to_bin_string proof) ) ] ) + .to_bin_string proof)) + ]) let block : ( Mina_lib.t @@ -1543,46 +1566,47 @@ module Types = struct ~doc:"Public key of account that produced this block" ~deprecated:(Deprecated (Some "use creatorAccount field instead")) ~args:Arg.[] - ~resolve:(fun _ {With_hash.data; _} -> data.creator) + ~resolve:(fun _ { With_hash.data; _ } -> data.creator) ; field "creatorAccount" ~typ:(non_null AccountObj.account) ~doc:"Account that produced this block" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {With_hash.data; _} -> - AccountObj.get_best_ledger_account_pk coda data.creator ) + ~resolve:(fun { ctx = coda; _ } { With_hash.data; _ } -> + AccountObj.get_best_ledger_account_pk coda data.creator) ; field "winnerAccount" ~typ:(non_null AccountObj.account) ~doc:"Account that won the slot (Delegator/Staker)" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} {With_hash.data; _} -> - AccountObj.get_best_ledger_account_pk coda data.winner ) + ~resolve:(fun { ctx 
= coda; _ } { With_hash.data; _ } -> + AccountObj.get_best_ledger_account_pk coda data.winner) ; field "stateHash" ~typ:(non_null string) ~doc:"Base58Check-encoded hash of the state after this block" ~args:Arg.[] - ~resolve:(fun _ {With_hash.hash; _} -> - State_hash.to_base58_check hash ) + ~resolve:(fun _ { With_hash.hash; _ } -> + State_hash.to_base58_check hash) ; field "stateHashField" ~typ:(non_null string) ~doc: "Experimental: Bigint field-element representation of stateHash" ~args:Arg.[] - ~resolve:(fun _ {With_hash.hash; _} -> - State_hash.to_decimal_string hash ) + ~resolve:(fun _ { With_hash.hash; _ } -> + State_hash.to_decimal_string hash) ; field "protocolState" ~typ:(non_null protocol_state) ~args:Arg.[] - ~resolve:(fun _ {With_hash.data; With_hash.hash; _} -> - (data.protocol_state, hash) ) + ~resolve:(fun _ { With_hash.data; With_hash.hash; _ } -> + (data.protocol_state, hash)) ; field "protocolStateProof" ~typ:(non_null protocol_state_proof) ~doc:"Snark proof of blockchain state" ~args:Arg.[] - ~resolve:(fun _ {With_hash.data; _} -> data.proof) + ~resolve:(fun _ { With_hash.data; _ } -> data.proof) ; field "transactions" ~typ:(non_null transactions) ~args:Arg.[] - ~resolve:(fun _ {With_hash.data; _} -> data.transactions) + ~resolve:(fun _ { With_hash.data; _ } -> data.transactions) ; field "snarkJobs" ~typ:(non_null @@ list @@ non_null completed_work) ~args:Arg.[] - ~resolve:(fun _ {With_hash.data; _} -> data.snark_jobs) ] ) + ~resolve:(fun _ { With_hash.data; _ } -> data.snark_jobs) + ]) let snark_worker = obj "SnarkWorker" ~fields:(fun _ -> @@ -1595,13 +1619,14 @@ module Types = struct ~typ:(non_null AccountObj.account) ~doc:"Account of the current snark worker" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} (key, _) -> - AccountObj.get_best_ledger_account_pk coda key ) + ~resolve:(fun { ctx = coda; _ } (key, _) -> + AccountObj.get_best_ledger_account_pk coda key) ; field "fee" ~typ:(non_null uint64) ~doc:"Fee that snark worker is charging to 
generate a snark proof" ~args:Arg.[] ~resolve:(fun (_ : Mina_lib.t resolve_info) (_, fee) -> - Currency.Fee.to_uint64 fee ) ] ) + Currency.Fee.to_uint64 fee) + ]) module Payload = struct let peer : ('context, Network_peer.Peer.t option) typ = @@ -1613,10 +1638,11 @@ module Types = struct ~typ:(non_null string) ~args:Arg.[] ~resolve:(fun _ peer -> - Unix.Inet_addr.to_string peer.Network_peer.Peer.host ) + Unix.Inet_addr.to_string peer.Network_peer.Peer.host) ; field "libp2p_port" ~typ:(non_null int) ~args:Arg.[] - ~resolve:(fun _ peer -> peer.Network_peer.Peer.libp2p_port) ] ) + ~resolve:(fun _ peer -> peer.Network_peer.Peer.libp2p_port) + ]) let create_account : (Mina_lib.t, Account.key option) typ = obj "AddAccountPayload" ~fields:(fun _ -> @@ -1629,8 +1655,9 @@ module Types = struct ~typ:(non_null AccountObj.account) ~doc:"Details of created account" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} key -> - AccountObj.get_best_ledger_account_pk coda key ) ] ) + ~resolve:(fun { ctx = coda; _ } key -> + AccountObj.get_best_ledger_account_pk coda key) + ]) let unlock_account : (Mina_lib.t, Account.key option) typ = obj "UnlockPayload" ~fields:(fun _ -> @@ -1643,8 +1670,9 @@ module Types = struct ~typ:(non_null AccountObj.account) ~doc:"Details of unlocked account" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} key -> - AccountObj.get_best_ledger_account_pk coda key ) ] ) + ~resolve:(fun { ctx = coda; _ } key -> + AccountObj.get_best_ledger_account_pk coda key) + ]) let lock_account : (Mina_lib.t, Account.key option) typ = obj "LockPayload" ~fields:(fun _ -> @@ -1656,22 +1684,25 @@ module Types = struct ~typ:(non_null AccountObj.account) ~doc:"Details of locked account" ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} key -> - AccountObj.get_best_ledger_account_pk coda key ) ] ) + ~resolve:(fun { ctx = coda; _ } key -> + AccountObj.get_best_ledger_account_pk coda key) + ]) let delete_account = obj "DeleteAccountPayload" ~fields:(fun _ -> [ field "publicKey" ~typ:(non_null 
public_key) ~doc:"Public key of the deleted account" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let reload_accounts = obj "ReloadAccountsPayload" ~fields:(fun _ -> [ field "success" ~typ:(non_null bool) ~doc:"True when the reload was successful" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let import_account = obj "ImportAccountPayload" ~fields:(fun _ -> @@ -1686,7 +1717,8 @@ module Types = struct ~resolve:(fun _ -> snd) ; field "success" ~typ:(non_null bool) ~args:Arg.[] - ~resolve:(fun _ _ -> true) ] ) + ~resolve:(fun _ _ -> true) + ]) let string_of_banned_status = function | Trust_system.Banned_status.Unbanned -> @@ -1700,31 +1732,34 @@ module Types = struct [ field "ip_addr" ~typ:(non_null string) ~doc:"IP address" ~args:Arg.[] ~resolve:(fun _ (peer, _) -> - Unix.Inet_addr.to_string peer.Network_peer.Peer.host ) + Unix.Inet_addr.to_string peer.Network_peer.Peer.host) ; field "peer_id" ~typ:(non_null string) ~doc:"libp2p Peer ID" ~args:Arg.[] ~resolve:(fun _ (peer, __) -> peer.Network_peer.Peer.peer_id) ; field "trust" ~typ:(non_null float) ~doc:"Trust score" ~args:Arg.[] - ~resolve:(fun _ (_, {trust; _}) -> trust) + ~resolve:(fun _ (_, { trust; _ }) -> trust) ; field "banned_status" ~typ:string ~doc:"Banned status" ~args:Arg.[] - ~resolve:(fun _ (_, {banned; _}) -> - string_of_banned_status banned ) ] ) + ~resolve:(fun _ (_, { banned; _ }) -> + string_of_banned_status banned) + ]) let send_payment = obj "SendPaymentPayload" ~fields:(fun _ -> [ field "payment" ~typ:(non_null user_command) ~doc:"Payment that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let send_delegation = obj "SendDelegationPayload" ~fields:(fun _ -> [ field "delegation" ~typ:(non_null user_command) ~doc:"Delegation change that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let create_token = obj "SendCreateTokenPayload" ~fields:(fun _ -> @@ 
-1732,7 +1767,8 @@ module Types = struct ~typ:(non_null UserCommand.create_new_token) ~doc:"Token creation command that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let create_token_account = obj "SendCreateTokenAccountPayload" ~fields:(fun _ -> @@ -1740,7 +1776,8 @@ module Types = struct ~typ:(non_null UserCommand.create_token_account) ~doc:"Token account creation command that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let mint_tokens = obj "SendMintTokensPayload" ~fields:(fun _ -> @@ -1748,7 +1785,8 @@ module Types = struct ~typ:(non_null UserCommand.mint_tokens) ~doc:"Token minting command that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let send_rosetta_transaction = obj "SendRosettaTransactionPayload" ~fields:(fun _ -> @@ -1756,7 +1794,8 @@ module Types = struct ~typ:(non_null UserCommand.user_command_interface) ~doc:"Command that was sent" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let export_logs = obj "ExportLogsPayload" ~fields:(fun _ -> @@ -1765,16 +1804,19 @@ module Types = struct (non_null (obj "TarFile" ~fields:(fun _ -> [ field "tarfile" ~typ:(non_null string) ~args:[] - ~resolve:(fun _ basename -> basename) ] ))) + ~resolve:(fun _ basename -> basename) + ]))) ~doc:"Tar archive containing logs" ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let add_payment_receipt = obj "AddPaymentReceiptPayload" ~fields:(fun _ -> [ field "payment" ~typ:(non_null user_command) ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let set_staking = obj "SetStakingPayload" ~fields:(fun _ -> @@ -1794,7 +1836,8 @@ module Types = struct ~doc:"Returns the public keys that are now staking their funds" ~typ:(non_null (list (non_null public_key))) ~args:Arg.[] - ~resolve:(fun _ (_, _, currentStaking) -> currentStaking) ] ) + ~resolve:(fun _ (_, _, 
currentStaking) -> currentStaking) + ]) let set_coinbase_receiver = obj "SetCoinbaseReceiverPayload" ~fields:(fun _ -> @@ -1807,18 +1850,20 @@ module Types = struct ~resolve:(fun _ (last_receiver, _) -> last_receiver) ; field "currentCoinbaseReceiver" ~doc: - "Returns the public key that will receive coinbase, or none \ - if it will be the block producer" + "Returns the public key that will receive coinbase, or none if \ + it will be the block producer" ~typ:public_key ~args:Arg.[] - ~resolve:(fun _ (_, current_receiver) -> current_receiver) ] ) + ~resolve:(fun _ (_, current_receiver) -> current_receiver) + ]) let set_snark_work_fee = obj "SetSnarkWorkFeePayload" ~fields:(fun _ -> [ field "lastFee" ~doc:"Returns the last fee set to do snark work" ~typ:(non_null uint64) ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let set_snark_worker = obj "SetSnarkWorkerPayload" ~fields:(fun _ -> @@ -1827,7 +1872,8 @@ module Types = struct "Returns the last public key that was designated for snark work" ~typ:public_key ~args:Arg.[] - ~resolve:(fun _ -> Fn.id) ] ) + ~resolve:(fun _ -> Fn.id) + ]) let set_connection_gating_config = obj "SetConnectionGatingConfigPayload" ~fields:(fun _ -> @@ -1848,7 +1894,8 @@ module Types = struct "If true, no connections will be allowed unless they are from \ a trusted peer" ~args:Arg.[] - ~resolve:(fun _ config -> config.Mina_net2.isolate) ] ) + ~resolve:(fun _ config -> config.Mina_net2.isolate) + ]) end module Arguments = struct @@ -1867,13 +1914,14 @@ module Types = struct try Ok Network_peer.Peer. 
- {peer_id; host= Unix.Inet_addr.of_string host; libp2p_port} - with _ -> Error "Invalid format for NetworkPeer.host" ) + { peer_id; host = Unix.Inet_addr.of_string host; libp2p_port } + with _ -> Error "Invalid format for NetworkPeer.host") ~fields: [ arg "peer_id" ~doc:"base58-encoded peer ID" ~typ:(non_null string) ; arg "host" ~doc:"IP address of the remote host" ~typ:(non_null string) - ; arg "libp2p_port" ~typ:(non_null int) ] + ; arg "libp2p_port" ~typ:(non_null int) + ] let public_key_arg = scalar "PublicKey" ~doc:"Base58Check-encoded public key string" @@ -1881,10 +1929,10 @@ module Types = struct match key with | `String s -> Result.try_with (fun () -> - Public_key.of_base58_check_decompress_exn s ) + Public_key.of_base58_check_decompress_exn s) |> Result.map_error ~f:(fun e -> Exn.to_string e) | _ -> - Error "Invalid format for public key." ) + Error "Invalid format for public key.") let token_id_arg = scalar "TokenId" @@ -1896,20 +1944,19 @@ module Types = struct Ok (Token_id.of_string token) | _ -> Error "Invalid format for token." - with _ -> Error "Invalid format for token." 
) + with _ -> Error "Invalid format for token.") let precomputed_block = - scalar "PrecomputedBlock" - ~doc:"Block encoded in precomputed block format" ~coerce:(fun json -> + scalar "PrecomputedBlock" ~doc:"Block encoded in precomputed block format" + ~coerce:(fun json -> let json = to_yojson json in - Mina_transition.External_transition.Precomputed_block.of_yojson json - ) + Mina_transition.External_transition.Precomputed_block.of_yojson json) let extensional_block = - scalar "ExtensionalBlock" - ~doc:"Block encoded in extensional block format" ~coerce:(fun json -> + scalar "ExtensionalBlock" ~doc:"Block encoded in extensional block format" + ~coerce:(fun json -> let json = to_yojson json in - Archive_lib.Extensional.Block.of_yojson json ) + Archive_lib.Extensional.Block.of_yojson json) module type Numeric_type = sig type t @@ -1928,42 +1975,42 @@ module Types = struct scalar name ~doc: (sprintf - "String or Integer representation of a %s number. If the input \ - is a string, it must represent the number in base 10" + "String or Integer representation of a %s number. If the input is \ + a string, it must represent the number in base 10" lower_name) ~coerce:(fun key -> match key with | `String s -> ( - try - let n = Numeric.of_string s in - let s' = Numeric.to_string n in - (* Here, we check that the string that was passed converts to - the numeric type, and that it is in range, by converting - back to a string and checking that it is equal to the one - passed. 
This prevents the following weirdnesses in the - [Unsigned.UInt*] parsers: - * if the absolute value is greater than [max_int], the value - returned is [max_int] - - ["99999999999999999999999999999999999"] is [max_int] - - ["-99999999999999999999999999999999999"] is [max_int] - * if otherwise the value is negative, the value returned is - [max_int - (x - 1)] - - ["-1"] is [max_int] - * if there is a non-numeric character part-way through the - string, the numeric prefix is treated as a number - - ["1_000_000"] is [1] - - ["-1_000_000"] is [max_int] - - ["1.1"] is [1] - - ["0x15"] is [0] - * leading spaces are ignored - - [" 1"] is [1] - This is annoying to document, none of these behaviors are - useful to users, and unexpectedly triggering one of them - could have nasty consequences. Thus, we raise an error - rather than silently misinterpreting their input. + try + let n = Numeric.of_string s in + let s' = Numeric.to_string n in + (* Here, we check that the string that was passed converts to + the numeric type, and that it is in range, by converting + back to a string and checking that it is equal to the one + passed. This prevents the following weirdnesses in the + [Unsigned.UInt*] parsers: + * if the absolute value is greater than [max_int], the value + returned is [max_int] + - ["99999999999999999999999999999999999"] is [max_int] + - ["-99999999999999999999999999999999999"] is [max_int] + * if otherwise the value is negative, the value returned is + [max_int - (x - 1)] + - ["-1"] is [max_int] + * if there is a non-numeric character part-way through the + string, the numeric prefix is treated as a number + - ["1_000_000"] is [1] + - ["-1_000_000"] is [max_int] + - ["1.1"] is [1] + - ["0x15"] is [0] + * leading spaces are ignored + - [" 1"] is [1] + This is annoying to document, none of these behaviors are + useful to users, and unexpectedly triggering one of them + could have nasty consequences. 
Thus, we raise an error + rather than silently misinterpreting their input. *) - assert (String.equal s s') ; - Ok n - with _ -> Error (sprintf "Could not decode %s." lower_name) ) + assert (String.equal s s') ; + Ok n + with _ -> Error (sprintf "Could not decode %s." lower_name) ) | `Int n -> if n < 0 then Error @@ -1971,7 +2018,7 @@ module Types = struct lower_name) else Ok (Numeric.of_int n) | _ -> - Error (sprintf "Invalid format for %s type." lower_name) ) + Error (sprintf "Invalid format for %s type." lower_name)) let uint64_arg = make_numeric_arg ~name:"UInt64" (module Unsigned.UInt64) @@ -1987,33 +2034,35 @@ module Types = struct (Signature.Raw.decode signature) ~error:"rawSignature decoding error" | None -> ( - match (field, scalar) with - | Some field, Some scalar -> - Ok (Field.of_string field, Inner_curve.Scalar.of_string scalar) - | _ -> - Error "Either field+scalar or rawSignature must by non-null" ) - ) + match (field, scalar) with + | Some field, Some scalar -> + Ok (Field.of_string field, Inner_curve.Scalar.of_string scalar) + | _ -> + Error "Either field+scalar or rawSignature must by non-null" )) ~doc: "A cryptographic signature -- you must provide either field+scalar \ or rawSignature" ~fields: [ arg "field" ~typ:string ~doc:"Field component of signature" ; arg "scalar" ~typ:string ~doc:"Scalar component of signature" - ; arg "rawSignature" ~typ:string ~doc:"Raw encoded signature" ] + ; arg "rawSignature" ~typ:string ~doc:"Raw encoded signature" + ] let vrf_message = obj "VrfMessageInput" ~doc:"The inputs to a vrf evaluation" ~coerce:(fun global_slot epoch_seed delegator_index -> { Consensus_vrf.Layout.Message.global_slot - ; epoch_seed= Mina_base.Epoch_seed.of_base58_check_exn epoch_seed - ; delegator_index } ) + ; epoch_seed = Mina_base.Epoch_seed.of_base58_check_exn epoch_seed + ; delegator_index + }) ~fields: [ arg "globalSlot" ~typ:(non_null uint32_arg) ; arg "epochSeed" ~doc:"Formatted with base58check" ~typ:(non_null string) ; arg 
"delegatorIndex" ~doc:"Position in the ledger of the delegator's account" - ~typ:(non_null int) ] + ~typ:(non_null int) + ] let vrf_threshold = obj "VrfThresholdInput" @@ -2021,9 +2070,10 @@ module Types = struct "The amount of stake delegated, used to determine the threshold for \ a vrf evaluation producing a block" ~coerce:(fun delegated_stake total_stake -> - { Consensus_vrf.Layout.Threshold.delegated_stake= + { Consensus_vrf.Layout.Threshold.delegated_stake = Currency.Balance.of_uint64 delegated_stake - ; total_stake= Currency.Amount.of_uint64 total_stake } ) + ; total_stake = Currency.Amount.of_uint64 total_stake + }) ~fields: [ arg "delegatedStake" ~doc: @@ -2036,29 +2086,31 @@ module Types = struct ~doc: "The total amount of stake across all accounts in the epoch's \ staking ledger." - ~typ:(non_null uint64_arg) ] + ~typ:(non_null uint64_arg) + ] let vrf_evaluation = obj "VrfEvaluationInput" ~doc:"The witness to a vrf evaluation" - ~coerce: - (fun message public_key c s scaled_message_hash vrf_threshold -> + ~coerce:(fun message public_key c s scaled_message_hash vrf_threshold -> { Consensus_vrf.Layout.Evaluation.message - ; public_key= Public_key.decompress_exn public_key - ; c= Snark_params.Tick.Inner_curve.Scalar.of_string c - ; s= Snark_params.Tick.Inner_curve.Scalar.of_string s - ; scaled_message_hash= + ; public_key = Public_key.decompress_exn public_key + ; c = Snark_params.Tick.Inner_curve.Scalar.of_string c + ; s = Snark_params.Tick.Inner_curve.Scalar.of_string s + ; scaled_message_hash = Consensus_vrf.Group.of_string_list_exn scaled_message_hash ; vrf_threshold - ; vrf_output= None - ; vrf_output_fractional= None - ; threshold_met= None } ) + ; vrf_output = None + ; vrf_output_fractional = None + ; threshold_met = None + }) ~fields: [ arg "message" ~typ:(non_null vrf_message) ; arg "publicKey" ~typ:(non_null public_key_arg) ; arg "c" ~typ:(non_null string) ; arg "s" ~typ:(non_null string) ; arg "scaledMessageHash" ~typ:(non_null (list (non_null 
string))) - ; arg "vrfThreshold" ~typ:vrf_threshold ] + ; arg "vrfThreshold" ~typ:vrf_threshold + ] module Fields = struct let from ~doc = arg "from" ~typ:(non_null public_key_arg) ~doc @@ -2099,16 +2151,16 @@ module Types = struct let signature = arg "signature" ~typ:signature_arg ~doc: - "If a signature is provided, this transaction is considered \ - signed and will be broadcasted to the network without requiring \ - a private key" + "If a signature is provided, this transaction is considered signed \ + and will be broadcasted to the network without requiring a \ + private key" end let send_payment = let open Fields in obj "SendPaymentInput" ~coerce:(fun from to_ token amount fee valid_until memo nonce -> - (from, to_, token, amount, fee, valid_until, memo, nonce) ) + (from, to_, token, amount, fee, valid_until, memo, nonce)) ~fields: [ from ~doc:"Public key of sender of payment" ; to_ ~doc:"Public key of recipient of payment" @@ -2118,35 +2170,37 @@ module Types = struct ; fee ~doc:"Fee amount in order to send payment" ; valid_until ; memo - ; nonce ] + ; nonce + ] let send_delegation = let open Fields in obj "SendDelegationInput" ~coerce:(fun from to_ fee valid_until memo nonce -> - (from, to_, fee, valid_until, memo, nonce) ) + (from, to_, fee, valid_until, memo, nonce)) ~fields: [ from ~doc:"Public key of sender of a stake delegation" ; to_ ~doc:"Public key of the account being delegated to" ; fee ~doc:"Fee amount in order to send a stake delegation" ; valid_until ; memo - ; nonce ] + ; nonce + ] let create_token = let open Fields in obj "SendCreateTokenInput" ~coerce:(fun fee_payer token_owner fee valid_until memo nonce -> - (fee_payer, token_owner, fee, valid_until, memo, nonce) ) + (fee_payer, token_owner, fee, valid_until, memo, nonce)) ~fields: [ fee_payer_opt - ~doc: - "Public key to pay the fee from (defaults to the tokenOwner)" + ~doc:"Public key to pay the fee from (defaults to the tokenOwner)" ; token_owner ~doc:"Public key to create the token for" 
; fee ~doc:"Fee amount in order to create a token" ; valid_until ; memo - ; nonce ] + ; nonce + ] let create_token_account = let open Fields in @@ -2160,7 +2214,7 @@ module Types = struct , fee_payer , valid_until , memo - , nonce ) ) + , nonce )) ~fields: [ token_owner ~doc:"Public key of the token's owner" ; token ~doc:"Token to create an account for" @@ -2168,19 +2222,19 @@ module Types = struct ; fee ~doc:"Fee amount in order to create a token account" ; fee_payer_opt ~doc: - "Public key to pay the fees from and sign the transaction \ - with (defaults to the receiver)" + "Public key to pay the fees from and sign the transaction with \ + (defaults to the receiver)" ; valid_until ; memo - ; nonce ] + ; nonce + ] let mint_tokens = let open Fields in obj "SendMintTokensInput" ~coerce: (fun token_owner token receiver amount fee valid_until memo nonce -> - (token_owner, token, receiver, amount, fee, valid_until, memo, nonce) - ) + (token_owner, token, receiver, amount, fee, valid_until, memo, nonce)) ~fields: [ token_owner ~doc:"Public key of the token's owner" ; token ~doc:"Token to mint more of" @@ -2194,20 +2248,22 @@ module Types = struct ; fee ~doc:"Fee amount in order to mint tokens" ; valid_until ; memo - ; nonce ] + ; nonce + ] let rosetta_transaction = Schema.Arg.scalar "RosettaTransaction" ~doc:"A transaction encoded in the rosetta format" ~coerce:(fun graphql_json -> Rosetta_lib.Transaction.to_mina_signed (to_yojson graphql_json) - |> Result.map_error ~f:Error.to_string_hum ) + |> Result.map_error ~f:Error.to_string_hum) let create_account = obj "AddAccountInput" ~coerce:Fn.id ~fields: [ arg "password" ~doc:"Password used to encrypt the new account" - ~typ:(non_null string) ] + ~typ:(non_null string) + ] let unlock_account = obj "UnlockInput" @@ -2215,31 +2271,34 @@ module Types = struct ~fields: [ arg "password" ~doc:"Password for the account to be unlocked" ~typ:(non_null string) - ; arg "publicKey" - ~doc:"Public key specifying which account to unlock" 
- ~typ:(non_null public_key_arg) ] + ; arg "publicKey" ~doc:"Public key specifying which account to unlock" + ~typ:(non_null public_key_arg) + ] let create_hd_account = obj "CreateHDAccountInput" ~coerce:Fn.id ~fields: [ arg "index" ~doc:"Index of the account in hardware wallet" - ~typ:(non_null uint32_arg) ] + ~typ:(non_null uint32_arg) + ] let lock_account = obj "LockInput" ~coerce:Fn.id ~fields: [ arg "publicKey" ~doc:"Public key specifying which account to lock" - ~typ:(non_null public_key_arg) ] + ~typ:(non_null public_key_arg) + ] let delete_account = obj "DeleteAccountInput" ~coerce:Fn.id ~fields: [ arg "publicKey" ~doc:"Public key of account to be deleted" - ~typ:(non_null public_key_arg) ] + ~typ:(non_null public_key_arg) + ] let reset_trust_status = obj "ResetTrustStatusInput" ~coerce:Fn.id - ~fields:[arg "ipAddress" ~typ:(non_null string)] + ~fields:[ arg "ipAddress" ~typ:(non_null string) ] (* TODO: Treat cases where filter_input has a null argument *) let block_filter_input = @@ -2249,7 +2308,8 @@ module Types = struct ~doc: "A public key of a user who has their\n\ \ transaction in the block, or produced the block" - ~typ:(non_null public_key_arg) ] + ~typ:(non_null public_key_arg) + ] let user_command_filter_input = obj "UserCommandFilterType" ~coerce:Fn.id @@ -2258,7 +2318,8 @@ module Types = struct ~doc: "Public key of sender or receiver of transactions you are \ looking for" - ~typ:(non_null public_key_arg) ] + ~typ:(non_null public_key_arg) + ] let set_staking = obj "SetStakingInput" ~coerce:Fn.id @@ -2267,7 +2328,8 @@ module Types = struct ~typ:(non_null (list (non_null public_key_arg))) ~doc: "Public keys of accounts you wish to stake with - these must \ - be accounts that are in trackedAccounts" ] + be accounts that are in trackedAccounts" + ] let set_coinbase_receiver = obj "SetCoinbaseReceiverInput" ~coerce:Fn.id @@ -2280,7 +2342,8 @@ module Types = struct let set_snark_work_fee = obj "SetSnarkWorkFee" - ~fields:[Fields.fee ~doc:"Fee to get 
rewarded for producing snark work"] + ~fields: + [ Fields.fee ~doc:"Fee to get rewarded for producing snark work" ] ~coerce:Fn.id let set_snark_worker = @@ -2289,14 +2352,15 @@ module Types = struct [ arg "publicKey" ~typ:public_key_arg ~doc: "Public key you wish to start snark-working on; null to stop \ - doing any snark work" ] + doing any snark work" + ] module AddPaymentReceipt = struct - type t = {payment: string; added_time: string} + type t = { payment : string; added_time : string } let typ = obj "AddPaymentReceiptInput" - ~coerce:(fun payment added_time -> {payment; added_time}) + ~coerce:(fun payment added_time -> { payment; added_time }) ~fields: [ arg "payment" ~doc:(Doc.bin_prot "Serialized payment") @@ -2306,7 +2370,8 @@ module Types = struct ~doc: (Doc.date "Time that a payment gets added to another clients \ - transaction database") ] + transaction database") + ] end let set_connection_gating_config = @@ -2315,7 +2380,7 @@ module Types = struct let open Result.Let_syntax in let%bind trusted_peers = Result.all trusted_peers in let%map banned_peers = Result.all banned_peers in - Mina_net2.{isolate; trusted_peers; banned_peers} ) + Mina_net2.{ isolate; trusted_peers; banned_peers }) ~fields: Arg. 
[ arg "trustedPeers" @@ -2324,12 +2389,13 @@ module Types = struct ; arg "bannedPeers" ~typ:(non_null (list (non_null peer))) ~doc: - "Peers we will never allow connections from (unless they \ - are also trusted!)" + "Peers we will never allow connections from (unless they are \ + also trusted!)" ; arg "isolate" ~typ:(non_null bool) ~doc: "If true, no connections will be allowed unless they are \ - from a trusted peer" ] + from a trusted peer" + ] end let vrf_message : ('context, Consensus_vrf.Layout.Message.t option) typ = @@ -2337,15 +2403,16 @@ module Types = struct obj "VrfMessage" ~doc:"The inputs to a vrf evaluation" ~fields:(fun _ -> [ field "globalSlot" ~typ:(non_null uint32) ~args:Arg.[] - ~resolve:(fun _ {global_slot; _} -> global_slot) + ~resolve:(fun _ { global_slot; _ } -> global_slot) ; field "epochSeed" ~typ:(non_null epoch_seed) ~args:Arg.[] - ~resolve:(fun _ {epoch_seed; _} -> epoch_seed) + ~resolve:(fun _ { epoch_seed; _ } -> epoch_seed) ; field "delegatorIndex" ~doc:"Position in the ledger of the delegator's account" ~typ:(non_null int) ~args:Arg.[] - ~resolve:(fun _ {delegator_index; _} -> delegator_index) ] ) + ~resolve:(fun _ { delegator_index; _ } -> delegator_index) + ]) let vrf_threshold = obj "VrfThreshold" @@ -2355,20 +2422,20 @@ module Types = struct [ field "delegatedStake" ~doc: "The amount of stake delegated to the vrf evaluator by the \ - delegating account. This should match the amount in the \ - epoch's staking ledger, which may be different to the amount \ - in the current ledger." - ~args:[] ~typ:(non_null uint64) - ~resolve:(fun _ - {Consensus_vrf.Layout.Threshold.delegated_stake; _} - -> Currency.Balance.to_uint64 delegated_stake ) + delegating account. This should match the amount in the epoch's \ + staking ledger, which may be different to the amount in the \ + current ledger." 
~args:[] ~typ:(non_null uint64) + ~resolve:(fun + _ + { Consensus_vrf.Layout.Threshold.delegated_stake; _ } + -> Currency.Balance.to_uint64 delegated_stake) ; field "totalStake" ~doc: "The total amount of stake across all accounts in the epoch's \ - staking ledger." - ~args:[] ~typ:(non_null uint64) - ~resolve:(fun _ {Consensus_vrf.Layout.Threshold.total_stake; _} -> - Currency.Amount.to_uint64 total_stake ) ] ) + staking ledger." ~args:[] ~typ:(non_null uint64) + ~resolve:(fun _ { Consensus_vrf.Layout.Threshold.total_stake; _ } -> + Currency.Amount.to_uint64 total_stake) + ]) let vrf_evaluation : ('context, Consensus_vrf.Layout.Evaluation.t option) typ = @@ -2378,31 +2445,31 @@ module Types = struct ~fields:(fun _ -> [ field "message" ~typ:(non_null vrf_message) ~args:Arg.[] - ~resolve:(fun _ {message; _} -> message) + ~resolve:(fun _ { message; _ } -> message) ; field "publicKey" ~typ:(non_null public_key) ~args:Arg.[] - ~resolve:(fun _ {public_key; _} -> Public_key.compress public_key) + ~resolve:(fun _ { public_key; _ } -> Public_key.compress public_key) ; field "c" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun _ {c; _} -> Consensus_vrf.Scalar.to_string c) + ~resolve:(fun _ { c; _ } -> Consensus_vrf.Scalar.to_string c) ; field "s" ~typ:(non_null string) ~args:Arg.[] - ~resolve:(fun _ {s; _} -> Consensus_vrf.Scalar.to_string s) + ~resolve:(fun _ { s; _ } -> Consensus_vrf.Scalar.to_string s) ; field "scaledMessageHash" ~typ:(non_null (list (non_null string))) ~doc:"A group element represented as 2 field elements" ~args:Arg.[] - ~resolve:(fun _ {scaled_message_hash; _} -> - Consensus_vrf.Group.to_string_list_exn scaled_message_hash ) + ~resolve:(fun _ { scaled_message_hash; _ } -> + Consensus_vrf.Group.to_string_list_exn scaled_message_hash) ; field "vrfThreshold" ~typ:vrf_threshold ~args:Arg.[] - ~resolve:(fun _ {vrf_threshold; _} -> vrf_threshold) + ~resolve:(fun _ { vrf_threshold; _ } -> vrf_threshold) ; field "vrfOutput" ~typ:string ~doc: "The vrf output 
derived from the evaluation witness. If null, \ the vrf witness was invalid." ~args:Arg.[] - ~resolve:(fun {ctx= mina; _} t -> + ~resolve:(fun { ctx = mina; _ } t -> let vrf_opt = match t.vrf_output with | Some vrf -> @@ -2416,7 +2483,7 @@ module Types = struct |> Option.map ~f:Consensus_vrf.Output.truncate in Option.map ~f:Consensus_vrf.Output.Truncated.to_base58_check - vrf_opt ) + vrf_opt) ; field "vrfOutputFractional" ~typ:float ~doc: "The vrf output derived from the evaluation witness, as a \ @@ -2424,7 +2491,7 @@ module Types = struct (1 - (1 / 4)^(delegated_balance / total_stake)). If null, the \ vrf witness was invalid." ~args:Arg.[] - ~resolve:(fun {ctx= mina; _} t -> + ~resolve:(fun { ctx = mina; _ } t -> match t.vrf_output_fractional with | Some f -> Some f @@ -2444,18 +2511,19 @@ module Types = struct Option.map ~f:(fun vrf -> Consensus_vrf.Output.Truncated.to_fraction vrf - |> Bignum.to_float ) - vrf_opt ) + |> Bignum.to_float) + vrf_opt) ; field "thresholdMet" ~typ:bool ~doc: "Whether the threshold to produce a block was met, if specified" ~args: Arg. 
[ arg "input" ~doc:"Override for delegation threshold" - ~typ:Input.vrf_threshold ] - ~resolve:(fun {ctx= mina; _} t input -> + ~typ:Input.vrf_threshold + ] + ~resolve:(fun { ctx = mina; _ } t input -> match input with - | Some {delegated_stake; total_stake} -> + | Some { delegated_stake; total_stake } -> let constraint_constants = (Mina_lib.config mina).precomputed_values .constraint_constants @@ -2464,7 +2532,8 @@ module Types = struct ~constraint_constants t ~delegated_stake ~total_stake) .threshold_met | None -> - t.threshold_met ) ] ) + t.threshold_met) + ]) end module Subscriptions = struct @@ -2476,9 +2545,9 @@ module Subscriptions = struct ~deprecated:NotDeprecated ~typ:(non_null Types.sync_status) ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} -> + ~resolve:(fun { ctx = coda; _ } -> Mina_lib.sync_status coda |> Mina_incremental.Status.to_pipe - |> Deferred.Result.return ) + |> Deferred.Result.return) let new_block = subscription_field "newBlock" @@ -2491,10 +2560,11 @@ module Subscriptions = struct ~args: Arg. 
[ arg "publicKey" ~doc:"Public key that is included in the block" - ~typ:Types.Input.public_key_arg ] - ~resolve:(fun {ctx= coda; _} public_key -> + ~typ:Types.Input.public_key_arg + ] + ~resolve:(fun { ctx = coda; _ } public_key -> Deferred.Result.return - @@ Mina_commands.Subscriptions.new_block coda public_key ) + @@ Mina_commands.Subscriptions.new_block coda public_key) let chain_reorganization = subscription_field "chainReorganization" @@ -2503,21 +2573,19 @@ module Subscriptions = struct trivial extension of the existing one" ~typ:(non_null Types.chain_reorganization_status) ~args:Arg.[] - ~resolve:(fun {ctx= coda; _} -> + ~resolve:(fun { ctx = coda; _ } -> Deferred.Result.return - @@ Mina_commands.Subscriptions.reorganization coda ) + @@ Mina_commands.Subscriptions.reorganization coda) - let commands = [new_sync_update; new_block; chain_reorganization] + let commands = [ new_sync_update; new_block; chain_reorganization ] end module Mutations = struct open Schema - let create_account_resolver {ctx= t; _} () password = + let create_account_resolver { ctx = t; _ } () password = let password = lazy (return (Bytes.of_string password)) in - let%map pk = - Mina_lib.wallets t |> Secrets.Wallets.generate_new ~password - in + let%map pk = Mina_lib.wallets t |> Secrets.Wallets.generate_new ~password in Mina_lib.subscriptions t |> Mina_lib.Subscriptions.add_new_subscription ~pk ; Result.return pk @@ -2528,27 +2596,27 @@ module Mutations = struct daemon" ~deprecated:(Deprecated (Some "use createAccount instead")) ~typ:(non_null Types.Payload.create_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.create_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.create_account) ] ~resolve:create_account_resolver let create_account = io_field "createAccount" ~doc: - "Create a new account - this will create a new keypair and store it \ - in the daemon" + "Create a new account - this will create a new keypair and store it in \ + the daemon" ~typ:(non_null 
Types.Payload.create_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.create_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.create_account) ] ~resolve:create_account_resolver let create_hd_account : (Mina_lib.t, unit) field = io_field "createHDAccount" ~doc:Secrets.Hardware_wallets.create_hd_account_summary ~typ:(non_null Types.Payload.create_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.create_hd_account)] - ~resolve:(fun {ctx= coda; _} () hd_index -> - Mina_lib.wallets coda |> Secrets.Wallets.create_hd_account ~hd_index ) + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.create_hd_account) ] + ~resolve:(fun { ctx = coda; _ } () hd_index -> + Mina_lib.wallets coda |> Secrets.Wallets.create_hd_account ~hd_index) - let unlock_account_resolver {ctx= t; _} () (password, pk) = + let unlock_account_resolver { ctx = t; _ } () (password, pk) = let password = lazy (return (Bytes.of_string password)) in match%map Mina_lib.wallets t |> Secrets.Wallets.unlock ~needle:pk ~password @@ -2565,17 +2633,17 @@ module Mutations = struct ~doc:"Allow transactions to be sent from the unlocked account" ~deprecated:(Deprecated (Some "use unlockAccount instead")) ~typ:(non_null Types.Payload.unlock_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.unlock_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.unlock_account) ] ~resolve:unlock_account_resolver let unlock_account = io_field "unlockAccount" ~doc:"Allow transactions to be sent from the unlocked account" ~typ:(non_null Types.Payload.unlock_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.unlock_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.unlock_account) ] ~resolve:unlock_account_resolver - let lock_account_resolver {ctx= t; _} () pk = + let lock_account_resolver { ctx = t; _ } () pk = Mina_lib.wallets t |> Secrets.Wallets.lock ~needle:pk ; pk @@ -2584,23 +2652,23 @@ module Mutations = struct ~doc:"Lock an unlocked account to prevent 
transaction being sent from it" ~deprecated:(Deprecated (Some "use lockAccount instead")) ~typ:(non_null Types.Payload.lock_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.lock_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.lock_account) ] ~resolve:lock_account_resolver let lock_account = field "lockAccount" ~doc:"Lock an unlocked account to prevent transaction being sent from it" ~typ:(non_null Types.Payload.lock_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.lock_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.lock_account) ] ~resolve:lock_account_resolver - let delete_account_resolver {ctx= coda; _} () public_key = + let delete_account_resolver { ctx = coda; _ } () public_key = let open Deferred.Result.Let_syntax in let wallets = Mina_lib.wallets coda in let%map () = Deferred.Result.map_error ~f:(fun `Not_found -> - "Could not find account with specified public key" ) + "Could not find account with specified public key") (Secrets.Wallets.delete wallets public_key) in public_key @@ -2610,25 +2678,24 @@ module Mutations = struct ~doc:"Delete the private key for an account that you track" ~deprecated:(Deprecated (Some "use deleteAccount instead")) ~typ:(non_null Types.Payload.delete_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.delete_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.delete_account) ] ~resolve:delete_account_resolver let delete_account = io_field "deleteAccount" ~doc:"Delete the private key for an account that you track" ~typ:(non_null Types.Payload.delete_account) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.delete_account)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.delete_account) ] ~resolve:delete_account_resolver - let reload_account_resolver {ctx= coda; _} () = + let reload_account_resolver { ctx = coda; _ } () = let%map _ = Secrets.Wallets.reload ~logger:(Logger.create ()) (Mina_lib.wallets coda) in Ok true let reload_wallets = - io_field 
"reloadWallets" - ~doc:"Reload tracked account information from disk" + io_field "reloadWallets" ~doc:"Reload tracked account information from disk" ~deprecated:(Deprecated (Some "use reloadAccounts instead")) ~typ:(non_null Types.Payload.reload_accounts) ~args:Arg.[] @@ -2642,8 +2709,7 @@ module Mutations = struct ~resolve:reload_account_resolver let import_account = - io_field "importAccount" - ~doc:"Reload tracked account information from disk" + io_field "importAccount" ~doc:"Reload tracked account information from disk" ~typ:(non_null Types.Payload.import_account) ~args: Arg. @@ -2653,13 +2719,14 @@ module Mutations = struct working directory." ~typ:(non_null string) ; arg "password" ~doc:"Password for the account to import" - ~typ:(non_null string) ] - ~resolve:(fun {ctx= coda; _} () privkey_path password -> + ~typ:(non_null string) + ] + ~resolve:(fun { ctx = coda; _ } () privkey_path password -> let open Deferred.Result.Let_syntax in let password = Lazy.return (Deferred.return (Bytes.of_string password)) in - let%bind ({Keypair.public_key; _} as keypair) = + let%bind ({ Keypair.public_key; _ } as keypair) = Secrets.Keypair.read ~privkey_path ~password |> Deferred.Result.map_error ~f:Secrets.Privkey_error.to_string in @@ -2672,20 +2739,20 @@ module Mutations = struct let%map.Async.Deferred pk = Secrets.Wallets.import_keypair wallets keypair ~password in - Ok (pk, false) ) + Ok (pk, false)) let reset_trust_status = io_field "resetTrustStatus" ~doc:"Reset trust status for all peers at a given IP address" ~typ:(list (non_null Types.Payload.trust_status)) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.reset_trust_status)] - ~resolve:(fun {ctx= coda; _} () ip_address_input -> + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.reset_trust_status) ] + ~resolve:(fun { ctx = coda; _ } () ip_address_input -> let open Deferred.Result.Let_syntax in let%map ip_address = Deferred.return @@ Types.Arguments.ip_address ~name:"ip_address" ip_address_input in - Some 
(Mina_commands.reset_trust_status coda ip_address) ) + Some (Mina_commands.reset_trust_status coda ip_address)) let send_user_command coda user_command_input = match @@ -2695,8 +2762,9 @@ module Mutations = struct match%map f with | Ok user_command -> Ok - { Types.UserCommand.With_status.data= user_command - ; status= Unknown } + { Types.UserCommand.With_status.data = user_command + ; status = Unknown + } | Error e -> Error ("Couldn't send user_command: " ^ Error.to_string_hum e) ) | `Bootstrapping -> @@ -2734,7 +2802,7 @@ module Mutations = struct Option.value_map memo ~default:(Ok Signed_command_memo.empty) ~f:(fun memo -> result_of_exn Signed_command_memo.create_from_string_exn memo - ~error:"Invalid `memo` provided." ) + ~error:"Invalid `memo` provided.") in User_command_input.create ~signer ~fee ~fee_token ~fee_payer_pk ?nonce:nonce_opt ~valid_until ~memo ~body ~sign_choice () @@ -2760,8 +2828,9 @@ module Mutations = struct in let%map cmd = send_user_command coda user_command_input in Types.UserCommand.With_status.map cmd ~f:(fun cmd -> - { With_hash.data= cmd - ; hash= Transaction_hash.hash_command (Signed_command cmd) } ) + { With_hash.data = cmd + ; hash = Transaction_hash.hash_command (Signed_command cmd) + }) let send_unsigned_user_command ~coda ~nonce_opt ~signer ~memo ~fee ~fee_token ~fee_payer_pk ~valid_until ~body = @@ -2781,12 +2850,13 @@ module Mutations = struct in let%map cmd = send_user_command coda user_command_input in Types.UserCommand.With_status.map cmd ~f:(fun cmd -> - { With_hash.data= cmd - ; hash= Transaction_hash.hash_command (Signed_command cmd) } ) + { With_hash.data = cmd + ; hash = Transaction_hash.hash_command (Signed_command cmd) + }) let export_logs ~coda basename_opt = let open Mina_lib in - let Config.{conf_dir; _} = Mina_lib.config coda in + let Config.{ conf_dir; _ } = Mina_lib.config coda in Conf_dir.export_logs_to_tar ?basename:basename_opt ~conf_dir let send_delegation = @@ -2796,13 +2866,14 @@ module Mutations = struct 
~args: Arg. [ arg "input" ~typ:(non_null Types.Input.send_delegation) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= coda; _} () (from, to_, fee, valid_until, memo, nonce_opt) + (fun { ctx = coda; _ } () (from, to_, fee, valid_until, memo, nonce_opt) signature -> let body = Signed_command_payload.Body.Stake_delegation - (Set_delegate {delegator= from; new_delegate= to_}) + (Set_delegate { delegator = from; new_delegate = to_ }) in let fee_token = Token_id.default in match signature with @@ -2814,7 +2885,7 @@ module Mutations = struct let%bind signature = signature |> Deferred.return in send_signed_user_command ~coda ~nonce_opt ~signer:from ~memo ~fee ~fee_token ~fee_payer_pk:from ~valid_until ~body ~signature - |> Deferred.Result.map ~f:Types.UserCommand.mk_user_command ) + |> Deferred.Result.map ~f:Types.UserCommand.mk_user_command) let send_payment = io_field "sendPayment" ~doc:"Send a payment" @@ -2822,17 +2893,19 @@ module Mutations = struct ~args: Arg. 
[ arg "input" ~typ:(non_null Types.Input.send_payment) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= coda; _} () + (fun { ctx = coda; _ } () (from, to_, token_id, amount, fee, valid_until, memo, nonce_opt) signature -> let body = Signed_command_payload.Body.Payment - { source_pk= from - ; receiver_pk= to_ - ; token_id= Option.value ~default:Token_id.default token_id - ; amount= Amount.of_uint64 amount } + { source_pk = from + ; receiver_pk = to_ + ; token_id = Option.value ~default:Token_id.default token_id + ; amount = Amount.of_uint64 amount + } in let fee_token = Token_id.default in match signature with @@ -2843,7 +2916,7 @@ module Mutations = struct | Some signature -> send_signed_user_command ~coda ~nonce_opt ~signer:from ~memo ~fee ~fee_token ~fee_payer_pk:from ~valid_until ~body ~signature - |> Deferred.Result.map ~f:Types.UserCommand.mk_user_command ) + |> Deferred.Result.map ~f:Types.UserCommand.mk_user_command) let create_token = io_field "createToken" ~doc:"Create a new token" @@ -2851,18 +2924,20 @@ module Mutations = struct ~args: Arg. [ arg "input" ~typ:(non_null Types.Input.create_token) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= coda; _} () + (fun { ctx = coda; _ } () (fee_payer_pk, token_owner, fee, valid_until, memo, nonce_opt) signature -> let fee_payer_pk = Option.value ~default:token_owner fee_payer_pk in let body = Signed_command_payload.Body.Create_new_token - { token_owner_pk= token_owner - ; disable_new_accounts= + { token_owner_pk = token_owner + ; disable_new_accounts = (* TODO(5274): Expose when permissions commands are merged. 
*) - false } + false + } in let fee_token = Token_id.default in match signature with @@ -2871,7 +2946,7 @@ module Mutations = struct ~memo ~fee ~fee_token ~fee_payer_pk ~valid_until ~body | Some signature -> send_signed_user_command ~coda ~nonce_opt ~signer:token_owner ~memo - ~fee ~fee_token ~fee_payer_pk ~valid_until ~body ~signature ) + ~fee ~fee_token ~fee_payer_pk ~valid_until ~body ~signature) let create_token_account = io_field "createTokenAccount" ~doc:"Create a new account for a token" @@ -2879,9 +2954,10 @@ module Mutations = struct ~args: Arg. [ arg "input" ~typ:(non_null Types.Input.create_token_account) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= coda; _} () + (fun { ctx = coda; _ } () ( token_owner , token , receiver @@ -2892,12 +2968,13 @@ module Mutations = struct , nonce_opt ) signature -> let body = Signed_command_payload.Body.Create_token_account - { token_id= token - ; token_owner_pk= token_owner - ; receiver_pk= receiver - ; account_disabled= + { token_id = token + ; token_owner_pk = token_owner + ; receiver_pk = receiver + ; account_disabled = (* TODO(5274): Expose when permissions commands are merged. *) - false } + false + } in let fee_token = Token_id.default in let fee_payer_pk = Option.value ~default:receiver fee_payer in @@ -2906,9 +2983,8 @@ module Mutations = struct send_unsigned_user_command ~coda ~nonce_opt ~signer:fee_payer_pk ~memo ~fee ~fee_token ~fee_payer_pk ~valid_until ~body | Some signature -> - send_signed_user_command ~coda ~nonce_opt ~signer:fee_payer_pk - ~memo ~fee ~fee_token ~fee_payer_pk ~valid_until ~body ~signature - ) + send_signed_user_command ~coda ~nonce_opt ~signer:fee_payer_pk ~memo + ~fee ~fee_token ~fee_payer_pk ~valid_until ~body ~signature) let mint_tokens = io_field "mintTokens" ~doc:"Mint more of a token" @@ -2916,9 +2992,10 @@ module Mutations = struct ~args: Arg. 
[ arg "input" ~typ:(non_null Types.Input.mint_tokens) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= coda; _} () + (fun { ctx = coda; _ } () ( token_owner , token , receiver @@ -2929,43 +3006,45 @@ module Mutations = struct , nonce_opt ) signature -> let body = Signed_command_payload.Body.Mint_tokens - { token_id= token - ; token_owner_pk= token_owner - ; receiver_pk= Option.value ~default:token_owner receiver - ; amount= Amount.of_uint64 amount } + { token_id = token + ; token_owner_pk = token_owner + ; receiver_pk = Option.value ~default:token_owner receiver + ; amount = Amount.of_uint64 amount + } in let fee_token = Token_id.default in match signature with | None -> send_unsigned_user_command ~coda ~nonce_opt ~signer:token_owner - ~memo ~fee ~fee_token ~fee_payer_pk:token_owner ~valid_until - ~body + ~memo ~fee ~fee_token ~fee_payer_pk:token_owner ~valid_until ~body | Some signature -> send_signed_user_command ~coda ~nonce_opt ~signer:token_owner ~memo ~fee ~fee_token ~fee_payer_pk:token_owner ~valid_until ~body - ~signature ) + ~signature) let send_rosetta_transaction = io_field "sendRosettaTransaction" ~doc:"Send a transaction in rosetta format" ~typ:(non_null Types.Payload.send_rosetta_transaction) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.rosetta_transaction)] - ~resolve:(fun {ctx= mina; _} () signed_command -> + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.rosetta_transaction) ] + ~resolve:(fun { ctx = mina; _ } () signed_command -> match%map Mina_lib.add_full_transactions mina - [User_command.Signed_command signed_command] + [ User_command.Signed_command signed_command ] with - | Ok ([(User_command.Signed_command signed_command as transaction)], _) + | Ok ([ (User_command.Signed_command signed_command as transaction) ], _) -> Ok (Types.UserCommand.mk_user_command - { status= Unknown - ; data= - { With_hash.data= signed_command - ; hash= Transaction_hash.hash_command transaction } }) + { status 
= Unknown + ; data = + { With_hash.data = signed_command + ; hash = Transaction_hash.hash_command transaction + } + }) | Error err -> Error (Error.to_string_hum err) - | Ok ([], [(_, diff_error)]) -> + | Ok ([], [ (_, diff_error) ]) -> let diff_error = Network_pool.Transaction_pool.Resource_pool.Diff.Diff_error .to_string_hum diff_error @@ -2974,23 +3053,22 @@ module Mutations = struct (sprintf "Transaction could not be entered into the pool: %s" diff_error) | Ok _ -> - Error - "Internal error: response from transaction pool was malformed" ) + Error "Internal error: response from transaction pool was malformed") let export_logs = io_field "exportLogs" ~doc:"Export daemon logs to tar archive" - ~args:Arg.[arg "basename" ~typ:string] + ~args:Arg.[ arg "basename" ~typ:string ] ~typ:(non_null Types.Payload.export_logs) - ~resolve:(fun {ctx= coda; _} () basename_opt -> + ~resolve:(fun { ctx = coda; _ } () basename_opt -> let%map result = export_logs ~coda basename_opt in Result.map_error result - ~f:(Fn.compose Yojson.Safe.to_string Error_json.error_to_yojson) ) + ~f:(Fn.compose Yojson.Safe.to_string Error_json.error_to_yojson)) let set_staking = field "setStaking" ~doc:"Set keys you wish to stake with" - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.set_staking)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.set_staking) ] ~typ:(non_null Types.Payload.set_staking) - ~resolve:(fun {ctx= coda; _} () pks -> + ~resolve:(fun { ctx = coda; _ } () pks -> let old_block_production_keys = Mina_lib.block_production_pubkeys coda in @@ -3001,7 +3079,7 @@ module Mutations = struct | Some kp -> `Fst (kp, pk) | None -> - `Snd pk ) + `Snd pk) in [%log' info (Mina_lib.top_level_logger coda)] ~metadata: @@ -3009,20 +3087,22 @@ module Mutations = struct , [%to_yojson: Public_key.Compressed.t list] (Public_key.Compressed.Set.to_list old_block_production_keys) ) - ; ("new", [%to_yojson: Public_key.Compressed.t list] pks) ] + ; ("new", [%to_yojson: Public_key.Compressed.t list] 
pks) + ] !"Block production key replacement; old: $old, new: $new" ; ignore @@ Mina_lib.replace_block_production_keypairs coda (Keypair.And_compressed_pk.Set.of_list unlocked) ; ( Public_key.Compressed.Set.to_list old_block_production_keys , locked - , List.map ~f:Tuple.T2.get2 unlocked ) ) + , List.map ~f:Tuple.T2.get2 unlocked )) let set_coinbase_receiver = field "setCoinbaseReceiver" ~doc:"Set the key to receive coinbases" - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.set_coinbase_receiver)] + ~args: + Arg.[ arg "input" ~typ:(non_null Types.Input.set_coinbase_receiver) ] ~typ:(non_null Types.Payload.set_coinbase_receiver) - ~resolve:(fun {ctx= mina; _} () coinbase_receiver -> + ~resolve:(fun { ctx = mina; _ } () coinbase_receiver -> let old_coinbase_receiver = match Mina_lib.coinbase_receiver mina with | `Producer -> @@ -3038,24 +3118,24 @@ module Mutations = struct `Other pk in Mina_lib.replace_coinbase_receiver mina coinbase_receiver_full ; - (old_coinbase_receiver, coinbase_receiver) ) + (old_coinbase_receiver, coinbase_receiver)) let set_snark_worker = io_field "setSnarkWorker" ~doc:"Set key you wish to snark work with or disable snark working" - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.set_snark_worker)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.set_snark_worker) ] ~typ:(non_null Types.Payload.set_snark_worker) - ~resolve:(fun {ctx= coda; _} () pk -> + ~resolve:(fun { ctx = coda; _ } () pk -> let old_snark_worker_key = Mina_lib.snark_worker_key coda in let%map () = Mina_lib.replace_snark_worker_key coda pk in - Ok old_snark_worker_key ) + Ok old_snark_worker_key) let set_snark_work_fee = result_field "setSnarkWorkFee" ~doc:"Set fee that you will like to receive for doing snark work" - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.set_snark_work_fee)] + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.set_snark_work_fee) ] ~typ:(non_null Types.Payload.set_snark_work_fee) - ~resolve:(fun {ctx= coda; _} () raw_fee -> + 
~resolve:(fun { ctx = coda; _ } () raw_fee -> let open Result.Let_syntax in let%map fee = result_of_exn Currency.Fee.of_uint64 raw_fee @@ -3063,38 +3143,40 @@ module Mutations = struct in let last_fee = Mina_lib.snark_work_fee coda in Mina_lib.set_snark_work_fee coda fee ; - Currency.Fee.to_uint64 last_fee ) + Currency.Fee.to_uint64 last_fee) let set_connection_gating_config = io_field "setConnectionGatingConfig" ~args: Arg. - [arg "input" ~typ:(non_null Types.Input.set_connection_gating_config)] + [ arg "input" ~typ:(non_null Types.Input.set_connection_gating_config) + ] ~doc: "Set the connection gating config, returning the current config after \ the application (which may have failed)" ~typ:(non_null Types.Payload.set_connection_gating_config) - ~resolve:(fun {ctx= coda; _} () config -> + ~resolve:(fun { ctx = coda; _ } () config -> let open Deferred.Result.Let_syntax in let%bind config = Deferred.return config in let open Deferred.Let_syntax in Mina_networking.set_connection_gating_config (Mina_lib.net coda) config - >>| Result.return ) + >>| Result.return) let add_peer = io_field "addPeers" ~args: Arg. 
[ arg "peers" ~typ:(non_null @@ list @@ non_null @@ Types.Input.peer) - ; arg "seed" ~typ:bool ] + ; arg "seed" ~typ:bool + ] ~doc:"Connect to the given peers" ~typ:(non_null @@ list @@ non_null Types.DaemonStatus.peer) - ~resolve:(fun {ctx= coda; _} () peers seed -> + ~resolve:(fun { ctx = coda; _ } () peers seed -> let open Deferred.Result.Let_syntax in let%bind peers = Result.combine_errors peers |> Result.map_error ~f:(fun errs -> - Option.value ~default:"Empty peers list" (List.hd errs) ) + Option.value ~default:"Empty peers list" (List.hd errs)) |> Deferred.return in let net = Mina_lib.net coda in @@ -3108,7 +3190,7 @@ module Mutations = struct | Ok () -> None | Error err -> - Some (Error (Error.to_string_hum err)) ) + Some (Error (Error.to_string_hum err))) in let%map () = match maybe_failure with @@ -3117,21 +3199,23 @@ module Mutations = struct | Some err -> Deferred.return err in - List.map ~f:Network_peer.Peer.to_display peers ) + List.map ~f:Network_peer.Peer.to_display peers) let archive_precomputed_block = io_field "archivePrecomputedBlock" ~args: Arg. [ arg "block" ~doc:"Block encoded in precomputed block format" - ~typ:(non_null Types.Input.precomputed_block) ] + ~typ:(non_null Types.Input.precomputed_block) + ] ~typ: (non_null (obj "Applied" ~fields:(fun _ -> [ field "applied" ~typ:(non_null bool) ~args:Arg.[] - ~resolve:(fun _ _ -> true) ] ))) - ~resolve:(fun {ctx= coda; _} () block -> + ~resolve:(fun _ _ -> true) + ]))) + ~resolve:(fun { ctx = coda; _ } () block -> let open Deferred.Result.Let_syntax in let%bind archive_location = match (Mina_lib.config coda).archive_process_location with @@ -3146,21 +3230,23 @@ module Mutations = struct block |> Deferred.Result.map_error ~f:Error.to_string_hum in - () ) + ()) let archive_extensional_block = io_field "archiveExtensionalBlock" ~args: Arg. 
[ arg "block" ~doc:"Block encoded in extensional block format" - ~typ:(non_null Types.Input.extensional_block) ] + ~typ:(non_null Types.Input.extensional_block) + ] ~typ: (non_null (obj "Applied" ~fields:(fun _ -> [ field "applied" ~typ:(non_null bool) ~args:Arg.[] - ~resolve:(fun _ _ -> true) ] ))) - ~resolve:(fun {ctx= coda; _} () block -> + ~resolve:(fun _ _ -> true) + ]))) + ~resolve:(fun { ctx = coda; _ } () block -> let open Deferred.Result.Let_syntax in let%bind archive_location = match (Mina_lib.config coda).archive_process_location with @@ -3175,7 +3261,7 @@ module Mutations = struct block |> Deferred.Result.map_error ~f:Error.to_string_hum in - () ) + ()) let commands = [ add_wallet @@ -3204,7 +3290,8 @@ module Mutations = struct ; add_peer ; archive_precomputed_block ; archive_extensional_block - ; send_rosetta_transaction ] + ; send_rosetta_transaction + ] end module Queries = struct @@ -3223,8 +3310,9 @@ module Queries = struct ~typ:Types.Input.public_key_arg ; arg "hashes" ~doc:"Hashes of the commands to find in the pool" ~typ:(list (non_null string)) - ; arg "ids" ~typ:(list (non_null guid)) ~doc:"Ids of UserCommands" ] - ~resolve:(fun {ctx= coda; _} () opt_pk opt_hashes opt_txns -> + ; arg "ids" ~typ:(list (non_null guid)) ~doc:"Ids of UserCommands" + ] + ~resolve:(fun { ctx = coda; _ } () opt_pk opt_hashes opt_txns -> let transaction_pool = Mina_lib.transaction_pool coda in let resource_pool = Network_pool.Transaction_pool.resource_pool transaction_pool @@ -3246,7 +3334,7 @@ module Queries = struct |> Option.bind ~f: (Network_pool.Transaction_pool.Resource_pool - .find_by_hash resource_pool) ) + .find_by_hash resource_pool)) | None -> [] in @@ -3271,8 +3359,8 @@ module Queries = struct (Signed_command signed_command) in Transaction_hash.User_command_with_valid_signature - .create cmd ) - |> Result.ok ) + .create cmd) + |> Result.ok) | None -> [] in @@ -3287,56 +3375,56 @@ module Queries = struct |> Transaction_hash.User_command_with_valid_signature 
.command |> User_command.fee_payer |> Account_id.public_key - |> Public_key.Compressed.equal pk ) ) ) + |> Public_key.Compressed.equal pk) ) ) |> List.filter_map ~f:(fun x -> let x = - Transaction_hash.User_command_with_valid_signature - .forget_check x + Transaction_hash.User_command_with_valid_signature.forget_check + x in match x.data with | Signed_command data -> Some (Types.UserCommand.mk_user_command - {status= Unknown; data= {x with data}}) + { status = Unknown; data = { x with data } }) | Snapp_command _ -> - None ) ) + None)) let sync_status = io_field "syncStatus" ~doc:"Network sync status" ~args:[] - ~typ:(non_null Types.sync_status) ~resolve:(fun {ctx= coda; _} () -> + ~typ:(non_null Types.sync_status) ~resolve:(fun { ctx = coda; _ } () -> let open Deferred.Let_syntax in (* pull out sync status from status, so that result here agrees with status; see issue #8251 - *) - let%map {sync_status; _} = + *) + let%map { sync_status; _ } = Mina_commands.get_status ~flag:`Performance coda in - Ok sync_status ) + Ok sync_status) let daemon_status = io_field "daemonStatus" ~doc:"Get running daemon status" ~args:[] - ~typ:(non_null Types.DaemonStatus.t) ~resolve:(fun {ctx= coda; _} () -> - Mina_commands.get_status ~flag:`Performance coda >>| Result.return ) + ~typ:(non_null Types.DaemonStatus.t) ~resolve:(fun { ctx = coda; _ } () -> + Mina_commands.get_status ~flag:`Performance coda >>| Result.return) let trust_status = field "trustStatus" ~typ:(list (non_null Types.Payload.trust_status)) - ~args:Arg.[arg "ipAddress" ~typ:(non_null string)] + ~args:Arg.[ arg "ipAddress" ~typ:(non_null string) ] ~doc:"Trust status for an IPv4 or IPv6 address" - ~resolve:(fun {ctx= coda; _} () (ip_addr_string : string) -> + ~resolve:(fun { ctx = coda; _ } () (ip_addr_string : string) -> match Types.Arguments.ip_address ~name:"ipAddress" ip_addr_string with | Ok ip_addr -> Some (Mina_commands.get_trust_status coda ip_addr) | Error _ -> - None ) + None) let trust_status_all = field 
"trustStatusAll" ~typ:(non_null @@ list @@ non_null Types.Payload.trust_status) ~args:Arg.[] ~doc:"IP address and trust status for all peers" - ~resolve:(fun {ctx= coda; _} () -> - Mina_commands.get_trust_status_all coda ) + ~resolve:(fun { ctx = coda; _ } () -> + Mina_commands.get_trust_status_all coda) let version = field "version" ~typ:string @@ -3344,26 +3432,27 @@ module Queries = struct ~doc:"The version of the node (git commit hash)" ~resolve:(fun _ _ -> Some Mina_version.commit_id) - let tracked_accounts_resolver {ctx= coda; _} () = + let tracked_accounts_resolver { ctx = coda; _ } () = let wallets = Mina_lib.wallets coda in let block_production_pubkeys = Mina_lib.block_production_pubkeys coda in let best_tip_ledger = Mina_lib.best_ledger coda in wallets |> Secrets.Wallets.pks |> List.map ~f:(fun pk -> - { Types.AccountObj.account= + { Types.AccountObj.account = Types.AccountObj.Partial_account.of_pk coda pk - ; locked= Secrets.Wallets.check_locked wallets ~needle:pk - ; is_actively_staking= + ; locked = Secrets.Wallets.check_locked wallets ~needle:pk + ; is_actively_staking = Public_key.Compressed.Set.mem block_production_pubkeys pk - ; path= Secrets.Wallets.get_path wallets pk - ; index= + ; path = Secrets.Wallets.get_path wallets pk + ; index = ( match best_tip_ledger with | `Active ledger -> Option.try_with (fun () -> Ledger.index_of_account_exn ledger - (Account_id.create pk Token_id.default) ) + (Account_id.create pk Token_id.default)) | _ -> - None ) } ) + None ) + }) let owned_wallets = field "ownedWallets" @@ -3380,7 +3469,7 @@ module Queries = struct ~args:Arg.[] ~resolve:tracked_accounts_resolver - let account_resolver {ctx= coda; _} () pk = + let account_resolver { ctx = coda; _ } () pk = Some (Types.AccountObj.lift coda pk (Types.AccountObj.Partial_account.of_pk coda pk)) @@ -3392,7 +3481,8 @@ module Queries = struct ~args: Arg. 
[ arg "publicKey" ~doc:"Public key of account being retrieved" - ~typ:(non_null Types.Input.public_key_arg) ] + ~typ:(non_null Types.Input.public_key_arg) + ] ~resolve:account_resolver let account = @@ -3404,12 +3494,13 @@ module Queries = struct ~typ:(non_null Types.Input.public_key_arg) ; arg' "token" ~doc:"Token of account being retrieved (defaults to CODA)" - ~typ:Types.Input.token_id_arg ~default:Token_id.default ] - ~resolve:(fun {ctx= coda; _} () pk token -> + ~typ:Types.Input.token_id_arg ~default:Token_id.default + ] + ~resolve:(fun { ctx = coda; _ } () pk token -> Some ( Account_id.create pk token |> Types.AccountObj.Partial_account.of_account_id coda - |> Types.AccountObj.lift coda pk ) ) + |> Types.AccountObj.lift coda pk )) let accounts_for_pk = field "accounts" ~doc:"Find all accounts for a public key" @@ -3417,29 +3508,29 @@ module Queries = struct ~args: Arg. [ arg "publicKey" ~doc:"Public key to find accounts for" - ~typ:(non_null Types.Input.public_key_arg) ] - ~resolve:(fun {ctx= coda; _} () pk -> + ~typ:(non_null Types.Input.public_key_arg) + ] + ~resolve:(fun { ctx = coda; _ } () pk -> match coda |> Mina_lib.best_tip |> Participating_state.active |> Option.map ~f:(fun tip -> ( Transition_frontier.Breadcrumb.staged_ledger tip |> Staged_ledger.ledger - , tip ) ) + , tip )) with | Some (ledger, breadcrumb) -> let tokens = Ledger.tokens ledger pk |> Set.to_list in List.filter_map tokens ~f:(fun token -> let open Option.Let_syntax in let%bind location = - Ledger.location_of_account ledger - (Account_id.create pk token) + Ledger.location_of_account ledger (Account_id.create pk token) in let%map account = Ledger.get ledger location in Types.AccountObj.Partial_account.of_full_account ~breadcrumb account - |> Types.AccountObj.lift coda pk ) + |> Types.AccountObj.lift coda pk) | None -> - [] ) + []) let token_owner = field "tokenOwner" ~doc:"Find the public key that owns a given token" @@ -3447,30 +3538,32 @@ module Queries = struct ~args: Arg. 
[ arg "token" ~doc:"Token to find the owner for" - ~typ:(non_null Types.Input.token_id_arg) ] - ~resolve:(fun {ctx= coda; _} () token -> + ~typ:(non_null Types.Input.token_id_arg) + ] + ~resolve:(fun { ctx = coda; _ } () token -> coda |> Mina_lib.best_tip |> Participating_state.active |> Option.bind ~f:(fun tip -> let ledger = Transition_frontier.Breadcrumb.staged_ledger tip |> Staged_ledger.ledger in - Ledger.token_owner ledger token ) ) + Ledger.token_owner ledger token)) let transaction_status = result_field "transactionStatus" ~doc:"Get the status of a transaction" ~typ:(non_null Types.transaction_status) - ~args:Arg.[arg "payment" ~typ:(non_null guid) ~doc:"Id of a UserCommand"] - ~resolve:(fun {ctx= coda; _} () serialized_payment -> + ~args: + Arg.[ arg "payment" ~typ:(non_null guid) ~doc:"Id of a UserCommand" ] + ~resolve:(fun { ctx = coda; _ } () serialized_payment -> let open Result.Let_syntax in let deserialize_payment serialized_payment = result_of_or_error (Signed_command.of_base58_check serialized_payment) ~error:"Invalid payment provided" |> Result.map ~f:(fun cmd -> - { With_hash.data= cmd - ; hash= Transaction_hash.hash_command (Signed_command cmd) } - ) + { With_hash.data = cmd + ; hash = Transaction_hash.hash_command (Signed_command cmd) + }) in let%bind payment = deserialize_payment serialized_payment in let frontier_broadcast_pipe = Mina_lib.transition_frontier coda in @@ -3478,54 +3571,56 @@ module Queries = struct Result.map_error (Transaction_inclusion_status.get_status ~frontier_broadcast_pipe ~transaction_pool payment.data) - ~f:Error.to_string_hum ) + ~f:Error.to_string_hum) let current_snark_worker = field "currentSnarkWorker" ~typ:Types.snark_worker ~args:Arg.[] ~doc:"Get information about the current snark worker" - ~resolve:(fun {ctx= coda; _} _ -> + ~resolve:(fun { ctx = coda; _ } _ -> Option.map (Mina_lib.snark_worker_key coda) ~f:(fun k -> - (k, Mina_lib.snark_work_fee coda) ) ) + (k, Mina_lib.snark_work_fee coda))) let 
genesis_block = field "genesisBlock" ~typ:(non_null Types.block) ~args:[] - ~doc:"Get the genesis block" ~resolve:(fun {ctx= coda; _} () -> + ~doc:"Get the genesis block" ~resolve:(fun { ctx = coda; _ } () -> let open Mina_state in let { Precomputed_values.genesis_ledger ; constraint_constants ; consensus_constants ; genesis_epoch_data ; proof_data - ; _ } = + ; _ + } = (Mina_lib.config coda).precomputed_values in - let {With_hash.data= genesis_state; hash} = + let { With_hash.data = genesis_state; hash } = Genesis_protocol_state.t ~genesis_ledger:(Genesis_ledger.Packed.t genesis_ledger) ~genesis_epoch_data ~constraint_constants ~consensus_constants in let winner = fst Consensus_state_hooks.genesis_winner in - { With_hash.data= - { Filtered_external_transition.creator= winner + { With_hash.data = + { Filtered_external_transition.creator = winner ; winner - ; protocol_state= - { previous_state_hash= + ; protocol_state = + { previous_state_hash = Protocol_state.previous_state_hash genesis_state - ; blockchain_state= + ; blockchain_state = Protocol_state.blockchain_state genesis_state - ; consensus_state= Protocol_state.consensus_state genesis_state + ; consensus_state = Protocol_state.consensus_state genesis_state } - ; transactions= - { commands= [] - ; fee_transfers= [] - ; coinbase= constraint_constants.coinbase_amount - ; coinbase_receiver= - Some (fst Consensus_state_hooks.genesis_winner) } - ; snark_jobs= [] - ; proof= + ; transactions = + { commands = [] + ; fee_transfers = [] + ; coinbase = constraint_constants.coinbase_amount + ; coinbase_receiver = + Some (fst Consensus_state_hooks.genesis_winner) + } + ; snark_jobs = [] + ; proof = ( match proof_data with - | Some {genesis_proof; _} -> + | Some { genesis_proof; _ } -> genesis_proof | None -> (* It's nearly never useful to have a specific genesis @@ -3534,8 +3629,10 @@ module Queries = struct expensive proof generation step if we don't have one available. 
*) - Proof.blockchain_dummy ) } - ; hash } ) + Proof.blockchain_dummy ) + } + ; hash + }) (* used by best_chain, block below *) let block_of_breadcrumb coda breadcrumb = @@ -3550,10 +3647,11 @@ module Queries = struct transition in With_hash.Stable.Latest. - { data= + { data = Filtered_external_transition.of_transition transition `All transactions - ; hash } + ; hash + } let best_chain = io_field "bestChain" @@ -3568,24 +3666,25 @@ module Queries = struct "The maximum number of blocks to return. If there are more \ blocks in the transition frontier from root to tip, the n \ blocks closest to the best tip will be returned" - ~typ:int ] - ~resolve:(fun {ctx= coda; _} () max_length -> + ~typ:int + ] + ~resolve:(fun { ctx = coda; _ } () max_length -> match Mina_lib.best_chain ?max_length coda with | Some best_chain -> let%map blocks = Deferred.List.map best_chain ~f:(fun bc -> - Deferred.return @@ block_of_breadcrumb coda bc ) + Deferred.return @@ block_of_breadcrumb coda bc) in Ok (Some blocks) | None -> return - @@ Error "Could not obtain best chain from transition frontier" ) + @@ Error "Could not obtain best chain from transition frontier") let block = result_field2 "block" ~doc: - "Retrieve a block with the given state hash or height, if contained \ - in the transition frontier." + "Retrieve a block with the given state hash or height, if contained in \ + the transition frontier." ~typ:(non_null Types.block) ~args: Arg. 
@@ -3595,7 +3694,7 @@ module Queries = struct ~doc:"The height of the desired block in the best chain" ~typ:int ] ~resolve: - (fun {ctx= coda; _} () (state_hash_base58_opt : string option) + (fun { ctx = coda; _ } () (state_hash_base58_opt : string option) (height_opt : int option) -> let open Result.Let_syntax in let get_transition_frontier () = @@ -3626,7 +3725,7 @@ module Queries = struct empirically, conversion does not raise even if - the number is negative - the number is not representable using 32 bits - *) + *) Unsigned.UInt32.of_int height in let%bind transition_frontier = get_transition_frontier () in @@ -3642,12 +3741,12 @@ module Queries = struct Mina_transition.External_transition.Validated .blockchain_length validated_transition in - Unsigned.UInt32.equal block_height height_uint32 ) + Unsigned.UInt32.equal block_height height_uint32) |> Result.of_option ~error: (sprintf - "Could not find block in transition frontier with \ - height %d" + "Could not find block in transition frontier with height \ + %d" height) in block_of_breadcrumb coda desired_breadcrumb @@ -3658,40 +3757,39 @@ module Queries = struct | None, Some height -> block_from_height height | None, None | Some _, Some _ -> - Error "Must provide exactly one of state hash, height" ) + Error "Must provide exactly one of state hash, height") let initial_peers = field "initialPeers" ~doc:"List of peers that the daemon first used to connect to the network" ~args:Arg.[] ~typ:(non_null @@ list @@ non_null string) - ~resolve:(fun {ctx= coda; _} () -> - List.map (Mina_lib.initial_peers coda) ~f:Mina_net2.Multiaddr.to_string - ) + ~resolve:(fun { ctx = coda; _ } () -> + List.map (Mina_lib.initial_peers coda) ~f:Mina_net2.Multiaddr.to_string) let get_peers = io_field "getPeers" ~doc:"List of peers that the daemon is currently connected to" ~args:Arg.[] ~typ:(non_null @@ list @@ non_null Types.DaemonStatus.peer) - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> let%map peers = 
Mina_networking.peers (Mina_lib.net coda) in - Ok (List.map ~f:Network_peer.Peer.to_display peers) ) + Ok (List.map ~f:Network_peer.Peer.to_display peers)) let snark_pool = field "snarkPool" ~doc:"List of completed snark works that have the lowest fee so far" ~args:Arg.[] ~typ:(non_null @@ list @@ non_null Types.completed_work) - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> Mina_lib.snark_pool coda |> Network_pool.Snark_pool.resource_pool - |> Network_pool.Snark_pool.Resource_pool.all_completed_work ) + |> Network_pool.Snark_pool.Resource_pool.all_completed_work) let pending_snark_work = field "pendingSnarkWork" ~doc:"List of snark works that are yet to be done" ~args:Arg.[] ~typ:(non_null @@ list @@ non_null Types.pending_work) - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> let snark_job_state = Mina_lib.snark_job_state coda in let snark_pool = Mina_lib.snark_pool coda in let fee_opt = @@ -3699,7 +3797,7 @@ module Queries = struct Option.map (snark_worker_key coda) ~f:(fun _ -> snark_work_fee coda)) in let (module S) = Mina_lib.work_selection_method coda in - S.pending_work_statements ~snark_pool ~fee_opt snark_job_state ) + S.pending_work_statements ~snark_pool ~fee_opt snark_job_state) let genesis_constants = field "genesisConstants" @@ -3713,14 +3811,14 @@ module Queries = struct let time_offset = field "timeOffset" ~doc: - "The time offset in seconds used to convert real times into \ - blockchain times" + "The time offset in seconds used to convert real times into blockchain \ + times" ~args:Arg.[] ~typ:(non_null int) - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> Block_time.Controller.get_time_offset ~logger:(Mina_lib.config coda).logger - |> Time.Span.to_sec |> Float.to_int ) + |> Time.Span.to_sec |> Float.to_int) let next_available_token = field "nextAvailableToken" @@ -3729,12 +3827,12 @@ module Queries = struct allocated sequentially, so all lower token IDs have been 
allocated" ~args:Arg.[] ~typ:(non_null Types.token_id) - ~resolve:(fun {ctx= coda; _} () -> + ~resolve:(fun { ctx = coda; _ } () -> coda |> Mina_lib.best_tip |> Participating_state.active |> Option.map ~f:(fun tip -> Transition_frontier.Breadcrumb.staged_ledger tip - |> Staged_ledger.ledger |> Ledger.next_available_token ) - |> Option.value ~default:Token_id.(next default) ) + |> Staged_ledger.ledger |> Ledger.next_available_token) + |> Option.value ~default:Token_id.(next default)) let connection_gating_config = io_field "connectionGatingConfig" @@ -3743,30 +3841,31 @@ module Queries = struct connections to permit" ~args:Arg.[] ~typ:(non_null Types.Payload.set_connection_gating_config) - ~resolve:(fun {ctx= coda; _} _ -> + ~resolve:(fun { ctx = coda; _ } _ -> let net = Mina_lib.net coda in let%map config = Mina_networking.connection_gating_config net in - Ok config ) + Ok config) let validate_payment = io_field "validatePayment" - ~doc:"Validate the format and signature of a payment" - ~typ:(non_null bool) + ~doc:"Validate the format and signature of a payment" ~typ:(non_null bool) ~args: Arg. 
[ arg "input" ~typ:(non_null Types.Input.send_payment) - ; Types.Input.Fields.signature ] + ; Types.Input.Fields.signature + ] ~resolve: - (fun {ctx= mina; _} () + (fun { ctx = mina; _ } () (from, to_, token_id, amount, fee, valid_until, memo, nonce_opt) signature -> let open Deferred.Result.Let_syntax in let body = Signed_command_payload.Body.Payment - { source_pk= from - ; receiver_pk= to_ - ; token_id= Option.value ~default:Token_id.default token_id - ; amount= Amount.of_uint64 amount } + { source_pk = from + ; receiver_pk = to_ + ; token_id = Option.value ~default:Token_id.default token_id + ; amount = Amount.of_uint64 amount + } in let fee_token = Token_id.default in let%bind signature = @@ -3790,16 +3889,16 @@ module Queries = struct user_command_input |> Deferred.Result.map_error ~f:Error.to_string_hum in - Signed_command.check_signature user_command ) + Signed_command.check_signature user_command) let runtime_config = field "runtimeConfig" ~doc:"The runtime configuration passed to the daemon at start-up" ~typ:(non_null Types.json) ~args:Arg.[] - ~resolve:(fun {ctx= mina; _} () -> + ~resolve:(fun { ctx = mina; _ } () -> Mina_lib.runtime_config mina - |> Runtime_config.to_yojson |> Yojson.Safe.to_basic ) + |> Runtime_config.to_yojson |> Yojson.Safe.to_basic) let evaluate_vrf = io_field "evaluateVrf" @@ -3812,14 +3911,15 @@ module Queries = struct Arg. 
[ arg "message" ~typ:(non_null Types.Input.vrf_message) ; arg "publicKey" ~typ:(non_null Types.Input.public_key_arg) - ; arg "vrfThreshold" ~typ:Types.Input.vrf_threshold ] - ~resolve:(fun {ctx= mina; _} () message public_key vrf_threshold -> + ; arg "vrfThreshold" ~typ:Types.Input.vrf_threshold + ] + ~resolve:(fun { ctx = mina; _ } () message public_key vrf_threshold -> Deferred.return @@ let open Result.Let_syntax in let%map sk = match%bind Mutations.find_identity ~public_key mina with - | `Keypair {private_key; _} -> + | `Keypair { private_key; _ } -> Ok private_key | `Hd_index _ -> Error @@ -3833,13 +3933,14 @@ module Queries = struct { (Consensus_vrf.Layout.Evaluation.of_message_and_sk ~constraint_constants message sk) with - vrf_threshold } + vrf_threshold + } in match vrf_threshold with | Some _ -> Consensus_vrf.Layout.Evaluation.compute_vrf ~constraint_constants t | None -> - t ) + t) let check_vrf = field "checkVrf" @@ -3848,13 +3949,13 @@ module Queries = struct evaluations without needing to reveal the private key, in the format \ returned by evaluateVrf" ~typ:(non_null Types.vrf_evaluation) - ~args:Arg.[arg "input" ~typ:(non_null Types.Input.vrf_evaluation)] - ~resolve:(fun {ctx= mina; _} () evaluation -> + ~args:Arg.[ arg "input" ~typ:(non_null Types.Input.vrf_evaluation) ] + ~resolve:(fun { ctx = mina; _ } () evaluation -> let constraint_constants = (Mina_lib.config mina).precomputed_values.constraint_constants in Consensus_vrf.Layout.Evaluation.compute_vrf ~constraint_constants - evaluation ) + evaluation) let commands = [ sync_status @@ -3885,7 +3986,8 @@ module Queries = struct ; validate_payment ; evaluate_vrf ; check_vrf - ; runtime_config ] + ; runtime_config + ] end let schema = @@ -3897,5 +3999,5 @@ let schema_limited = (*including version because that's the default query*) Graphql_async.Schema.( schema - [Queries.daemon_status; Queries.block; Queries.version] + [ Queries.daemon_status; Queries.block; Queries.version ] ~mutations:[] 
~subscriptions:[]) diff --git a/src/lib/mina_incremental/mina_incremental.ml b/src/lib/mina_incremental/mina_incremental.ml index cbefb281634..050c535bc7c 100644 --- a/src/lib/mina_incremental/mina_incremental.ml +++ b/src/lib/mina_incremental/mina_incremental.ml @@ -9,7 +9,7 @@ open Async module Make (Incremental : Incremental.S) (Name : sig - val t : string + val t : string end) = struct include Incremental @@ -27,14 +27,14 @@ struct | Changed (_, value) -> Strict_pipe.Writer.write writer value | Invalidated -> - () ) ; + ()) ; (Strict_pipe.Reader.to_linear_pipe reader).Linear_pipe.Reader.pipe let of_broadcast_pipe pipe = let init = Broadcast_pipe.Reader.peek pipe in let var = Var.create init in Broadcast_pipe.Reader.iter pipe ~f:(fun value -> - Var.set var value ; stabilize () ; Deferred.unit ) + Var.set var value ; stabilize () ; Deferred.unit) |> don't_wait_for ; var @@ -43,7 +43,7 @@ struct don't_wait_for (Deferred.map deferred ~f:(fun () -> Var.set var `Filled ; - stabilize () )) ; + stabilize ())) ; var let of_ivar (ivar : unit Ivar.t) = of_deferred (Ivar.read ivar) diff --git a/src/lib/mina_intf/core_intf.ml b/src/lib/mina_intf/core_intf.ml index b97025ee8a5..1360d9d5879 100644 --- a/src/lib/mina_intf/core_intf.ml +++ b/src/lib/mina_intf/core_intf.ml @@ -3,7 +3,7 @@ open Core_kernel module type Security_intf = sig (** In production we set this to (hopefully a prefix of) k for our consensus * mechanism; infinite is for tests *) - val max_depth : [`Infinity | `Finite of int] + val max_depth : [ `Infinity | `Finite of int ] end module type Snark_pool_proof_intf = sig diff --git a/src/lib/mina_intf/transition_frontier_components_intf.ml b/src/lib/mina_intf/transition_frontier_components_intf.ml index b3470216d80..3cd1aaa6706 100644 --- a/src/lib/mina_intf/transition_frontier_components_intf.ml +++ b/src/lib/mina_intf/transition_frontier_components_intf.ml @@ -16,16 +16,16 @@ module type Transition_handler_validator_intf = sig -> trust_system:Trust_system.t -> 
time_controller:Block_time.Controller.t -> frontier:transition_frontier - -> transition_reader:External_transition.Initial_validated.t - Envelope.Incoming.t - Strict_pipe.Reader.t - -> valid_transition_writer:( ( External_transition.Initial_validated.t - Envelope.Incoming.t - , State_hash.t ) - Cached.t - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t + -> transition_reader: + External_transition.Initial_validated.t Envelope.Incoming.t + Strict_pipe.Reader.t + -> valid_transition_writer: + ( ( External_transition.Initial_validated.t Envelope.Incoming.t + , State_hash.t ) + Cached.t + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t -> unprocessed_transition_cache:unprocessed_transition_cache -> unit @@ -75,49 +75,42 @@ module type Transition_handler_processor_intf = sig -> trust_system:Trust_system.t -> time_controller:Block_time.Controller.t -> frontier:transition_frontier - -> primary_transition_reader:( External_transition.Initial_validated.t - Envelope.Incoming.t - , State_hash.t ) - Cached.t - Strict_pipe.Reader.t - -> producer_transition_reader:transition_frontier_breadcrumb - Strict_pipe.Reader.t + -> primary_transition_reader: + ( External_transition.Initial_validated.t Envelope.Incoming.t + , State_hash.t ) + Cached.t + Strict_pipe.Reader.t + -> producer_transition_reader: + transition_frontier_breadcrumb Strict_pipe.Reader.t -> clean_up_catchup_scheduler:unit Ivar.t - -> catchup_job_writer:( State_hash.t - * ( External_transition.Initial_validated.t - Envelope.Incoming.t - , State_hash.t ) - Cached.t - Rose_tree.t - list - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t - -> catchup_breadcrumbs_reader:( ( transition_frontier_breadcrumb - , State_hash.t ) - Cached.t - Rose_tree.t - list - * [ `Ledger_catchup of unit Ivar.t - | `Catchup_scheduler ] ) - Strict_pipe.Reader.t - -> catchup_breadcrumbs_writer:( ( transition_frontier_breadcrumb - , State_hash.t ) - Cached.t - 
Rose_tree.t - list - * [ `Ledger_catchup of unit Ivar.t - | `Catchup_scheduler ] - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t - -> processed_transition_writer:( [ `Transition of - External_transition.Validated.t ] - * [ `Source of - [`Gossip | `Catchup | `Internal] ] - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t + -> catchup_job_writer: + ( State_hash.t + * ( External_transition.Initial_validated.t Envelope.Incoming.t + , State_hash.t ) + Cached.t + Rose_tree.t + list + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t + -> catchup_breadcrumbs_reader: + ( (transition_frontier_breadcrumb, State_hash.t) Cached.t Rose_tree.t + list + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] ) + Strict_pipe.Reader.t + -> catchup_breadcrumbs_writer: + ( (transition_frontier_breadcrumb, State_hash.t) Cached.t Rose_tree.t + list + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t + -> processed_transition_writer: + ( [ `Transition of External_transition.Validated.t ] + * [ `Source of [ `Gossip | `Catchup | `Internal ] ] + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t -> unit end @@ -143,18 +136,18 @@ module type Transition_handler_intf = sig module Breadcrumb_builder : Breadcrumb_builder_intf - with type transition_frontier := transition_frontier - and type transition_frontier_breadcrumb := transition_frontier_breadcrumb + with type transition_frontier := transition_frontier + and type transition_frontier_breadcrumb := transition_frontier_breadcrumb module Validator : Transition_handler_validator_intf - with type unprocessed_transition_cache := Unprocessed_transition_cache.t - and type transition_frontier := transition_frontier + with type unprocessed_transition_cache := Unprocessed_transition_cache.t + and type transition_frontier := transition_frontier module Processor : 
Transition_handler_processor_intf - with type transition_frontier := transition_frontier - and type transition_frontier_breadcrumb := transition_frontier_breadcrumb + with type transition_frontier := transition_frontier + and type transition_frontier_breadcrumb := transition_frontier_breadcrumb end (** Interface that allows a peer to prove their best_tip in the @@ -177,8 +170,8 @@ module type Best_tip_prover_intf = sig -> ( External_transition.t , State_body_hash.t list * External_transition.t ) Proof_carrying_data.t - -> ( [`Root of External_transition.Initial_validated.t] - * [`Best_tip of External_transition.Initial_validated.t] ) + -> ( [ `Root of External_transition.Initial_validated.t ] + * [ `Best_tip of External_transition.Initial_validated.t ] ) Deferred.Or_error.t end @@ -208,8 +201,8 @@ module type Consensus_best_tip_prover_intf = sig -> ( External_transition.t , State_body_hash.t list * External_transition.t ) Proof_carrying_data.t - -> ( [`Root of External_transition.Initial_validated.t] - * [`Best_tip of External_transition.Initial_validated.t] ) + -> ( [ `Root of External_transition.Initial_validated.t ] + * [ `Best_tip of External_transition.Initial_validated.t ] ) Deferred.Or_error.t end @@ -244,7 +237,7 @@ module type Sync_handler_intf = sig that they have gossiped to the network *) module Root : Consensus_best_tip_prover_intf - with type transition_frontier := transition_frontier + with type transition_frontier := transition_frontier end module type Transition_chain_prover_intf = sig @@ -272,9 +265,9 @@ module type Bootstrap_controller_intf = sig -> verifier:Verifier.t -> network:network -> consensus_local_state:Consensus.Data.Local_state.t - -> transition_reader:External_transition.Initial_validated.t - Envelope.Incoming.t - Strict_pipe.Reader.t + -> transition_reader: + External_transition.Initial_validated.t Envelope.Incoming.t + Strict_pipe.Reader.t -> persistent_root:persistent_root -> persistent_frontier:persistent_frontier -> 
initial_root_transition:External_transition.Validated.t @@ -299,15 +292,14 @@ module type Transition_frontier_controller_intf = sig -> verifier:Verifier.t -> network:network -> time_controller:Block_time.Controller.t - -> collected_transitions:External_transition.Initial_validated.t - Envelope.Incoming.t - list + -> collected_transitions: + External_transition.Initial_validated.t Envelope.Incoming.t list -> frontier:transition_frontier - -> network_transition_reader:External_transition.Initial_validated.t - Envelope.Incoming.t - Strict_pipe.Reader.t + -> network_transition_reader: + External_transition.Initial_validated.t Envelope.Incoming.t + Strict_pipe.Reader.t -> producer_transition_reader:breadcrumb Strict_pipe.Reader.t - -> clear_reader:[`Clear] Strict_pipe.Reader.t + -> clear_reader:[ `Clear ] Strict_pipe.Reader.t -> External_transition.Validated.t Strict_pipe.Reader.t end @@ -319,19 +311,18 @@ module type Initial_validator_intf = sig val run : logger:Logger.t -> trust_system:Trust_system.t - -> transition_reader:( [ `Transition of - external_transition Envelope.Incoming.t ] - * [`Time_received of Block_time.t] - * [ `Valid_cb of - Mina_net2.Validation_callback.t -> unit ] ) - Strict_pipe.Reader.t - -> valid_transition_writer:( [ `Transition of - external_transition_with_initial_validation - Envelope.Incoming.t ] - * [`Time_received of Block_time.t] - , Strict_pipe.crash Strict_pipe.buffered - , unit ) - Strict_pipe.Writer.t + -> transition_reader: + ( [ `Transition of external_transition Envelope.Incoming.t ] + * [ `Time_received of Block_time.t ] + * [ `Valid_cb of Mina_net2.Validation_callback.t -> unit ] ) + Strict_pipe.Reader.t + -> valid_transition_writer: + ( [ `Transition of + external_transition_with_initial_validation Envelope.Incoming.t ] + * [ `Time_received of Block_time.t ] + , Strict_pipe.crash Strict_pipe.buffered + , unit ) + Strict_pipe.Writer.t -> genesis_state_hash:State_hash.t -> genesis_constants:Genesis_constants.t -> unit @@ -359,26 
+350,22 @@ module type Transition_router_intf = sig -> consensus_local_state:Consensus.Data.Local_state.t -> persistent_root_location:string -> persistent_frontier_location:string - -> frontier_broadcast_pipe:transition_frontier option - Pipe_lib.Broadcast_pipe.Reader.t - * transition_frontier option - Pipe_lib.Broadcast_pipe.Writer.t - -> network_transition_reader:( [ `Transition of - External_transition.t Envelope.Incoming.t - ] - * [`Time_received of Block_time.t] - * [ `Valid_cb of - Mina_net2.Validation_callback.t ] ) - Strict_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier option Pipe_lib.Broadcast_pipe.Reader.t + * transition_frontier option Pipe_lib.Broadcast_pipe.Writer.t + -> network_transition_reader: + ( [ `Transition of External_transition.t Envelope.Incoming.t ] + * [ `Time_received of Block_time.t ] + * [ `Valid_cb of Mina_net2.Validation_callback.t ] ) + Strict_pipe.Reader.t -> producer_transition_reader:breadcrumb Strict_pipe.Reader.t - -> most_recent_valid_block:External_transition.Initial_validated.t - Broadcast_pipe.Reader.t - * External_transition.Initial_validated.t - Broadcast_pipe.Writer.t + -> most_recent_valid_block: + External_transition.Initial_validated.t Broadcast_pipe.Reader.t + * External_transition.Initial_validated.t Broadcast_pipe.Writer.t -> precomputed_values:Precomputed_values.t - -> catchup_mode:[`Normal | `Super] - -> ( [`Transition of External_transition.Validated.t] - * [`Source of [`Gossip | `Catchup | `Internal]] ) + -> catchup_mode:[ `Normal | `Super ] + -> ( [ `Transition of External_transition.Validated.t ] + * [ `Source of [ `Gossip | `Catchup | `Internal ] ] ) Strict_pipe.Reader.t * unit Ivar.t end diff --git a/src/lib/mina_lib/archive_client.ml b/src/lib/mina_lib/archive_client.ml index 713cd66cf8e..b8cead7e1af 100644 --- a/src/lib/mina_lib/archive_client.ml +++ b/src/lib/mina_lib/archive_client.ml @@ -12,8 +12,8 @@ let dispatch ?(max_tries = 5) (Error (Error.tag_arg e (sprintf - "Could not send 
archive diff data to archive process after \ - %d tries. The process may not be running, please check the \ + "Could not send archive diff data to archive process after %d \ + tries. The process may not be running, please check the \ daemon-argument" max_tries) ( ("host_and_port", archive_location.value) @@ -40,8 +40,8 @@ let make_dispatch_block rpc ?(max_tries = 5) (Error (Error.tag_arg e (sprintf - "Could not send block data to archive process after %d \ - tries. The process may not be running, please check the \ + "Could not send block data to archive process after %d tries. \ + The process may not be running, please check the \ daemon-argument" max_tries) ( ("host_and_port", archive_location.value) @@ -79,8 +79,9 @@ let transfer ~logger ~archive_location ~metadata: [ ("error", Error_json.error_to_yojson e) ; ( "breadcrumb" - , Transition_frontier.Breadcrumb.to_yojson breadcrumb ) ] - "Could not send breadcrumb to archive: $error" ) ) + , Transition_frontier.Breadcrumb.to_yojson breadcrumb ) + ] + "Could not send breadcrumb to archive: $error")) let run ~logger ~(frontier_broadcast_pipe : @@ -97,4 +98,4 @@ let run ~logger Transition_frontier.Extensions.get_view_pipe extensions Transition_frontier.Extensions.New_breadcrumbs in - transfer ~logger ~archive_location breadcrumb_reader )) ) + transfer ~logger ~archive_location breadcrumb_reader))) diff --git a/src/lib/mina_lib/archive_client.mli b/src/lib/mina_lib/archive_client.mli index 28648165689..d451eac7bd2 100644 --- a/src/lib/mina_lib/archive_client.mli +++ b/src/lib/mina_lib/archive_client.mli @@ -15,7 +15,7 @@ val dispatch_extensional_block : val run : logger:Logger.t - -> frontier_broadcast_pipe:Transition_frontier.t option - Broadcast_pipe.Reader.t + -> frontier_broadcast_pipe: + Transition_frontier.t option Broadcast_pipe.Reader.t -> Host_and_port.t Cli_lib.Flag.Types.with_name -> unit diff --git a/src/lib/mina_lib/coda_subscriptions.ml b/src/lib/mina_lib/coda_subscriptions.ml index 
7e9e84e5eaa..5e202d8ba5c 100644 --- a/src/lib/mina_lib/coda_subscriptions.ml +++ b/src/lib/mina_lib/coda_subscriptions.ml @@ -16,21 +16,22 @@ module Optional_public_key = struct end type t = - { subscribed_payment_users: + { subscribed_payment_users : Signed_command.t reader_and_writer Public_key.Compressed.Table.t - ; subscribed_block_users: + ; subscribed_block_users : (Filtered_external_transition.t, State_hash.t) With_hash.t reader_and_writer list Optional_public_key.Table.t - ; mutable reorganization_subscription: [`Changed] reader_and_writer list } + ; mutable reorganization_subscription : [ `Changed ] reader_and_writer list + } (* idempotent *) let add_new_subscription (t : t) ~pk = (* add a new subscribed block user for this pk if we're not already tracking it *) ignore ( Optional_public_key.Table.find_or_add t.subscribed_block_users (Some pk) - ~default:(fun () -> [Pipe.create ()]) + ~default:(fun () -> [ Pipe.create () ]) : (Filtered_external_transition.t, State_hash.t) With_hash.t reader_and_writer list ) ; @@ -47,13 +48,13 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks Optional_public_key.Table.of_alist_multi @@ List.map (Secrets.Wallets.pks wallets) ~f:(fun wallet -> let reader, writer = Pipe.create () in - (Some wallet, (reader, writer)) ) + (Some wallet, (reader, writer))) in let subscribed_payment_users = Public_key.Compressed.Table.of_alist_exn @@ List.map (Secrets.Wallets.pks wallets) ~f:(fun wallet -> let reader, writer = Pipe.create () in - (wallet, (reader, writer)) ) + (wallet, (reader, writer))) in let update_payment_subscriptions filtered_external_transition participants = Set.iter participants ~f:(fun participant -> @@ -62,32 +63,31 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks let user_commands = filtered_external_transition |> Filtered_external_transition.commands - |> List.map ~f:(fun {With_status.data; _} -> data.data) + |> List.map ~f:(fun { With_status.data; _ } -> data.data) |> List.filter_map 
~f:(function | User_command.Signed_command c -> Some c | Snapp_command _ -> - None ) + None) |> Fn.flip Signed_command.filter_by_participant participant in List.iter user_commands ~f:(fun user_command -> - Pipe.write_without_pushback_if_open writer user_command ) ) ) + Pipe.write_without_pushback_if_open writer user_command))) in - let update_block_subscriptions {With_hash.data= external_transition; hash} + let update_block_subscriptions { With_hash.data = external_transition; hash } transactions participants = Set.iter participants ~f:(fun participant -> Hashtbl.find_and_call subscribed_block_users (Some participant) ~if_found:(fun pipes -> List.iter pipes ~f:(fun (_, writer) -> let data = - Filtered_external_transition.of_transition - external_transition + Filtered_external_transition.of_transition external_transition (`Some (Public_key.Compressed.Set.singleton participant)) transactions in Pipe.write_without_pushback_if_open writer - {With_hash.data; hash} ) ) - ~if_not_found:ignore ) ; + { With_hash.data; hash })) + ~if_not_found:ignore) ; Hashtbl.find_and_call subscribed_block_users None ~if_found:(fun pipes -> List.iter pipes ~f:(fun (_, writer) -> @@ -96,7 +96,7 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks `All transactions in if not (Pipe.is_closed writer) then - Pipe.write_without_pushback writer {With_hash.data; hash} ) ) + Pipe.write_without_pushback writer { With_hash.data; hash })) ~if_not_found:ignore in let gcloud_keyfile = @@ -113,7 +113,7 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks ignore ( Core.Sys.command (sprintf "gcloud auth activate-service-account --key-file=%s" path) - : int ) ) ; + : int )) ; trace_task "subscriptions new block loop" (fun () -> Strict_pipe.Reader.iter new_blocks ~f:(fun new_block -> let hash = @@ -153,8 +153,8 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks Some bucket | _ -> [%log warn] - "GCLOUD_BLOCK_UPLOAD_BUCKET environment variable not \ - set. 
Must be set to use upload_blocks_to_gcloud" ; + "GCLOUD_BLOCK_UPLOAD_BUCKET environment variable not set. \ + Must be set to use upload_blocks_to_gcloud" ; None in match (gcloud_keyfile, network, bucket) with @@ -195,11 +195,11 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks Deferred.Or_error.try_with_join ~here:[%here] (fun () -> Or_error.try_with (fun () -> Async.Process.run () ~prog:"bash" - ~args:["-c"; command] + ~args:[ "-c"; command ] |> Deferred.Result.map_error - ~f:(Error.tag ~tag:__LOC__) ) + ~f:(Error.tag ~tag:__LOC__)) |> Result.map_error ~f:(Error.tag ~tag:__LOC__) - |> Deferred.return |> Deferred.Or_error.join ) + |> Deferred.return |> Deferred.Or_error.join) in ( match output with | Ok _result -> @@ -208,29 +208,29 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks [%log warn] ~metadata: [ ("error", Error_json.error_to_yojson e) - ; ("command", `String command) ] + ; ("command", `String command) + ] "Uploading block to gcloud with command $command \ failed: $error" ) ; Sys.remove tmp_file ; Mina_metrics.( Gauge.dec_one Block_latency.Upload_to_gcloud.upload_to_gcloud_blocks) - ) + ) | _ -> () ) ; Option.iter path ~f:(fun (`Path path) -> Out_channel.with_file ~append:true path ~f:(fun out_channel -> Out_channel.output_lines out_channel - [Yojson.Safe.to_string (Lazy.force precomputed_block)] ) - ) ; + [ Yojson.Safe.to_string (Lazy.force precomputed_block) ])) ; [%log info] "Saw block with state hash $state_hash" ~metadata: (let state_hash_data = - [("state_hash", `String (State_hash.to_base58_check hash))] + [ ("state_hash", `String (State_hash.to_base58_check hash)) ] in if is_some log then state_hash_data - @ [("precomputed_block", Lazy.force precomputed_block)] + @ [ ("precomputed_block", Lazy.force precomputed_block) ] else state_hash_data)) ; match Filtered_external_transition.validate_transactions @@ -243,8 +243,7 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks verified_transactions) in let 
filtered_external_transition = - if is_storing_all then - Lazy.force unfiltered_external_transition + if is_storing_all then Lazy.force unfiltered_external_transition else Filtered_external_transition.of_transition new_block (`Some @@ -260,19 +259,19 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks update_payment_subscriptions filtered_external_transition participants ; update_block_subscriptions - {With_hash.data= new_block; hash} + { With_hash.data = new_block; hash } verified_transactions participants ; Deferred.unit | Error e -> [%log error] ~metadata: [ ( "error" - , `String (Staged_ledger.Pre_diff_info.Error.to_string e) - ) - ; ("state_hash", State_hash.to_yojson hash) ] + , `String (Staged_ledger.Pre_diff_info.Error.to_string e) ) + ; ("state_hash", State_hash.to_yojson hash) + ] "Staged ledger had error with transactions in block for state \ $state_hash: $error" ; - Deferred.unit ) ) ; + Deferred.unit)) ; let reorganization_subscription = [] in let reader, writer = Strict_pipe.create ~name:"Reorganization subscription" @@ -281,29 +280,27 @@ let create ~logger ~constraint_constants ~wallets ~new_blocks let t = { subscribed_payment_users ; subscribed_block_users - ; reorganization_subscription } + ; reorganization_subscription + } in don't_wait_for @@ Broadcast_pipe.Reader.iter transition_frontier ~f: - (Option.value_map ~default:Deferred.unit - ~f:(fun transition_frontier -> + (Option.value_map ~default:Deferred.unit ~f:(fun transition_frontier -> let best_tip_diff_pipe = Transition_frontier.( Extensions.( - get_view_pipe - (extensions transition_frontier) - Best_tip_diff)) + get_view_pipe (extensions transition_frontier) Best_tip_diff)) in Broadcast_pipe.Reader.iter best_tip_diff_pipe - ~f:(fun {reorg_best_tip; _} -> + ~f:(fun { reorg_best_tip; _ } -> if reorg_best_tip then Strict_pipe.Writer.write writer () ; - Deferred.unit ) )) ; + Deferred.unit))) ; Strict_pipe.Reader.iter reader ~f:(fun () -> List.iter t.reorganization_subscription 
~f:(fun (_, writer) -> if not (Pipe.is_closed writer) then - Pipe.write_without_pushback writer `Changed ) ; - Deferred.unit ) + Pipe.write_without_pushback writer `Changed) ; + Deferred.unit) |> don't_wait_for ; t @@ -319,17 +316,17 @@ let add_block_subscriber t public_key = | None -> None | Some pipes -> ( - match - List.filter pipes ~f:(fun rw_pair' -> - (* Intentionally using pointer equality *) - not - @@ Tuple2.equal ~eq1:Pipe.equal ~eq2:Pipe.equal rw_pair rw_pair' - ) - with - | [] -> - None - | l -> - Some l ) ) ) ; + match + List.filter pipes ~f:(fun rw_pair' -> + (* Intentionally using pointer equality *) + not + @@ Tuple2.equal ~eq1:Pipe.equal ~eq2:Pipe.equal rw_pair + rw_pair') + with + | [] -> + None + | l -> + Some l )) ) ; block_reader let add_payment_subscriber t public_key = @@ -340,6 +337,6 @@ let add_payment_subscriber t public_key = let add_reorganization_subscriber t = let reader, writer = Pipe.create () in - t.reorganization_subscription - <- (reader, writer) :: t.reorganization_subscription ; + t.reorganization_subscription <- + (reader, writer) :: t.reorganization_subscription ; reader diff --git a/src/lib/mina_lib/conf_dir.ml b/src/lib/mina_lib/conf_dir.ml index e1966bc5231..b63c57752d8 100644 --- a/src/lib/mina_lib/conf_dir.ml +++ b/src/lib/mina_lib/conf_dir.ml @@ -15,18 +15,18 @@ let check_and_set_lockfile ~logger conf_dir = Monitor.try_with ~here:[%here] ~extract_exn:true (fun () -> Writer.with_file ~exclusive:true lockfile ~f:(fun writer -> let pid = Unix.getpid () in - return (Writer.writef writer "%d\n" (Pid.to_int pid)) ) ) + return (Writer.writef writer "%d\n" (Pid.to_int pid)))) with | Ok () -> [%log info] "Created daemon lockfile $lockfile" - ~metadata:[("lockfile", `String lockfile)] ; + ~metadata:[ ("lockfile", `String lockfile) ] ; Exit_handlers.register_async_shutdown_handler ~logger ~description:"Remove daemon lockfile" (fun () -> match%bind Sys.file_exists lockfile with | `Yes -> Unix.unlink lockfile | _ -> - return () ) + 
return ()) | Error exn -> Error.tag_arg (Error.of_exn exn) "Could not create the daemon lockfile" ("lockfile", lockfile) @@ -45,7 +45,7 @@ let check_and_set_lockfile ~logger conf_dir = in match%map Reader.read_line reader with | `Ok s -> ( - try Pid.of_string s with _ -> rm_and_raise () ) + try Pid.of_string s with _ -> rm_and_raise () ) | `Eof -> rm_and_raise () in @@ -70,8 +70,9 @@ let check_and_set_lockfile ~logger conf_dir = [%log info] "Removing lockfile for terminated process" ~metadata: [ ("lockfile", `String lockfile) - ; ("pid", `Int (Pid.to_int pid)) ] ; - Unix.unlink lockfile ) ) ) + ; ("pid", `Int (Pid.to_int pid)) + ] ; + Unix.unlink lockfile ))) with | Ok () -> () @@ -91,7 +92,7 @@ let export_logs_to_tar ?basename ~conf_dir = match basename with | None -> let date, day = Time.(now () |> to_date_ofday ~zone:Zone.utc) in - let Time.Span.Parts.{hr; min; sec; _} = Time.Ofday.to_parts day in + let Time.Span.Parts.{ hr; min; sec; _ } = Time.Ofday.to_parts day in sprintf "%s_%02d-%02d-%02d" (Date.to_string date) hr min sec | Some basename -> basename @@ -109,9 +110,7 @@ let export_logs_to_tar ?basename ~conf_dir = in let%bind.Deferred linux_info = if String.equal Sys.os_type "Unix" then - match%map.Deferred - Process.run ~prog:"uname" ~args:["-a"] () - with + match%map.Deferred Process.run ~prog:"uname" ~args:[ "-a" ] () with | Ok s when String.is_prefix s ~prefix:"Linux" -> Some s | _ -> @@ -122,11 +121,12 @@ let export_logs_to_tar ?basename ~conf_dir = if Option.is_some linux_info then let open Deferred.Let_syntax in let linux_hw_progs = - [ ("cat", ["/etc/os-release"]) + [ ("cat", [ "/etc/os-release" ]) ; ("lscpu", []) ; ("lsgpu", []) ; ("lsmem", []) - ; ("lsblk", []) ] + ; ("lsblk", []) + ] in let%map outputs = Deferred.List.map linux_hw_progs ~f:(fun (prog, args) -> @@ -139,9 +139,9 @@ let export_logs_to_tar ?basename ~conf_dir = | Ok lines -> lines | Error err -> - [sprintf "Error: %s" (Error.to_string_hum err)] + [ sprintf "Error: %s" 
(Error.to_string_hum err) ] in - return ((header :: output) @ [""]) ) + return ((header :: output) @ [ "" ])) in Some (Option.value_exn linux_info :: List.concat outputs) else (* TODO: Mac, other Unixes *) @@ -155,8 +155,8 @@ let export_logs_to_tar ?basename ~conf_dir = match%map Monitor.try_with ~here:[%here] ~extract_exn:true (fun () -> Writer.with_file ~exclusive:true hw_info_file ~f:(fun writer -> - Deferred.List.map (Option.value_exn hw_info_opt) - ~f:(fun line -> return (Writer.write_line writer line)) ) ) + Deferred.List.map (Option.value_exn hw_info_opt) ~f:(fun line -> + return (Writer.write_line writer line)))) with | Ok _units -> Some hw_info @@ -168,20 +168,23 @@ let export_logs_to_tar ?basename ~conf_dir = let base_files = "mina.version" :: log_files in let files = Option.value_map hw_file_opt ~default:base_files ~f:(fun hw_file -> - hw_file :: base_files ) - in - let tmp_dir = - Filename.temp_dir ~in_dir:"/tmp" ("mina-logs_" ^ basename) "" + hw_file :: base_files) in + let tmp_dir = Filename.temp_dir ~in_dir:"/tmp" ("mina-logs_" ^ basename) "" in let files_in_dir dir = List.map files ~f:(fun file -> dir ^/ file) in let conf_dir_files = files_in_dir conf_dir in let%bind _result0 = - Process.run ~prog:"cp" ~args:(("-p" :: conf_dir_files) @ [tmp_dir]) () + Process.run ~prog:"cp" ~args:(("-p" :: conf_dir_files) @ [ tmp_dir ]) () in let%bind _result1 = Process.run ~prog:"tar" ~args: - ( ["-C"; tmp_dir; (* Create gzipped tar file [file]. *) "-czf"; tarfile] + ( [ "-C" + ; tmp_dir + ; (* Create gzipped tar file [file]. 
*) + "-czf" + ; tarfile + ] @ files ) () in diff --git a/src/lib/mina_lib/config.ml b/src/lib/mina_lib/config.ml index 38a2446a140..d03c9d2c1fc 100644 --- a/src/lib/mina_lib/config.ml +++ b/src/lib/mina_lib/config.ml @@ -6,52 +6,54 @@ open Signature_lib module Snark_worker_config = struct type t = - { initial_snark_worker_key: Public_key.Compressed.t option - ; shutdown_on_disconnect: bool - ; num_threads: int option } + { initial_snark_worker_key : Public_key.Compressed.t option + ; shutdown_on_disconnect : bool + ; num_threads : int option + } end (** If ledger_db_location is None, will auto-generate a db based on a UUID *) type t = - { conf_dir: string - ; chain_id: string - ; logger: Logger.t - ; pids: Child_processes.Termination.t - ; trust_system: Trust_system.t - ; monitor: Monitor.t option - ; is_seed: bool - ; disable_node_status: bool - ; super_catchup: bool - ; initial_block_production_keypairs: Keypair.Set.t - ; coinbase_receiver: Consensus.Coinbase_receiver.t - ; work_selection_method: (module Work_selector.Selection_method_intf) - ; snark_worker_config: Snark_worker_config.t - ; snark_coordinator_key: Public_key.Compressed.t option [@default None] - ; work_reassignment_wait: int - ; gossip_net_params: Gossip_net.Libp2p.Config.t - ; net_config: Mina_networking.Config.t - ; initial_protocol_version: Protocol_version.t + { conf_dir : string + ; chain_id : string + ; logger : Logger.t + ; pids : Child_processes.Termination.t + ; trust_system : Trust_system.t + ; monitor : Monitor.t option + ; is_seed : bool + ; disable_node_status : bool + ; super_catchup : bool + ; initial_block_production_keypairs : Keypair.Set.t + ; coinbase_receiver : Consensus.Coinbase_receiver.t + ; work_selection_method : (module Work_selector.Selection_method_intf) + ; snark_worker_config : Snark_worker_config.t + ; snark_coordinator_key : Public_key.Compressed.t option [@default None] + ; work_reassignment_wait : int + ; gossip_net_params : Gossip_net.Libp2p.Config.t + ; 
net_config : Mina_networking.Config.t + ; initial_protocol_version : Protocol_version.t (* Option.t instead of option, so that the derived `make' requires an argument *) - ; proposed_protocol_version_opt: Protocol_version.t Option.t - ; snark_pool_disk_location: string - ; wallets_disk_location: string - ; persistent_root_location: string - ; persistent_frontier_location: string - ; epoch_ledger_location: string - ; staged_ledger_transition_backup_capacity: int [@default 10] - ; time_controller: Block_time.Controller.t - ; snark_work_fee: Currency.Fee.t - ; consensus_local_state: Consensus.Data.Local_state.t - ; is_archive_rocksdb: bool [@default false] - ; archive_process_location: + ; proposed_protocol_version_opt : Protocol_version.t Option.t + ; snark_pool_disk_location : string + ; wallets_disk_location : string + ; persistent_root_location : string + ; persistent_frontier_location : string + ; epoch_ledger_location : string + ; staged_ledger_transition_backup_capacity : int [@default 10] + ; time_controller : Block_time.Controller.t + ; snark_work_fee : Currency.Fee.t + ; consensus_local_state : Consensus.Data.Local_state.t + ; is_archive_rocksdb : bool [@default false] + ; archive_process_location : Core.Host_and_port.t Cli_lib.Flag.Types.with_name option [@default None] - ; demo_mode: bool [@default false] - ; log_block_creation: bool [@default false] - ; precomputed_values: Precomputed_values.t - ; start_time: Time.t - ; precomputed_blocks_path: string option - ; log_precomputed_blocks: bool - ; upload_blocks_to_gcloud: bool - ; block_reward_threshold: Currency.Amount.t option [@default None] } + ; demo_mode : bool [@default false] + ; log_block_creation : bool [@default false] + ; precomputed_values : Precomputed_values.t + ; start_time : Time.t + ; precomputed_blocks_path : string option + ; log_precomputed_blocks : bool + ; upload_blocks_to_gcloud : bool + ; block_reward_threshold : Currency.Amount.t option [@default None] + } [@@deriving make] diff 
--git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index bff30a845fc..54cf588b6ab 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -17,23 +17,23 @@ module Snark_worker_lib = Snark_worker module Timeout = Timeout_lib.Core_time type Structured_log_events.t += Connecting - [@@deriving register_event {msg= "Mina daemon is connecting"}] + [@@deriving register_event { msg = "Mina daemon is connecting" }] type Structured_log_events.t += Listening - [@@deriving register_event {msg= "Mina daemon is listening"}] + [@@deriving register_event { msg = "Mina daemon is listening" }] type Structured_log_events.t += Bootstrapping - [@@deriving register_event {msg= "Mina daemon is bootstrapping"}] + [@@deriving register_event { msg = "Mina daemon is bootstrapping" }] type Structured_log_events.t += Ledger_catchup - [@@deriving register_event {msg= "Mina daemon is doing ledger catchup"}] + [@@deriving register_event { msg = "Mina daemon is doing ledger catchup" }] type Structured_log_events.t += Synced - [@@deriving register_event {msg= "Mina daemon is synced"}] + [@@deriving register_event { msg = "Mina daemon is synced" }] type Structured_log_events.t += - | Rebroadcast_transition of {state_hash: State_hash.t} - [@@deriving register_event {msg= "Rebroadcasting $state_hash"}] + | Rebroadcast_transition of { state_hash : State_hash.t } + [@@deriving register_event { msg = "Rebroadcasting $state_hash" }] exception Snark_worker_error of int @@ -44,49 +44,52 @@ exception Snark_worker_signal_interrupt of Signal.t assigned to a public key. 
This public key can change throughout the entire time the daemon is running *) type snark_worker = - { public_key: Public_key.Compressed.t - ; process: Process.t Ivar.t - ; kill_ivar: unit Ivar.t } + { public_key : Public_key.Compressed.t + ; process : Process.t Ivar.t + ; kill_ivar : unit Ivar.t + } type processes = - { prover: Prover.t - ; verifier: Verifier.t - ; mutable snark_worker: - [`On of snark_worker * Currency.Fee.t | `Off of Currency.Fee.t] } + { prover : Prover.t + ; verifier : Verifier.t + ; mutable snark_worker : + [ `On of snark_worker * Currency.Fee.t | `Off of Currency.Fee.t ] + } type components = - { net: Mina_networking.t - ; transaction_pool: Network_pool.Transaction_pool.t - ; snark_pool: Network_pool.Snark_pool.t - ; transition_frontier: Transition_frontier.t option Broadcast_pipe.Reader.t - ; most_recent_valid_block: - External_transition.Initial_validated.t Broadcast_pipe.Reader.t } + { net : Mina_networking.t + ; transaction_pool : Network_pool.Transaction_pool.t + ; snark_pool : Network_pool.Snark_pool.t + ; transition_frontier : Transition_frontier.t option Broadcast_pipe.Reader.t + ; most_recent_valid_block : + External_transition.Initial_validated.t Broadcast_pipe.Reader.t + } type pipes = - { validated_transitions_reader: + { validated_transitions_reader : External_transition.Validated.t Strict_pipe.Reader.t - ; producer_transition_writer: + ; producer_transition_writer : (Transition_frontier.Breadcrumb.t, synchronous, unit Deferred.t) Writer.t - ; external_transitions_writer: + ; external_transitions_writer : ( External_transition.t Envelope.Incoming.t * Block_time.t * Mina_net2.Validation_callback.t ) Pipe.Writer.t - ; user_command_input_writer: + ; user_command_input_writer : ( User_command_input.t list * ( ( Network_pool.Transaction_pool.Resource_pool.Diff.t * Network_pool.Transaction_pool.Resource_pool.Diff.Rejected.t ) Or_error.t -> unit) * ( Account_id.t - -> ( [`Min of Mina_base.Account.Nonce.t] * Mina_base.Account.Nonce.t + 
-> ( [ `Min of Mina_base.Account.Nonce.t ] * Mina_base.Account.Nonce.t , string ) Result.t) * (Account_id.t -> Account.t option Participating_state.T.t) , Strict_pipe.synchronous , unit Deferred.t ) Strict_pipe.Writer.t - ; user_command_writer: + ; user_command_writer : ( User_command.t list * ( ( Network_pool.Transaction_pool.Resource_pool.Diff.t * Network_pool.Transaction_pool.Resource_pool.Diff.Rejected.t ) @@ -95,7 +98,7 @@ type pipes = , Strict_pipe.synchronous , unit Deferred.t ) Strict_pipe.Writer.t - ; local_snark_work_writer: + ; local_snark_work_writer : ( Network_pool.Snark_pool.Resource_pool.Diff.t * ( ( Network_pool.Snark_pool.Resource_pool.Diff.t * Network_pool.Snark_pool.Resource_pool.Diff.rejected ) @@ -103,26 +106,29 @@ type pipes = -> unit) , Strict_pipe.synchronous , unit Deferred.t ) - Strict_pipe.Writer.t } + Strict_pipe.Writer.t + } type t = - { config: Config.t - ; processes: processes - ; components: components - ; initialization_finish_signal: unit Ivar.t - ; pipes: pipes - ; wallets: Secrets.Wallets.t - ; coinbase_receiver: Consensus.Coinbase_receiver.t ref - ; block_production_keypairs: + { config : Config.t + ; processes : processes + ; components : components + ; initialization_finish_signal : unit Ivar.t + ; pipes : pipes + ; wallets : Secrets.Wallets.t + ; coinbase_receiver : Consensus.Coinbase_receiver.t ref + ; block_production_keypairs : (Agent.read_write Agent.flag, Keypair.And_compressed_pk.Set.t) Agent.t - ; snark_job_state: Work_selector.State.t - ; mutable next_producer_timing: + ; snark_job_state : Work_selector.State.t + ; mutable next_producer_timing : Daemon_rpcs.Types.Status.Next_producer_timing.t option - ; subscriptions: Coda_subscriptions.t - ; sync_status: Sync_status.t Mina_incremental.Status.Observer.t - ; precomputed_block_writer: ([`Path of string] option * [`Log] option) ref - ; block_production_status: - [`Producing | `Producing_in_ms of float | `Free] ref } + ; subscriptions : Coda_subscriptions.t + ; 
sync_status : Sync_status.t Mina_incremental.Status.Observer.t + ; precomputed_block_writer : + ([ `Path of string ] option * [ `Log ] option) ref + ; block_production_status : + [ `Producing | `Producing_in_ms of float | `Free ] ref + } [@@deriving fields] let time_controller t = t.config.time_controller @@ -137,7 +143,7 @@ let peek_frontier frontier_broadcast_pipe = "Cannot retrieve transition frontier now. Bootstrapping right now.") let client_port t = - let {Node_addrs_and_ports.client_port; _} = + let { Node_addrs_and_ports.client_port; _ } = t.config.gossip_net_params.addrs_and_ports in client_port @@ -156,8 +162,8 @@ let replace_coinbase_receiver t coinbase_receiver = ~metadata: [ ( "old_receiver" , Consensus.Coinbase_receiver.to_yojson !(t.coinbase_receiver) ) - ; ( "new_receiver" - , Consensus.Coinbase_receiver.to_yojson coinbase_receiver ) ] ; + ; ("new_receiver", Consensus.Coinbase_receiver.to_yojson coinbase_receiver) + ] ; t.coinbase_receiver := coinbase_receiver let replace_block_production_keypairs t kps = @@ -166,8 +172,8 @@ let replace_block_production_keypairs t kps = let log_snark_worker_warning t = if Option.is_some t.config.snark_coordinator_key then [%log' warn t.config.logger] - "The snark coordinator flag is set; running a snark worker will \ - override the snark coordinator key" + "The snark coordinator flag is set; running a snark worker will override \ + the snark coordinator key" let log_snark_coordinator_warning (config : Config.t) snark_worker = if Option.is_some config.snark_coordinator_key then @@ -183,7 +189,7 @@ module Snark_worker = struct let run_process ~logger ~proof_level client_port kill_ivar num_threads = let env = Option.map - ~f:(fun num -> `Extend [("RAYON_NUM_THREADS", string_of_int num)]) + ~f:(fun num -> `Extend [ ("RAYON_NUM_THREADS", string_of_int num) ]) num_threads in let%map snark_worker_process = @@ -201,31 +207,32 @@ module Snark_worker = struct don't_wait_for ( match%bind Monitor.try_with ~here:[%here] (fun () 
-> - Process.wait snark_worker_process ) + Process.wait snark_worker_process) with | Ok signal_or_error -> ( - match signal_or_error with - | Ok () -> - [%log info] "Snark worker process died" ; - if Ivar.is_full kill_ivar then - [%log error] "Ivar.fill bug is here!" ; - Ivar.fill kill_ivar () ; - Deferred.unit - | Error (`Exit_non_zero non_zero_error) -> - [%log fatal] - !"Snark worker process died with a nonzero error %i" - non_zero_error ; - raise (Snark_worker_error non_zero_error) - | Error (`Signal signal) -> - [%log fatal] - !"Snark worker died with signal %{sexp:Signal.t}. Aborting daemon" - signal ; - raise (Snark_worker_signal_interrupt signal) ) + match signal_or_error with + | Ok () -> + [%log info] "Snark worker process died" ; + if Ivar.is_full kill_ivar then + [%log error] "Ivar.fill bug is here!" ; + Ivar.fill kill_ivar () ; + Deferred.unit + | Error (`Exit_non_zero non_zero_error) -> + [%log fatal] + !"Snark worker process died with a nonzero error %i" + non_zero_error ; + raise (Snark_worker_error non_zero_error) + | Error (`Signal signal) -> + [%log fatal] + !"Snark worker died with signal %{sexp:Signal.t}. 
Aborting \ + daemon" + signal ; + raise (Snark_worker_signal_interrupt signal) ) | Error exn -> [%log info] !"Exception when waiting for snark worker process to terminate: \ $exn" - ~metadata:[("exn", `String (Exn.to_string exn))] ; + ~metadata:[ ("exn", `String (Exn.to_string exn)) ] ; Deferred.unit ) ; [%log trace] !"Created snark worker with pid: %i" @@ -243,7 +250,7 @@ module Snark_worker = struct let start t = match t.processes.snark_worker with - | `On ({process= process_ivar; kill_ivar; _}, _) -> + | `On ({ process = process_ivar; kill_ivar; _ }, _) -> [%log' debug t.config.logger] !"Starting snark worker process" ; log_snark_worker_warning t ; let%map snark_worker_process = @@ -255,7 +262,8 @@ module Snark_worker = struct [%log' debug t.config.logger] ~metadata: [ ( "snark_worker_pid" - , `Int (Pid.to_int (Process.pid snark_worker_process)) ) ] + , `Int (Pid.to_int (Process.pid snark_worker_process)) ) + ] "Started snark worker process with pid: $snark_worker_pid" ; if Ivar.is_full process_ivar then [%log' error t.config.logger] "Ivar.fill bug is here!" 
; @@ -268,12 +276,12 @@ module Snark_worker = struct let stop ?(should_wait_kill = false) t = match t.processes.snark_worker with - | `On ({public_key= _; process; kill_ivar}, _) -> + | `On ({ public_key = _; process; kill_ivar }, _) -> let%bind process = Ivar.read process in [%log' info t.config.logger] "Killing snark worker process with pid: $snark_worker_pid" ~metadata: - [("snark_worker_pid", `Int (Pid.to_int (Process.pid process)))] ; + [ ("snark_worker_pid", `Int (Pid.to_int (Process.pid process))) ] ; Signal.send_exn Signal.term (`Pid (Process.pid process)) ; if should_wait_kill then Ivar.read kill_ivar else Deferred.unit | `Off _ -> @@ -281,14 +289,15 @@ module Snark_worker = struct "Attempted to turn off snark worker, but no snark worker was running" ; Deferred.unit - let get_key {processes= {snark_worker; _}; _} = + let get_key { processes = { snark_worker; _ }; _ } = match snark_worker with - | `On ({public_key; _}, _) -> + | `On ({ public_key; _ }, _) -> Some public_key | `Off _ -> None - let replace_key ({processes= {snark_worker; _}; config= {logger; _}; _} as t) + let replace_key + ({ processes = { snark_worker; _ }; config = { logger; _ }; _ } as t) new_key = match (snark_worker, new_key) with | `Off _, None -> @@ -299,17 +308,18 @@ module Snark_worker = struct | `Off fee, Some new_key -> let process = Ivar.create () in let kill_ivar = Ivar.create () in - t.processes.snark_worker - <- `On ({public_key= new_key; process; kill_ivar}, fee) ; + t.processes.snark_worker <- + `On ({ public_key = new_key; process; kill_ivar }, fee) ; start t - | `On ({public_key= old; process; kill_ivar}, fee), Some new_key -> + | `On ({ public_key = old; process; kill_ivar }, fee), Some new_key -> [%log debug] !"Changing snark worker key from $old to $new" ~metadata: [ ("old", Public_key.Compressed.to_yojson old) - ; ("new", Public_key.Compressed.to_yojson new_key) ] ; - t.processes.snark_worker - <- `On ({public_key= new_key; process; kill_ivar}, fee) ; + ; ("new", 
Public_key.Compressed.to_yojson new_key) + ] ; + t.processes.snark_worker <- + `On ({ public_key = new_key; process; kill_ivar }, fee) ; Deferred.unit | `On (_, fee), None -> let%map () = stop t in @@ -368,7 +378,7 @@ let active_or_bootstrapping = compose_of_option (fun t -> Option.bind (Broadcast_pipe.Reader.peek t.components.transition_frontier) - ~f:(Fn.const (Some ())) ) + ~f:(Fn.const (Some ()))) (* This is a hack put in place to deal with nodes getting stuck in Offline states, that is, not receiving blocks for an extended period. @@ -397,7 +407,7 @@ let create_sync_status_observer ~logger ~is_seed ~demo_mode ~net (Async.Clock.Event.run_after restart_delay (fun () -> Mina_networking.restart_helper net ; - next_helper_restart := None ) + next_helper_restart := None) ()) | Some _ -> () ) ; @@ -411,7 +421,7 @@ let create_sync_status_observer ~logger ~is_seed ~demo_mode ~net else `Offline | `Online -> ( Option.iter !next_helper_restart ~f:(fun e -> - Async.Clock.Event.abort_if_possible e () ) ; + Async.Clock.Event.abort_if_possible e ()) ; next_helper_restart := None ; match active_status with | None -> @@ -425,7 +435,7 @@ let create_sync_status_observer ~logger ~is_seed ~demo_mode ~net `Catchup ) else ( [%str_log info] Synced ; - `Synced ) ) ) + `Synced ) )) in let observer = observe incremental_status in (* monitor Mina status, issue a warning if offline for too long (unless we are a seed node) *) @@ -456,8 +466,8 @@ let create_sync_status_observer ~logger ~is_seed ~demo_mode ~net | Some timeout -> if !offline_warned then ( [%log info] - "Daemon had been offline (no gossip messages received), now \ - back online" ; + "Daemon had been offline (no gossip messages received), now back \ + online" ; offline_warned := false ) ; Timeout.cancel () timeout () ; offline_timeout := None @@ -475,7 +485,7 @@ let create_sync_status_observer ~logger ~is_seed ~demo_mode ~net | Changed (_, value) -> handle_status_change value | Invalidated -> - () ) ) ; + ()) ) ; (* recompute 
Mina status on an interval *) stabilize () ; every (Time.Span.of_sec 15.0) ~stop:(never ()) stabilize ; @@ -557,43 +567,43 @@ let get_snarked_ledger t state_hash_opt = (State_hash.to_string (Transition_frontier.Breadcrumb.state_hash b)))) | Some txns -> ( - match - List.fold_until ~init:(Ok ()) - (Non_empty_list.to_list txns) - ~f:(fun _acc (txn, state_hash) -> - (*Validate transactions against the protocol state associated with the transaction*) - match - Transition_frontier.find_protocol_state frontier - state_hash - with - | Some state -> ( - let txn_state_view = - Mina_state.Protocol_state.body state - |> Mina_state.Protocol_state.Body.view - in - match - Ledger.apply_transaction - ~constraint_constants: - t.config.precomputed_values - .constraint_constants ~txn_state_view ledger - txn.data - with - | Ok _ -> - Continue (Ok ()) - | e -> - Stop (Or_error.map e ~f:ignore) ) - | None -> - Stop - (Or_error.errorf - !"Coudln't find protocol state with hash %s" - (State_hash.to_string state_hash)) ) - ~finish:Fn.id - with - | Ok _ -> - Continue (Ok ()) - | e -> - Stop e ) - else Continue (Ok ()) ) + match + List.fold_until ~init:(Ok ()) + (Non_empty_list.to_list txns) + ~f:(fun _acc (txn, state_hash) -> + (*Validate transactions against the protocol state associated with the transaction*) + match + Transition_frontier.find_protocol_state frontier + state_hash + with + | Some state -> ( + let txn_state_view = + Mina_state.Protocol_state.body state + |> Mina_state.Protocol_state.Body.view + in + match + Ledger.apply_transaction + ~constraint_constants: + t.config.precomputed_values + .constraint_constants ~txn_state_view ledger + txn.data + with + | Ok _ -> + Continue (Ok ()) + | e -> + Stop (Or_error.map e ~f:ignore) ) + | None -> + Stop + (Or_error.errorf + !"Coudln't find protocol state with hash %s" + (State_hash.to_string state_hash))) + ~finish:Fn.id + with + | Ok _ -> + Continue (Ok ()) + | e -> + Stop e ) + else Continue (Ok ())) ~finish:Fn.id in let 
snarked_ledger_hash = @@ -670,12 +680,12 @@ let snark_work_fee t = match t.processes.snark_worker with `On (_, fee) -> fee | `Off fee -> fee let set_snark_work_fee t new_fee = - t.processes.snark_worker - <- ( match t.processes.snark_worker with - | `On (config, _) -> - `On (config, new_fee) - | `Off _ -> - `Off new_fee ) + t.processes.snark_worker <- + ( match t.processes.snark_worker with + | `On (config, _) -> + `On (config, new_fee) + | `Off _ -> + `Off new_fee ) let top_level_logger t = t.config.logger @@ -693,8 +703,9 @@ module Root_diff = struct module Stable = struct module V1 = struct type t = - { commands: User_command.Stable.V1.t With_status.Stable.V1.t list - ; root_length: int } + { commands : User_command.Stable.V1.t With_status.Stable.V1.t list + ; root_length : int + } let to_latest = Fn.id end @@ -707,7 +718,7 @@ let initialization_finish_signal t = t.initialization_finish_signal * - uses an abstraction leak to patch new functionality instead of making a new extension * - every call to this function will create a new, unique pipe with it's own thread for transfering * items from the identity extension with no route for termination -*) + *) let root_diff t = let root_diff_reader, root_diff_writer = Strict_pipe.create ~name:"root diff" @@ -725,11 +736,12 @@ let root_diff t = | Some frontier -> let root = Transition_frontier.root frontier in Strict_pipe.Writer.write root_diff_writer - { commands= + { commands = List.map (Transition_frontier.Breadcrumb.commands root) ~f:(With_status.map ~f:User_command.forget_check) - ; root_length= length_of_breadcrumb root } ; + ; root_length = length_of_breadcrumb root + } ; Broadcast_pipe.Reader.iter Transition_frontier.( Extensions.(get_view_pipe (extensions frontier) Identity)) @@ -742,7 +754,7 @@ let root_diff t = (Best_tip_changed _, _) -> Deferred.unit | Transition_frontier.Diff.Full.With_mutant.E - (Root_transitioned {new_root; _}, _) -> + (Root_transitioned { new_root; _ }, _) -> let root_hash = 
Transition_frontier.Root_data.Limited.hash new_root in @@ -750,16 +762,16 @@ let root_diff t = Transition_frontier.(find_exn frontier root_hash) in Strict_pipe.Writer.write root_diff_writer - { commands= + { commands = Transition_frontier.Breadcrumb.commands new_root_breadcrumb |> List.map ~f: (With_status.map ~f:User_command.forget_check) - ; root_length= length_of_breadcrumb new_root_breadcrumb + ; root_length = length_of_breadcrumb new_root_breadcrumb } ; - Deferred.unit )) ) ) ; + Deferred.unit)))) ; root_diff_reader let dump_tf t = @@ -784,8 +796,8 @@ let best_chain ?max_length t = match max_length with | Some max_length when max_length <= List.length best_tip_path -> (* The [best_tip_path] has already been truncated to the correct length, - we skip adding the root to stay below the maximum. - *) + we skip adding the root to stay below the maximum. + *) best_tip_path | _ -> Transition_frontier.root frontier :: best_tip_path @@ -798,7 +810,7 @@ let request_work t = ~snark_pool:(snark_pool t) (snark_job_state t) in Option.map instances_opt ~f:(fun instances -> - {Snark_work_lib.Work.Spec.instances; fee} ) + { Snark_work_lib.Work.Spec.instances; fee }) let work_selection_method t = t.config.work_selection_method @@ -844,7 +856,7 @@ let get_current_nonce t aid = let ledger_nonce = Participating_state.active (get_account t aid) |> Option.join - |> Option.map ~f:(fun {Account.Poly.nonce; _} -> nonce) + |> Option.map ~f:(fun { Account.Poly.nonce; _ } -> nonce) |> Option.value ~default:nonce in Ok (`Min ledger_nonce, nonce) @@ -950,9 +962,7 @@ let perform_compaction t = | None -> 6000. in - let span ?(incr = 0.) ms = - Float.(of_int ms +. incr) |> Time.Span.of_ms - in + let span ?(incr = 0.) ms = Float.(of_int ms +. 
incr) |> Time.Span.of_ms in let interval_configured = match Sys.getenv "MINA_COMPACTION_INTERVAL_MS" with | Some ms -> @@ -978,7 +988,7 @@ let perform_compaction t = Gc.compact () ; let span = Time.diff (Time.now ()) start in [%log' debug t.config.logger] - ~metadata:[("time", `Float (Time.Span.to_ms span))] + ~metadata:[ ("time", `Float (Time.Span.to_ms span)) ] "Gc.compact took $time ms" in let rec perform interval = @@ -995,7 +1005,7 @@ let perform_compaction t = perform (span slot_duration_ms ~incr:ms) else ( call_compact () ; - perform interval_configured ) ) + perform interval_configured )) in perform interval_configured @@ -1004,27 +1014,29 @@ let start t = let block_production_status, next_producer_timing = let generated_from_consensus_at : Daemon_rpcs.Types.Status.Next_producer_timing.slot = - { slot= Consensus.Data.Consensus_state.curr_global_slot consensus_state - ; global_slot_since_genesis= + { slot = Consensus.Data.Consensus_state.curr_global_slot consensus_state + ; global_slot_since_genesis = Consensus.Data.Consensus_state.global_slot_since_genesis - consensus_state } + consensus_state + } in let info time (data : Consensus.Data.Block_data.t) : Daemon_rpcs.Types.Status.Next_producer_timing.producing_time = let for_slot : Daemon_rpcs.Types.Status.Next_producer_timing.slot = - { slot= Consensus.Data.Block_data.global_slot data - ; global_slot_since_genesis= - Consensus.Data.Block_data.global_slot_since_genesis data } + { slot = Consensus.Data.Block_data.global_slot data + ; global_slot_since_genesis = + Consensus.Data.Block_data.global_slot_since_genesis data + } in - {time; for_slot} + { time; for_slot } in let status, timing = match timing with | `Check_again time -> ( `Free , Daemon_rpcs.Types.Status.Next_producer_timing.Check_again - ( time |> Block_time.Span.of_ms - |> Block_time.of_span_since_epoch ) ) + (time |> Block_time.Span.of_ms |> Block_time.of_span_since_epoch) + ) | `Produce_now (block_data, _) -> let info : 
Daemon_rpcs.Types.Status.Next_producer_timing.producing_time = @@ -1048,7 +1060,8 @@ let start t = in ( status , { Daemon_rpcs.Types.Status.Next_producer_timing.timing - ; generated_from_consensus_at } ) + ; generated_from_consensus_at + } ) in t.block_production_status := block_production_status ; t.next_producer_timing <- Some next_producer_timing @@ -1080,19 +1093,19 @@ let start_with_precomputed_blocks t blocks = ~time_controller:t.config.time_controller ~frontier_reader:t.components.transition_frontier ~transition_writer:t.pipes.producer_transition_writer - ~precomputed_values:t.config.precomputed_values - ~precomputed_blocks:blocks + ~precomputed_values:t.config.precomputed_values ~precomputed_blocks:blocks in start t let send_resource_pool_diff_or_wait ~rl ~diff_score ~max_per_15_seconds diff = (* HACK: Pretend we're a remote peer so that we can rate limit ourselves. - *) + *) let us = - { Network_peer.Peer.host= Unix.Inet_addr.of_string "127.0.0.1" - ; libp2p_port= 0 - ; peer_id= "" } + { Network_peer.Peer.host = Unix.Inet_addr.of_string "127.0.0.1" + ; libp2p_port = 0 + ; peer_id = "" + } in let score = diff_score diff in let rec able_to_send_or_wait () = @@ -1108,7 +1121,7 @@ let send_resource_pool_diff_or_wait ~rl ~diff_score ~max_per_15_seconds diff = ignore ( Network_pool.Rate_limiter.add rl (Remote us) ~now:(Time.now ()) ~score:0 - : [`Within_capacity | `Capacity_exceeded] ) ; + : [ `Within_capacity | `Capacity_exceeded ] ) ; Deferred.return () ) else let%bind () = @@ -1136,13 +1149,13 @@ let create ?wallets (config : Config.t) = let err = Error.of_exn ~backtrace:`Get exn in [%log' warn config.logger] "unhandled exception from daemon-side prover server: $exn" - ~metadata:[("exn", Error_json.error_to_yojson err)] )) + ~metadata:[ ("exn", Error_json.error_to_yojson err) ])) (fun () -> trace "prover" (fun () -> Prover.create ~logger:config.logger ~proof_level:config.precomputed_values.proof_level ~constraint_constants ~pids:config.pids - 
~conf_dir:config.conf_dir ) ) + ~conf_dir:config.conf_dir)) >>| Result.ok_exn in let%bind verifier = @@ -1154,37 +1167,36 @@ let create ?wallets (config : Config.t) = [%log' warn config.logger] "unhandled exception from daemon-side verifier server: \ $exn" - ~metadata:[("exn", Error_json.error_to_yojson err)] )) + ~metadata:[ ("exn", Error_json.error_to_yojson err) ])) (fun () -> trace "verifier" (fun () -> Verifier.create ~logger:config.logger ~proof_level:config.precomputed_values.proof_level ~constraint_constants: config.precomputed_values.constraint_constants - ~pids:config.pids ~conf_dir:(Some config.conf_dir) ) ) + ~pids:config.pids ~conf_dir:(Some config.conf_dir))) >>| Result.ok_exn in let snark_worker = - Option.value_map - config.snark_worker_config.initial_snark_worker_key + Option.value_map config.snark_worker_config.initial_snark_worker_key ~default:(`Off config.snark_work_fee) ~f:(fun public_key -> `On ( { public_key - ; process= Ivar.create () - ; kill_ivar= Ivar.create () } - , config.snark_work_fee ) ) + ; process = Ivar.create () + ; kill_ivar = Ivar.create () + } + , config.snark_work_fee )) in log_snark_coordinator_warning config snark_worker ; Protocol_version.set_current config.initial_protocol_version ; - Protocol_version.set_proposed_opt - config.proposed_protocol_version_opt ; + Protocol_version.set_proposed_opt config.proposed_protocol_version_opt ; let log_rate_limiter_occasionally rl ~label = let t = Time.Span.of_min 1. in every t (fun () -> [%log' debug config.logger] ~metadata: - [("rate_limiter", Network_pool.Rate_limiter.summary rl)] - !"%s $rate_limiter" label ) + [ ("rate_limiter", Network_pool.Rate_limiter.summary rl) ] + !"%s $rate_limiter" label) in let external_transitions_reader, external_transitions_writer = let rl = @@ -1207,12 +1219,13 @@ let create ?wallets (config : Config.t) = | `Capacity_exceeded -> [%log' warn config.logger] "$sender has sent many blocks. This is very unusual." 
- ~metadata:[("sender", Envelope.Sender.to_yojson sender)] ; - Mina_net2.Validation_callback.fire_if_not_already_fired - cb `Reject ; + ~metadata: + [ ("sender", Envelope.Sender.to_yojson sender) ] ; + Mina_net2.Validation_callback.fire_if_not_already_fired cb + `Reject ; None | `Within_capacity -> - Some x ) + Some x) , w ) in let producer_transition_reader, producer_transition_writer = @@ -1227,7 +1240,7 @@ let create ?wallets (config : Config.t) = | None -> Deferred.unit | Some frontier -> - Transition_frontier.close ~loc:__LOC__ frontier ) ; + Transition_frontier.close ~loc:__LOC__ frontier) ; let handle_request name ~f query_env = trace_recurring name (fun () -> let input = Envelope.Incoming.data query_env in @@ -1237,7 +1250,7 @@ let create ?wallets (config : Config.t) = let%bind frontier = Broadcast_pipe.Reader.peek frontier_broadcast_pipe_r in - f ~frontier input ) + f ~frontier input) in (* knot-tying hacks so we can pass a get_node_status function before net, Mina_lib.t created *) let net_ref = ref None in @@ -1247,8 +1260,8 @@ let create ?wallets (config : Config.t) = ~f:(fun kps -> Keypair.Set.to_list kps |> List.map ~f:(fun kp -> - (kp, Public_key.compress kp.Keypair.public_key) ) - |> Keypair.And_compressed_pk.Set.of_list ) + (kp, Public_key.compress kp.Keypair.public_key)) + |> Keypair.And_compressed_pk.Set.of_list) config.initial_block_production_keypairs in let get_node_status _env = @@ -1258,7 +1271,7 @@ let create ?wallets (config : Config.t) = let peer_opt = config.gossip_net_params.addrs_and_ports.peer in let node_peer_id = Option.value_map peer_opt ~default:"" ~f:(fun peer -> - peer.peer_id ) + peer.peer_id) in if config.disable_node_status then Deferred.return @@ -1278,9 +1291,9 @@ let create ?wallets (config : Config.t) = @@ Error (Error.of_string (sprintf - !"Node with IP address=%{sexp: \ - Unix.Inet_addr.t}, peer ID=%s, network not \ - instantiated when node status requested" + !"Node with IP address=%{sexp: Unix.Inet_addr.t}, \ + peer 
ID=%s, network not instantiated when node \ + status requested" node_ip_addr node_peer_id)) | Some net -> let ( protocol_state_hash @@ -1290,8 +1303,7 @@ let create ?wallets (config : Config.t) = Broadcast_pipe.Reader.peek frontier_broadcast_pipe_r with | None -> - ( config.precomputed_values.protocol_state_with_hash - .hash + ( config.precomputed_values.protocol_state_with_hash.hash , None , [] ) | Some frontier -> @@ -1315,7 +1327,7 @@ let create ?wallets (config : Config.t) = ~default:"no timestamp available" ~f: (Time.to_string_iso8601_basic - ~zone:Time.Zone.utc) ) ) + ~zone:Time.Zone.utc) )) in ( protocol_state_hash , Some tip @@ -1378,7 +1390,8 @@ let create ?wallets (config : Config.t) = ; k_block_hashes_and_timestamps ; git_commit ; uptime_minutes - ; block_height_opt } + ; block_height_opt + } in let get_some_initial_peers _ = match !net_ref with @@ -1426,14 +1439,13 @@ let create ?wallets (config : Config.t) = ( scan_state , expected_merkle_root , pending_coinbases - , protocol_states ) ) ) + , protocol_states ))) ~answer_sync_ledger_query:(fun query_env -> let open Deferred.Or_error.Let_syntax in trace_recurring "answer_sync_ledger_query" (fun () -> let ledger_hash, _ = Envelope.Incoming.data query_env in let%bind frontier = - Deferred.return - @@ peek_frontier frontier_broadcast_pipe_r + Deferred.return @@ peek_frontier frontier_broadcast_pipe_r in Sync_handler.answer_query ~frontier ledger_hash (Envelope.Incoming.map ~f:Tuple2.get2 query_env) @@ -1446,7 +1458,7 @@ let create ?wallets (config : Config.t) = (Error.createf !"%s for ledger_hash: %{sexp:Ledger_hash.t}" Mina_networking.refused_answer_query_string - ledger_hash)) ) ) + ledger_hash)))) ~get_ancestry: (handle_request "get_ancestry" ~f: @@ -1460,12 +1472,13 @@ let create ?wallets (config : Config.t) = Best_tip_prover.prove ~logger:config.logger frontier in { proof_with_data with - data= With_hash.data proof_with_data.data } )) + data = With_hash.data proof_with_data.data + })) ~get_node_status 
~get_transition_chain_proof: (handle_request "get_transition_chain_proof" ~f:(fun ~frontier hash -> - Transition_chain_prover.prove ~frontier hash )) + Transition_chain_prover.prove ~frontier hash)) ~get_transition_chain: (handle_request "get_transition_chain" ~f:Sync_handler.get_transition_chain) @@ -1477,7 +1490,7 @@ let create ?wallets (config : Config.t) = | None -> [] | Some frontier -> - Sync_handler.best_tip_path ~frontier ) ) + Sync_handler.best_tip_path ~frontier )) in (* tie the first knot *) net_ref := Some net ; @@ -1521,17 +1534,16 @@ let create ?wallets (config : Config.t) = (*callback for the result from transaction_pool.apply_diff*) Strict_pipe.Writer.write local_txns_writer ( List.map user_commands ~f:(fun c -> - User_command.Signed_command c ) + User_command.Signed_command c) , result_cb ) | Error e -> [%log' error config.logger] "Failed to submit user commands: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] ; result_cb (Error e) ; - Deferred.unit ) + Deferred.unit) |> Deferred.don't_wait_for ; - let ((most_recent_valid_block_reader, _) as most_recent_valid_block) - = + let ((most_recent_valid_block_reader, _) as most_recent_valid_block) = Broadcast_pipe.create ( External_transition.genesis ~precomputed_values:config.precomputed_values @@ -1579,7 +1591,7 @@ let create ?wallets (config : Config.t) = Mina_metrics.Block_latency.Gossip_time.update Block_time.( Span.to_time_span @@ diff tm tn_production_time) ; - (`Transition tn, `Time_received tm, `Valid_cb cb) )) + (`Transition tn, `Time_received tm, `Valid_cb cb))) ~producer_transition_reader: (Strict_pipe.Reader.map producer_transition_reader ~f:(fun breadcrumb -> @@ -1606,9 +1618,9 @@ let create ?wallets (config : Config.t) = Mina_networking.broadcast_state net (External_transition.Validation .forget_validation_with_hash et)) ; - breadcrumb )) + breadcrumb)) ~most_recent_valid_block - 
~precomputed_values:config.precomputed_values ) + ~precomputed_values:config.precomputed_values) in let ( valid_transitions_for_network , valid_transitions_for_api @@ -1637,7 +1649,7 @@ let create ?wallets (config : Config.t) = .max_per_15_seconds x in Mina_networking.broadcast_transaction_pool_diff net x ; - Deferred.unit ) ) ; + Deferred.unit)) ; trace_task "valid_transitions_for_network broadcast loop" (fun () -> Strict_pipe.Reader.iter_without_pushback valid_transitions_for_network @@ -1662,15 +1674,16 @@ let create ?wallets (config : Config.t) = (*Don't log rebroadcast message if it is internally generated; There is a broadcast log for it*) if not - ([%equal: [`Catchup | `Gossip | `Internal]] source + ([%equal: [ `Catchup | `Gossip | `Internal ]] source `Internal) then [%str_log' info config.logger] ~metadata: [ ( "external_transition" , External_transition.Validated.to_yojson - transition ) ] - (Rebroadcast_transition {state_hash= hash}) ; + transition ) + ] + (Rebroadcast_transition { state_hash = hash }) ; External_transition.Validated.broadcast transition | Error reason -> ( let timing_error_json = @@ -1685,7 +1698,8 @@ let create ?wallets (config : Config.t) = ; ( "external_transition" , External_transition.Validated.to_yojson transition ) - ; ("timing", timing_error_json) ] + ; ("timing", timing_error_json) + ] in External_transition.Validated.don't_broadcast transition ; match source with @@ -1699,7 +1713,7 @@ let create ?wallets (config : Config.t) = | `Gossip -> [%log' warn config.logger] ~metadata "Not rebroadcasting block $state_hash because it \ - was received $timing" ) ) ) ; + was received $timing" ))) ; don't_wait_for (Strict_pipe.transfer (Mina_networking.states net) @@ -1761,24 +1775,24 @@ let create ?wallets (config : Config.t) = .max_per_15_seconds x in Mina_networking.broadcast_snark_pool_diff net x ; - Deferred.unit ) ) ; + Deferred.unit)) ; Option.iter config.archive_process_location ~f:(fun archive_process_port -> [%log' info 
config.logger] "Communicating with the archive process" ~metadata: [ ( "Host" - , `String (Host_and_port.host archive_process_port.value) - ) + , `String (Host_and_port.host archive_process_port.value) ) ; ( "Port" - , `Int (Host_and_port.port archive_process_port.value) ) ] ; + , `Int (Host_and_port.port archive_process_port.value) ) + ] ; Archive_client.run ~logger:config.logger ~frontier_broadcast_pipe:frontier_broadcast_pipe_r - archive_process_port ) ; + archive_process_port) ; let precomputed_block_writer = ref ( Option.map config.precomputed_blocks_path ~f:(fun path -> - `Path path ) + `Path path) , if config.log_precomputed_blocks then Some `Log else None ) in let subscriptions = @@ -1822,34 +1836,37 @@ let create ?wallets (config : Config.t) = sync_status_ref := Some sync_status ; Deferred.return { config - ; next_producer_timing= None - ; processes= {prover; verifier; snark_worker} + ; next_producer_timing = None + ; processes = { prover; verifier; snark_worker } ; initialization_finish_signal - ; components= + ; components = { net ; transaction_pool ; snark_pool - ; transition_frontier= frontier_broadcast_pipe_r - ; most_recent_valid_block= most_recent_valid_block_reader } - ; pipes= - { validated_transitions_reader= valid_transitions_for_api + ; transition_frontier = frontier_broadcast_pipe_r + ; most_recent_valid_block = most_recent_valid_block_reader + } + ; pipes = + { validated_transitions_reader = valid_transitions_for_api ; producer_transition_writer - ; external_transitions_writer= + ; external_transitions_writer = Strict_pipe.Writer.to_linear_pipe external_transitions_writer ; user_command_input_writer - ; user_command_writer= local_txns_writer - ; local_snark_work_writer } + ; user_command_writer = local_txns_writer + ; local_snark_work_writer + } ; wallets ; block_production_keypairs - ; coinbase_receiver= ref config.coinbase_receiver - ; snark_job_state= snark_jobs_state + ; coinbase_receiver = ref config.coinbase_receiver + ; snark_job_state 
= snark_jobs_state ; subscriptions ; sync_status ; precomputed_block_writer - ; block_production_status= ref `Free } ) ) + ; block_production_status = ref `Free + })) -let net {components= {net; _}; _} = net +let net { components = { net; _ }; _ } = net -let runtime_config {config= {precomputed_values; _}; _} = +let runtime_config { config = { precomputed_values; _ }; _ } = Genesis_ledger_helper.runtime_config_of_precomputed_values precomputed_values diff --git a/src/lib/mina_lib/mina_lib.mli b/src/lib/mina_lib/mina_lib.mli index fba28df1e33..0873bcddcdb 100644 --- a/src/lib/mina_lib/mina_lib.mli +++ b/src/lib/mina_lib/mina_lib.mli @@ -18,7 +18,7 @@ type Structured_log_events.t += | Bootstrapping | Ledger_catchup | Synced - | Rebroadcast_transition of {state_hash: State_hash.t} + | Rebroadcast_transition of { state_hash : State_hash.t } [@@deriving register_event] exception Snark_worker_error of int @@ -86,7 +86,7 @@ val snark_job_state : t -> Work_selector.State.t val get_current_nonce : t -> Account_id.t - -> ([> `Min of Account.Nonce.t] * Account.Nonce.t, string) result + -> ([> `Min of Account.Nonce.t ] * Account.Nonce.t, string) result val add_transactions : t @@ -137,8 +137,9 @@ module Root_diff : sig module Stable : sig module V1 : sig type t = - { commands: User_command.Stable.V1.t With_status.Stable.V1.t list - ; root_length: int } + { commands : User_command.Stable.V1.t With_status.Stable.V1.t list + ; root_length : int + } end end] end diff --git a/src/lib/mina_metrics/metric_generators.ml b/src/lib/mina_metrics/metric_generators.ml index 751e8ff71fc..6ce738c34db 100644 --- a/src/lib/mina_metrics/metric_generators.ml +++ b/src/lib/mina_metrics/metric_generators.ml @@ -71,7 +71,7 @@ module Moving_bucketed_average (Spec : Bucketed_average_spec_intf) () : Gauge.set v (render_average buckets_val) ; buckets := Some (empty_bucket_entry :: List.take buckets_val (num_buckets - 1)) ; - tick () ) + tick ()) in tick () end @@ -89,7 +89,8 @@ module 
Moving_time_average (Spec : Time_average_spec_intf) () : "invalid intervals provided to Moving_time_average -- the \ tick_interval does not evenly divide the rolling_interval" - include Moving_bucketed_average (struct + include Moving_bucketed_average + (struct include Spec let bucket_interval = tick_interval @@ -103,7 +104,7 @@ module Moving_time_average (Spec : Time_average_spec_intf) () : let total_sum, count_sum = List.fold buckets ~init:(0.0, 0) ~f:(fun (total_sum, count_sum) (total, count) -> - (total_sum +. total, count_sum + count) ) + (total_sum +. total, count_sum + count)) in total_sum /. Float.of_int count_sum end) diff --git a/src/lib/mina_metrics/mina_metrics.ml b/src/lib/mina_metrics/mina_metrics.ml index 4fdab972c87..8989a21db4f 100644 --- a/src/lib/mina_metrics/mina_metrics.ml +++ b/src/lib/mina_metrics/mina_metrics.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Prometheus @@ -69,7 +68,7 @@ module TextFormat_0_0_4 = struct Fmt.pf f "{%a}" output_pairs (label_names, label_values) let output_sample ~base ~label_names ~label_values f - {Sample_set.ext; value; bucket} = + { Sample_set.ext; value; bucket } = let label_names, label_values = match bucket with | None -> @@ -83,17 +82,15 @@ module TextFormat_0_0_4 = struct label_values output_value value let output_metric ~name ~label_names f (label_values, samples) = - List.iter samples - ~f:(output_sample ~base:name ~label_names ~label_values f) + List.iter samples ~f:(output_sample ~base:name ~label_names ~label_values f) let output f = MetricFamilyMap.iter (fun metric samples -> - let {MetricInfo.name; metric_type; help; label_names} = metric in + let { MetricInfo.name; metric_type; help; label_names } = metric in Fmt.pf f "#HELP %a %a@.#TYPE %a %a@.%a" MetricName.pp name - output_unquoted help MetricName.pp name output_metric_type - metric_type + output_unquoted help MetricName.pp name output_metric_type metric_type (LabelSetMap.pp ~sep:Fmt.nop 
(output_metric ~name ~label_names)) - samples ) + samples) end module Runtime = struct @@ -128,9 +125,13 @@ module Runtime = struct let simple_metric ~metric_type ~help name fn = let name = Printf.sprintf "%s_%s_%s" namespace subsystem name in let info = - {MetricInfo.name= MetricName.v name; help; metric_type; label_names= []} + { MetricInfo.name = MetricName.v name + ; help + ; metric_type + ; label_names = [] + } in - let collect () = LabelSetMap.singleton [] [Sample_set.sample (fn ())] in + let collect () = LabelSetMap.singleton [] [ Sample_set.sample (fn ()) ] in (info, collect) let ocaml_gc_allocated_bytes = @@ -228,7 +229,7 @@ module Runtime = struct let process_uptime_ms_total = simple_metric ~metric_type:Counter "process_uptime_ms_total" (fun () -> - Core.Time.Span.to_ms (Core.Time.diff (Core.Time.now ()) start_time) ) + Core.Time.Span.to_ms (Core.Time.diff (Core.Time.now ()) start_time)) ~help:"Total time the process has been running for in milliseconds." let metrics = @@ -249,13 +250,14 @@ module Runtime = struct ; jemalloc_allocated_bytes ; jemalloc_mapped_bytes ; process_cpu_seconds_total - ; process_uptime_ms_total ] + ; process_uptime_ms_total + ] let () = let open CollectorRegistry in register_pre_collect default update ; List.iter metrics ~f:(fun (info, collector) -> - register default info collector ) + register default info collector) end module Cryptography = struct @@ -286,9 +288,9 @@ module Cryptography = struct ~subsystem (* TODO: - let transaction_proving_time_ms = - let help = "time elapsed while proving most recently generated transaction snark" in - Gauge.v "transaction_proving_time_ms" ~help ~namespace ~subsystem + let transaction_proving_time_ms = + let help = "time elapsed while proving most recently generated transaction snark" in + Gauge.v "transaction_proving_time_ms" ~help ~namespace ~subsystem *) end @@ -334,9 +336,7 @@ struct let add t ~name ~help : Metric.t = if Metric_name_map.mem t name then Metric_name_map.find_exn t name 
else - let metric = - Metric.v ~help ~namespace ~subsystem:Metric.subsystem name - in + let metric = Metric.v ~help ~namespace ~subsystem:Metric.subsystem name in Metric_name_map.add_exn t ~key:name ~data:metric ; metric end @@ -406,8 +406,7 @@ module Network = struct let rpc_latency_ms_summary : Rpc_latency_histogram.t = let help = "A histogram for all RPC call latencies" in - Rpc_latency_histogram.v "rpc_latency_ms_summary" ~help ~namespace - ~subsystem + Rpc_latency_histogram.v "rpc_latency_ms_summary" ~help ~namespace ~subsystem end module Snark_work = struct @@ -421,8 +420,7 @@ module Snark_work = struct let completed_snark_work_received_gossip : Counter.t = let help = "# of completed snark work bundles received from peers" in - Counter.v "completed_snark_work_received_gossip" ~help ~namespace - ~subsystem + Counter.v "completed_snark_work_received_gossip" ~help ~namespace ~subsystem let completed_snark_work_received_rpc : Counter.t = let help = "# of completed snark work bundles received via rpc" in @@ -531,9 +529,9 @@ module Consensus = struct let subsystem = "Consensus" (* TODO: - let vrf_threshold = - let help = "vrf threshold expressed as % to win (in range 0..1)" in - Gauge.v "vrf_threshold" ~help ~namespace ~subsystem + let vrf_threshold = + let help = "vrf threshold expressed as % to win (in range 0..1)" in + Gauge.v "vrf_threshold" ~help ~namespace ~subsystem *) let vrf_evaluations : Counter.t = @@ -598,8 +596,8 @@ module Transition_frontier = struct let slot_fill_rate : Gauge.t = let help = - "fill rate for the last k slots (or fewer if there have not been k \ - slots between the best tip and the frontier root)" + "fill rate for the last k slots (or fewer if there have not been k slots \ + between the best tip and the frontier root)" in Gauge.v "slot_fill_rate" ~help ~namespace ~subsystem @@ -628,7 +626,8 @@ module Transition_frontier = struct Counter.v "finalized_staged_txns" ~help ~namespace ~subsystem module TPS_30min = - Moving_bucketed_average 
(struct + Moving_bucketed_average + (struct let bucket_interval = Core.Time.Span.of_min 3.0 let num_buckets = 10 @@ -692,13 +691,13 @@ module Transition_frontier = struct Gauge.v "best_tip_slot_time_sec" ~help ~namespace ~subsystem (* TODO: - let recently_finalized_snarked_txns : Gauge.t = - let help = "toal # of snarked txns that have been finalized" in - Gauge.v "finalized_snarked_txns" ~help ~namespace ~subsystem + let recently_finalized_snarked_txns : Gauge.t = + let help = "toal # of snarked txns that have been finalized" in + Gauge.v "finalized_snarked_txns" ~help ~namespace ~subsystem - let recently_finalized_snarked_txns : Gauge.t = - let help = "# of snarked txns that were finalized during the last transition frontier root transition" in - Gauge.v "recently_finalized_snarked_txns" ~help ~namespace ~subsystem + let recently_finalized_snarked_txns : Gauge.t = + let help = "# of snarked txns that were finalized during the last transition frontier root transition" in + Gauge.v "recently_finalized_snarked_txns" ~help ~namespace ~subsystem *) let root_snarked_ledger_accounts : Gauge.t = @@ -710,13 +709,13 @@ module Transition_frontier = struct Gauge.v "root_snarked_ledger_total_currency" ~help ~namespace ~subsystem (* TODO: - let root_staged_ledger_accounts : Gauge.t = - let help = "# of accounts in transition frontier root staged ledger" in - Gauge.v "root_staged_ledger_accounts" ~help ~namespace ~subsystem + let root_staged_ledger_accounts : Gauge.t = + let help = "# of accounts in transition frontier root staged ledger" in + Gauge.v "root_staged_ledger_accounts" ~help ~namespace ~subsystem - let root_staged_ledger_total_currency : Gauge.t = - let help = "total amount of currency in root staged ledger" in - Gauge.v "root_staged_ledger_total_currency" ~help ~namespace ~subsystem + let root_staged_ledger_total_currency : Gauge.t = + let help = "total amount of currency in root staged ledger" in + Gauge.v "root_staged_ledger_total_currency" ~help ~namespace 
~subsystem *) end @@ -761,8 +760,7 @@ module Block_latency = struct Gauge.v "upload_to_gcloud_blocks" ~help ~namespace ~subsystem end - [%%inject - "block_window_duration", block_window_duration] + [%%inject "block_window_duration", block_window_duration] module Latency_time_spec = struct let tick_interval = @@ -773,7 +771,8 @@ module Block_latency = struct end module Gossip_slots = - Moving_bucketed_average (struct + Moving_bucketed_average + (struct let bucket_interval = Core.Time.Span.of_ms (Int.to_float (block_window_duration / 2)) @@ -790,14 +789,15 @@ module Block_latency = struct let total_sum, count_sum = List.fold buckets ~init:(0.0, 0) ~f:(fun (total_sum, count_sum) (total, count) -> - (total_sum +. total, count_sum + count) ) + (total_sum +. total, count_sum + count)) in total_sum /. Float.of_int count_sum end) () module Gossip_time = - Moving_time_average (struct + Moving_time_average + (struct include Latency_time_spec let subsystem = subsystem @@ -810,7 +810,8 @@ module Block_latency = struct () module Inclusion_time = - Moving_time_average (struct + Moving_time_average + (struct include Latency_time_spec let subsystem = subsystem @@ -818,8 +819,8 @@ module Block_latency = struct let name = "inclusion_time" let help = - "average delay, in seconds, after which produced blocks are \ - included into our frontier" + "average delay, in seconds, after which produced blocks are included \ + into our frontier" end) () end @@ -927,7 +928,7 @@ let generic_server ?forward_uri ~port ~logger ~registry () = let open Cohttp_async in let handle_error _ exn = [%log error] - ~metadata:[("error", `String (Exn.to_string exn))] + ~metadata:[ ("error", `String (Exn.to_string exn)) ] "Encountered error while handling request to prometheus server: $error" in let callback ~body:_ _ req = @@ -947,7 +948,8 @@ let generic_server ?forward_uri ~port ~logger ~registry () = ~metadata: [ ("url", `String (Uri.to_string uri)) ; ("status_code", `Int (Code.code_of_status status)) - ; 
("status", `String (Code.string_of_status status)) ] ; + ; ("status", `String (Code.string_of_status status)) + ] ; return None ) | None -> return None @@ -978,16 +980,16 @@ let server ?forward_uri ~port ~logger () = module Archive = struct type t = - {registry: CollectorRegistry.t; gauge_metrics: (string, Gauge.t) Hashtbl.t} + { registry : CollectorRegistry.t + ; gauge_metrics : (string, Gauge.t) Hashtbl.t + } let subsystem = "Archive" let find_or_add t ~name ~help ~subsystem = match Hashtbl.find t.gauge_metrics name with | None -> - let g = - Gauge.v name ~help ~namespace ~subsystem ~registry:t.registry - in + let g = Gauge.v name ~help ~namespace ~subsystem ~registry:t.registry in Hashtbl.add_exn t.gauge_metrics ~key:name ~data:g ; g | Some m -> @@ -1018,7 +1020,9 @@ module Archive = struct let%map _ = generic_server ?forward_uri ~port ~logger ~registry:archive_registry () in - {registry= archive_registry; gauge_metrics= Hashtbl.create (module String)} + { registry = archive_registry + ; gauge_metrics = Hashtbl.create (module String) + } end (* re-export a constrained subset of prometheus to keep consumers of this module abstract over implementation *) diff --git a/src/lib/mina_net2/mina_net2.ml b/src/lib/mina_net2/mina_net2.ml index 8e11137c939..f64c11dcd31 100644 --- a/src/lib/mina_net2/mina_net2.ml +++ b/src/lib/mina_net2/mina_net2.ml @@ -7,13 +7,15 @@ open Network_peer module Timeout = Timeout_lib.Core_time_ns module Validation_callback = struct - type validation_result = [`Accept | `Reject | `Ignore] [@@deriving equal] + type validation_result = [ `Accept | `Reject | `Ignore ] [@@deriving equal] - type t = {expiration: Time_ns.t option; signal: validation_result Ivar.t} + type t = { expiration : Time_ns.t option; signal : validation_result Ivar.t } - let create expiration = {expiration= Some expiration; signal= Ivar.create ()} + let create expiration = + { expiration = Some expiration; signal = Ivar.create () } - let create_without_expiration () = 
{expiration= None; signal= Ivar.create ()} + let create_without_expiration () = + { expiration = None; signal = Ivar.create () } let is_expired cb = match cb.expiration with @@ -51,11 +53,7 @@ module Validation_callback = struct None ) let await_exn cb = - match%map await cb with - | None -> - failwith "timeout" - | Some result -> - result + match%map await cb with None -> failwith "timeout" | Some result -> result let fire_if_not_already_fired cb result = if not (is_expired cb) then ( @@ -71,11 +69,11 @@ module Validation_callback = struct end (** simple types for yojson to derive, later mapped into a Peer.t *) -type peer_info = {libp2p_port: int; host: string; peer_id: string} +type peer_info = { libp2p_port : int; host : string; peer_id : string } [@@deriving yojson] type connection_gating = - {banned_peers: Peer.t list; trusted_peers: Peer.t list; isolate: bool} + { banned_peers : Peer.t list; trusted_peers : Peer.t list; isolate : bool } [@@deriving yojson] let peer_of_peer_info peer_info = @@ -86,11 +84,11 @@ let peer_of_peer_info peer_info = let of_b64_data = function | `String s -> ( - match Base64.decode s with - | Ok result -> - Ok result - | Error (`Msg s) -> - Or_error.error_string ("invalid base64: " ^ s) ) + match Base64.decode s with + | Ok result -> + Ok result + | Error (`Msg s) -> + Or_error.error_string ("invalid base64: " ^ s) ) | _ -> Or_error.error_string "expected a string" @@ -107,7 +105,8 @@ module Keypair0 = struct [%%versioned module Stable = struct module V1 = struct - type t = {secret: string; public: string; peer_id: Peer.Id.Stable.V1.t} + type t = + { secret : string; public : string; peer_id : Peer.Id.Stable.V1.t } let to_latest = Fn.id end @@ -116,13 +115,13 @@ end type stream_state = | FullyOpen (** Streams start in this state. Both sides can still write *) - | HalfClosed of [`Us | `Them] + | HalfClosed of [ `Us | `Them ] (** Streams move from [FullyOpen] to [HalfClosed `Us] when the write pipe is closed. 
Streams move from [FullyOpen] to [HalfClosed `Them] when [Stream.reset] is called or the remote host closes their write stream. *) | FullyClosed (** Streams move from [HalfClosed peer] to FullyClosed once the party that isn't peer has their "close write" event. Once a stream is FullyClosed, its resources are released. *) [@@deriving show] -type erased_magic = [`Be_very_careful_to_be_type_safe] +type erased_magic = [ `Be_very_careful_to_be_type_safe ] module Go_log = struct let ours_of_go lvl = @@ -143,11 +142,12 @@ module Go_log = struct (* there should be no other levels. *) type record = - { ts: string - ; module_: string [@key "logger"] - ; level: string - ; msg: string - ; metadata: Yojson.Safe.t String.Map.t } + { ts : string + ; module_ : string [@key "logger"] + ; level : string + ; msg : string + ; metadata : Yojson.Safe.t String.Map.t + } let record_of_yojson (json : Yojson.Safe.t) = let open Result.Let_syntax in @@ -163,7 +163,7 @@ module Go_log = struct parse json |> Result.map_error ~f:(fun err -> prefix ^ "Could not parse field '" ^ field_name ^ "':" - ^ err ) + ^ err) |> Result.map ~f:Option.return in let get_field field_name value = @@ -208,28 +208,29 @@ module Go_log = struct , module_ , level , msg - , Map.set ~key:field ~data:json metadata ) ) + , Map.set ~key:field ~data:json metadata )) in let%bind ts = get_field "ts" ts in let%bind module_ = get_field "logger" module_ in let%bind level = get_field "level" level in let%map msg = get_field "msg" msg in - {ts; module_; level; msg; metadata} + { ts; module_; level; msg; metadata } | _ -> Error (prefix ^ "Expected a JSON object") let record_to_message r = Logger.Message. 
- { timestamp= Time.of_string r.ts - ; level= ours_of_go r.level - ; source= + { timestamp = Time.of_string r.ts + ; level = ours_of_go r.level + ; source = Some (Logger.Source.create ~module_:(sprintf "Libp2p_helper.Go.%s" r.module_) ~location:"(not tracked)") - ; message= r.msg - ; metadata= r.metadata - ; event_id= None } + ; message = r.msg + ; metadata = r.metadata + ; event_id = None + } end (** Set of peers, represented as a host/port pair. We ignore the peer ID so @@ -238,7 +239,7 @@ end *) module Peers_no_ids = struct module T = struct - type t = {libp2p_port: int; host: string} + type t = { libp2p_port : int; host : string } [@@deriving sexp, compare, yojson] end @@ -248,9 +249,9 @@ end module Helper = struct type t = - { subprocess: Child_processes.t - ; conf_dir: string - ; outstanding_requests: (int, Yojson.Safe.t Or_error.t Ivar.t) Hashtbl.t + { subprocess : Child_processes.t + ; conf_dir : string + ; outstanding_requests : (int, Yojson.Safe.t Or_error.t Ivar.t) Hashtbl.t (** seqno is used to assign unique IDs to our outbound requests and index the tables below. @@ -263,56 +264,60 @@ module Helper = struct Some types would make it harder to misuse these integers. 
*) - ; mutable seqno: int - ; mutable connection_gating: connection_gating - ; logger: Logger.t - ; me_keypair: Keypair0.t Ivar.t - ; subscriptions: (int, erased_magic subscription) Hashtbl.t - ; streams: (int, stream) Hashtbl.t - ; protocol_handlers: (string, protocol_handler) Hashtbl.t - ; mutable all_peers_seen: Peers_no_ids.Set.t option - ; mutable banned_ips: Unix.Inet_addr.t list - ; mutable peer_connected_callback: (string -> unit) option - ; mutable peer_disconnected_callback: (string -> unit) option - ; mutable finished: bool } + ; mutable seqno : int + ; mutable connection_gating : connection_gating + ; logger : Logger.t + ; me_keypair : Keypair0.t Ivar.t + ; subscriptions : (int, erased_magic subscription) Hashtbl.t + ; streams : (int, stream) Hashtbl.t + ; protocol_handlers : (string, protocol_handler) Hashtbl.t + ; mutable all_peers_seen : Peers_no_ids.Set.t option + ; mutable banned_ips : Unix.Inet_addr.t list + ; mutable peer_connected_callback : (string -> unit) option + ; mutable peer_disconnected_callback : (string -> unit) option + ; mutable finished : bool + } and 'a subscription = - { net: t - ; topic: string - ; idx: int - ; mutable closed: bool - ; validator: + { net : t + ; topic : string + ; idx : int + ; mutable closed : bool + ; validator : 'a Envelope.Incoming.t -> Validation_callback.t -> unit Deferred.t - ; encode: 'a -> string - ; on_decode_failure: - [`Ignore | `Call of string Envelope.Incoming.t -> Error.t -> unit] - ; decode: string -> 'a Or_error.t - ; write_pipe: + ; encode : 'a -> string + ; on_decode_failure : + [ `Ignore | `Call of string Envelope.Incoming.t -> Error.t -> unit ] + ; decode : string -> 'a Or_error.t + ; write_pipe : ( 'a Envelope.Incoming.t , Strict_pipe.synchronous , unit Deferred.t ) Strict_pipe.Writer.t - ; read_pipe: 'a Envelope.Incoming.t Strict_pipe.Reader.t } + ; read_pipe : 'a Envelope.Incoming.t Strict_pipe.Reader.t + } and stream = - { net: t - ; idx: int - ; mutable state: stream_state - ; mutable 
state_lock: bool - ; state_wait: unit Async.Condition.t - ; protocol: string - ; peer: Peer.t - ; incoming_r: string Pipe.Reader.t - ; incoming_w: string Pipe.Writer.t - ; outgoing_r: string Pipe.Reader.t - ; outgoing_w: string Pipe.Writer.t } + { net : t + ; idx : int + ; mutable state : stream_state + ; mutable state_lock : bool + ; state_wait : unit Async.Condition.t + ; protocol : string + ; peer : Peer.t + ; incoming_r : string Pipe.Reader.t + ; incoming_w : string Pipe.Writer.t + ; outgoing_r : string Pipe.Reader.t + ; outgoing_w : string Pipe.Writer.t + } and protocol_handler = - { net: t - ; protocol_name: string - ; mutable closed: bool - ; on_handler_error: [`Raise | `Ignore | `Call of stream -> exn -> unit] - ; f: stream -> unit Deferred.t } + { net : t + ; protocol_name : string + ; mutable closed : bool + ; on_handler_error : [ `Raise | `Ignore | `Call of stream -> exn -> unit ] + ; f : stream -> unit Deferred.t + } module type Rpc = sig type input [@@deriving to_yojson] @@ -341,8 +346,8 @@ module Helper = struct let of_yojson = function | `String s -> ( - try Ok (decode_string s) - with exn -> Error Error.(to_string_hum (of_exn exn)) ) + try Ok (decode_string s) + with exn -> Error Error.(to_string_hum (of_exn exn)) ) | _ -> Error "expected a string" @@ -359,7 +364,7 @@ module Helper = struct end module Send_stream_msg = struct - type input = {stream_idx: int; data: string} [@@deriving yojson] + type input = { stream_idx : int; data : string } [@@deriving yojson] type output = string [@@deriving yojson] @@ -367,7 +372,7 @@ module Helper = struct end module Close_stream = struct - type input = {stream_idx: int} [@@deriving yojson] + type input = { stream_idx : int } [@@deriving yojson] type output = string [@@deriving yojson] @@ -377,7 +382,7 @@ module Helper = struct end module Remove_stream_handler = struct - type input = {protocol: string} [@@deriving yojson] + type input = { protocol : string } [@@deriving yojson] type output = string [@@deriving 
yojson] @@ -387,14 +392,14 @@ module Helper = struct module Generate_keypair = struct include No_input - type output = {sk: string; pk: string; peer_id: string} + type output = { sk : string; pk : string; peer_id : string } [@@deriving yojson] let name = "generateKeypair" end module Publish = struct - type input = {topic: string; data: Data.t} [@@deriving yojson] + type input = { topic : string; data : Data.t } [@@deriving yojson] type output = string [@@deriving yojson] @@ -402,7 +407,8 @@ module Helper = struct end module Subscribe = struct - type input = {topic: string; subscription_idx: int} [@@deriving yojson] + type input = { topic : string; subscription_idx : int } + [@@deriving yojson] type output = string [@@deriving yojson] @@ -410,7 +416,7 @@ module Helper = struct end module Unsubscribe = struct - type input = {subscription_idx: int} [@@deriving yojson] + type input = { subscription_idx : int } [@@deriving yojson] type output = string [@@deriving yojson] @@ -419,11 +425,12 @@ module Helper = struct module Set_gater_config = struct type input = - { banned_ips: string list - ; banned_peers: string list - ; trusted_peers: string list - ; trusted_ips: string list - ; isolate: bool } + { banned_ips : string list + ; banned_peers : string list + ; trusted_peers : string list + ; trusted_ips : string list + ; isolate : bool + } [@@deriving yojson] type output = string [@@deriving yojson] @@ -433,21 +440,22 @@ module Helper = struct module Configure = struct type input = - { privk: string - ; statedir: string - ; ifaces: string list - ; metrics_port: string - ; external_maddr: string - ; network_id: string - ; unsafe_no_trust_ip: bool - ; flood: bool - ; direct_peers: string list - ; peer_exchange: bool - ; gating_config: Set_gater_config.input - ; seed_peers: string list - ; max_connections: int - ; validation_queue_size: int - ; mina_peer_exchange: bool } + { privk : string + ; statedir : string + ; ifaces : string list + ; metrics_port : string + ; 
external_maddr : string + ; network_id : string + ; unsafe_no_trust_ip : bool + ; flood : bool + ; direct_peers : string list + ; peer_exchange : bool + ; gating_config : Set_gater_config.input + ; seed_peers : string list + ; max_connections : int + ; validation_queue_size : int + ; mina_peer_exchange : bool + } [@@deriving yojson] type output = string [@@deriving yojson] @@ -456,7 +464,7 @@ module Helper = struct end module Listen = struct - type input = {iface: string} [@@deriving yojson] + type input = { iface : string } [@@deriving yojson] type output = string list [@@deriving yojson] @@ -472,7 +480,7 @@ module Helper = struct end module Reset_stream = struct - type input = {idx: int} [@@deriving yojson] + type input = { idx : int } [@@deriving yojson] type output = string [@@deriving yojson] @@ -480,7 +488,7 @@ module Helper = struct end module Add_stream_handler = struct - type input = {protocol: string} [@@deriving yojson] + type input = { protocol : string } [@@deriving yojson] type output = string [@@deriving yojson] @@ -488,15 +496,15 @@ module Helper = struct end module Open_stream = struct - type input = {peer: string; protocol: string} [@@deriving yojson] + type input = { peer : string; protocol : string } [@@deriving yojson] - type output = {stream_idx: int; peer: peer_info} [@@deriving yojson] + type output = { stream_idx : int; peer : peer_info } [@@deriving yojson] let name = "openStream" end module Validation_complete = struct - type input = {seqno: int; is_valid: string} [@@deriving yojson] + type input = { seqno : int; is_valid : string } [@@deriving yojson] type output = string [@@deriving yojson] @@ -504,7 +512,7 @@ module Helper = struct end module Add_peer = struct - type input = {multiaddr: string; seed: bool} [@@deriving yojson] + type input = { multiaddr : string; seed : bool } [@@deriving yojson] type output = string [@@deriving yojson] @@ -528,7 +536,7 @@ module Helper = struct end module Set_node_status = struct - type input = {data: 
string} [@@deriving yojson] + type input = { data : string } [@@deriving yojson] type output = string [@@deriving yojson] @@ -536,7 +544,7 @@ module Helper = struct end module Get_peer_node_status = struct - type input = {peer_multiaddr: string} [@@deriving yojson] + type input = { peer_multiaddr : string } [@@deriving yojson] type output = string [@@deriving yojson] @@ -544,7 +552,7 @@ module Helper = struct end module Find_peer = struct - type input = {peer_id: string} [@@deriving yojson] + type input = { peer_id : string } [@@deriving yojson] type output = peer_info [@@deriving yojson] @@ -564,15 +572,16 @@ module Helper = struct ~f:(fun p -> let p = Unix.Inet_addr.to_string p.host in (* Trusted peers cannot be banned. *) - if Set.mem trusted p then None else Some p ) + if Set.mem trusted p then None else Some p) config.banned_peers in Rpcs.Set_gater_config. { banned_ips - ; banned_peers= List.map ~f:(fun p -> p.peer_id) config.banned_peers + ; banned_peers = List.map ~f:(fun p -> p.peer_id) config.banned_peers ; trusted_ips - ; trusted_peers= List.map ~f:(fun p -> p.peer_id) config.trusted_peers - ; isolate= config.isolate } + ; trusted_peers = List.map ~f:(fun p -> p.peer_id) config.trusted_peers + ; isolate = config.isolate + } (** Generate the next sequence number for our side of the connection *) let genseq t = @@ -597,19 +606,20 @@ module Helper = struct `Assoc [ ("seqno", `Int seqno) ; ("method", `String M.name) - ; ("body", M.input_to_yojson body) ] + ; ("body", M.input_to_yojson body) + ] in let rpc = Yojson.Safe.to_string actual_obj in [%log' spam t.logger] "sending line to libp2p_helper: $line" ~metadata: [ ( "line" - , `String (String.slice rpc 0 (Int.min (String.length rpc) 2048)) - ) ] ; + , `String (String.slice rpc 0 (Int.min (String.length rpc) 2048)) ) + ] ; Writer.write_line (Child_processes.stdin t.subprocess) rpc ; let%map res_json = Ivar.read res in Or_error.bind res_json - ~f: - (Fn.compose (Result.map_error ~f:Error.of_string) 
M.output_of_yojson) ) + ~f:(Fn.compose (Result.map_error ~f:Error.of_string) M.output_of_yojson) + ) else Deferred.Or_error.errorf "helper process already exited (doing RPC %s)" (M.input_to_yojson body |> Yojson.Safe.to_string) @@ -654,14 +664,14 @@ module Helper = struct ~finally:(fun () -> stream.state_lock <- false ; Async.Condition.signal stream.state_wait () ; - Deferred.unit ) + Deferred.unit) (fun () -> let%map () = match who_closed with | `Us -> (* FIXME related to https://github.com/libp2p/go-libp2p-circuit/issues/18 - "preemptive" or half-closing a stream doesn't actually seem supported: - after closing it we can't read anymore.*) + "preemptive" or half-closing a stream doesn't actually seem supported: + after closing it we can't read anymore.*) (* match%map do_rpc net (module Rpcs.Close_stream) {stream_idx= stream.idx} @@ -685,7 +695,8 @@ module Helper = struct "stream with index $index closed twice by $party" ~metadata: [ ("index", `Int stream.idx) - ; ("party", `String (name_participant who_closed)) ] ; + ; ("party", `String (name_participant who_closed)) + ] ; stream.state in let release () = @@ -695,28 +706,28 @@ module Helper = struct | None -> [%log' error net.logger] "tried to release stream $idx but it was already gone" - ~metadata:[("idx", `Int stream.idx)] + ~metadata:[ ("idx", `Int stream.idx) ] in - stream.state - <- ( match old_state with - | FullyOpen -> - HalfClosed who_closed - | HalfClosed other -> - if [%equal: [`Us | `Them]] other who_closed then - ignore (double_close () : stream_state) - else release () ; - FullyClosed - | FullyClosed -> - double_close () ) ; + stream.state <- + ( match old_state with + | FullyOpen -> + HalfClosed who_closed + | HalfClosed other -> + if [%equal: [ `Us | `Them ]] other who_closed then + ignore (double_close () : stream_state) + else release () ; + FullyClosed + | FullyClosed -> + double_close () ) ; (* TODO: maybe we can check some invariants on the Go side too? 
*) if not (stream_state_invariant stream net.logger) then [%log' error net.logger] - "after $who_closed closed the stream, stream state invariant \ - broke (previous state: $old_stream_state)" + "after $who_closed closed the stream, stream state invariant broke \ + (previous state: $old_stream_state)" ~metadata: [ ("who_closed", `String (name_participant who_closed)) - ; ("old_stream_state", `String (show_stream_state old_state)) ] - ) + ; ("old_stream_state", `String (show_stream_state old_state)) + ]) (** Track a new stream. @@ -738,15 +749,16 @@ module Helper = struct let stream = { net ; idx - ; state= FullyOpen - ; state_lock= false - ; state_wait= Async.Condition.create () + ; state = FullyOpen + ; state_lock = false + ; state_wait = Async.Condition.create () ; peer ; protocol ; incoming_r ; incoming_w ; outgoing_r - ; outgoing_w } + ; outgoing_w + } in let outgoing_loop () = let%bind () = @@ -754,19 +766,20 @@ module Helper = struct match%map do_rpc net (module Rpcs.Send_stream_msg) - {stream_idx= idx; data= to_b64_data msg} + { stream_idx = idx; data = to_b64_data msg } with | Ok "sendStreamMsg success" -> () | Ok v -> - failwithf "helper broke RPC protocol: sendStreamMsg got %s" v - () + failwithf "helper broke RPC protocol: sendStreamMsg got %s" v () | Error e -> [%log' error net.logger] "error sending message on stream $idx: $error" ~metadata: - [("idx", `Int idx); ("error", Error_json.error_to_yojson e)] ; - Pipe.close outgoing_w ) + [ ("idx", `Int idx) + ; ("error", Error_json.error_to_yojson e) + ] ; + Pipe.close outgoing_w) in advance_stream_state net stream `Us in @@ -813,50 +826,56 @@ module Helper = struct module Upcall = struct module Publish = struct type t = - { upcall: string - ; subscription_idx: int - ; sender: peer_info option - ; data: Data.t } + { upcall : string + ; subscription_idx : int + ; sender : peer_info option + ; data : Data.t + } [@@deriving yojson] end module Validate = struct type t = - { sender: peer_info option - ; data: 
Data.t - ; expiration: int64 - ; seqno: int - ; upcall: string - ; subscription_idx: int } + { sender : peer_info option + ; data : Data.t + ; expiration : int64 + ; seqno : int + ; upcall : string + ; subscription_idx : int + } [@@deriving yojson] end module Stream_lost = struct - type t = {upcall: string; stream_idx: int; reason: string} + type t = { upcall : string; stream_idx : int; reason : string } [@@deriving yojson] end module Stream_read_complete = struct - type t = {upcall: string; stream_idx: int} [@@deriving yojson] + type t = { upcall : string; stream_idx : int } [@@deriving yojson] end module Incoming_stream_msg = struct - type t = {upcall: string; stream_idx: int; data: Data.t} + type t = { upcall : string; stream_idx : int; data : Data.t } [@@deriving yojson] end module Incoming_stream = struct type t = - {upcall: string; peer: peer_info; stream_idx: int; protocol: string} + { upcall : string + ; peer : peer_info + ; stream_idx : int + ; protocol : string + } [@@deriving yojson] end module Peer_connected = struct - type t = {upcall: string; peer_id: string} [@@deriving yojson] + type t = { upcall : string; peer_id : string } [@@deriving yojson] end module Peer_disconnected = struct - type t = {upcall: string; peer_id: string} [@@deriving yojson] + type t = { upcall : string; peer_id : string } [@@deriving yojson] end let or_error (t : ('a, string) Result.t) = @@ -868,7 +887,7 @@ module Helper = struct end let lookup_peerid net peer_id = - match%map do_rpc net (module Rpcs.Find_peer) {peer_id} with + match%map do_rpc net (module Rpcs.Find_peer) { peer_id } with | Ok peer_info -> Ok (Peer.create @@ -889,8 +908,7 @@ module Helper = struct && Int.equal sender.libp2p_port 0 then Envelope.Incoming.local data else - Envelope.Incoming.wrap_peer ~sender:(peer_of_peer_info sender) - ~data + Envelope.Incoming.wrap_peer ~sender:(peer_of_peer_info sender) ~data | None -> Envelope.Incoming.local data in @@ -922,14 +940,14 @@ module Helper = struct match decoded 
with | Ok data -> (* TAKE CARE: doing anything with the return - value here except ignore is UNSOUND because - write_pipe has a cast type. We don't remember - what the original 'return was. *) + value here except ignore is UNSOUND because + write_pipe has a cast type. We don't remember + what the original 'return was. *) if Strict_pipe.Writer.is_closed sub.write_pipe then [%log' error t.logger] "subscription writer for $topic unexpectedly closed. \ dropping message." - ~metadata:[("topic", `String sub.topic)] + ~metadata:[ ("topic", `String sub.topic) ] else ignore ( Strict_pipe.Writer.write sub.write_pipe @@ -942,24 +960,26 @@ module Helper = struct | `Call f -> f (wrap m.sender raw_data) e ) ; [%log' error t.logger] - "failed to decode message published on subscription \ - $topic ($idx): $error" + "failed to decode message published on subscription $topic \ + ($idx): $error" ~metadata: [ ("topic", `String sub.topic) ; ("idx", `Int idx) - ; ("error", Error_json.error_to_yojson e) ] ; + ; ("error", Error_json.error_to_yojson e) + ] ; () (* TODO: add sender to Publish.t and include it here. *) - (* TODO: think about exposing the PeerID of the originator as well? *) ) + (* TODO: think about exposing the PeerID of the originator as well? *) + ) else [%log' debug t.logger] "received msg for subscription $sub after unsubscribe, was it \ still in the stdout pipe?" 
- ~metadata:[("sub", `Int idx)] ; + ~metadata:[ ("sub", `Int idx) ] ; Ok () | None -> - Or_error.errorf - "message published with inactive subsubscription %d" idx ) + Or_error.errorf "message published with inactive subsubscription %d" + idx ) (* Validate a message received on a subscription *) | "validate" -> ( let%bind m = Validate.of_yojson v |> or_error in @@ -991,12 +1011,13 @@ module Helper = struct | `Call f -> f (wrap m.sender raw_data) e ) ; [%log' error t.logger] - "failed to decode message published on subscription \ - $topic ($idx): $error" + "failed to decode message published on subscription $topic \ + ($idx): $error" ~metadata: [ ("topic", `String sub.topic) ; ("idx", `Int idx) - ; ("error", Error_json.error_to_yojson e) ] ; + ; ("error", Error_json.error_to_yojson e) + ] ; return (Some `Reject) in match action_opt with @@ -1009,14 +1030,15 @@ module Helper = struct do_rpc t (module Rpcs.Validation_complete) { seqno - ; is_valid= + ; is_valid = ( match action with | `Accept -> "accept" | `Reject -> "reject" | `Ignore -> - "ignore" ) } + "ignore" ) + } with | Ok "validationComplete success" -> () @@ -1028,7 +1050,7 @@ module Helper = struct [%log' error t.logger] "error during validationComplete, ignoring and \ continuing: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] )) + ~metadata:[ ("error", Error_json.error_to_yojson e) ] )) |> don't_wait_for ; Ok () | None -> @@ -1043,12 +1065,12 @@ module Helper = struct Option.iter t.all_peers_seen ~f:(fun all_peers_seen -> let all_peers_seen = Set.add all_peers_seen - {libp2p_port= m.peer.libp2p_port; host= m.peer.host} + { libp2p_port = m.peer.libp2p_port; host = m.peer.host } in t.all_peers_seen <- Some all_peers_seen ; Mina_metrics.( Gauge.set Network.all_peers - (Set.length all_peers_seen |> Int.to_float)) ) ; + (Set.length all_peers_seen |> Int.to_float))) ; let stream = make_stream t stream_idx protocol m.peer in match Hashtbl.find t.protocol_handlers protocol with | Some ph -> @@ 
-1057,36 +1079,38 @@ module Helper = struct don't_wait_for (let open Deferred.Let_syntax in (* Call the protocol handler. If it throws an exception, - handle it according to [on_handler_error]. Mimics - [Tcp.Server.create]. See [handle_protocol] doc comment. - *) + handle it according to [on_handler_error]. Mimics + [Tcp.Server.create]. See [handle_protocol] doc comment. + *) match%map Monitor.try_with ~here:[%here] ~extract_exn:true (fun () -> - ph.f stream ) + ph.f stream) with | Ok () -> () | Error e -> ( - try - match ph.on_handler_error with - | `Raise -> - raise e - | `Ignore -> - () - | `Call f -> - f stream e - with handler_exn -> - ph.closed <- true ; - don't_wait_for - ( do_rpc t (module Rpcs.Remove_stream_handler) {protocol} - >>| fun _ -> Hashtbl.remove t.protocol_handlers protocol - ) ; - raise handler_exn )) ; + try + match ph.on_handler_error with + | `Raise -> + raise e + | `Ignore -> + () + | `Call f -> + f stream e + with handler_exn -> + ph.closed <- true ; + don't_wait_for + ( do_rpc t + (module Rpcs.Remove_stream_handler) + { protocol } + >>| fun _ -> Hashtbl.remove t.protocol_handlers protocol + ) ; + raise handler_exn )) ; Ok () ) else (* silently ignore new streams for closed protocol handlers. - these are buffered stream open RPCs that were enqueued before - our close went into effect. *) + these are buffered stream open RPCs that were enqueued before + our close went into effect. 
*) (* TODO: we leak the new pipes here*) Ok () | None -> @@ -1103,7 +1127,7 @@ module Helper = struct | "incomingStreamMsg" -> ( let%bind m = Incoming_stream_msg.of_yojson v |> or_error in match Hashtbl.find t.streams m.stream_idx with - | Some {incoming_w; _} -> + | Some { incoming_w; _ } -> don't_wait_for (Pipe.write_if_open incoming_w (Data.to_string m.data)) ; Ok () @@ -1116,7 +1140,7 @@ module Helper = struct let stream_idx = m.stream_idx in [%log' trace t.logger] "Encountered error while reading stream $idx: $error" - ~metadata:[("error", `String m.reason); ("idx", `Int stream_idx)] ; + ~metadata:[ ("error", `String m.reason); ("idx", `Int stream_idx) ] ; Ok () (* The remote peer closed its write end of one of our streams *) | "streamReadComplete" -> ( @@ -1132,11 +1156,11 @@ module Helper = struct ) | s -> Or_error.errorf "unknown upcall %s" s -end [@(* Warning 30 is about field labels being defined in multiple types. - It means more disambiguation has to happen sometimes but it doesn't - seem to be a big deal. *) - warning - "-30"] +end +(* Warning 30 is about field labels being defined in multiple types. + It means more disambiguation has to happen sometimes but it doesn't + seem to be a big deal. 
*) +[@warning "-30"] type net = Helper.t @@ -1145,29 +1169,29 @@ module Keypair = struct let random net = match%map Helper.do_rpc net (module Helper.Rpcs.Generate_keypair) () with - | Ok {sk; pk; peer_id} -> + | Ok { sk; pk; peer_id } -> (let open Or_error.Let_syntax in let%bind secret = of_b64_data (`String sk) in let%map public = of_b64_data (`String pk) in - ({secret; public; peer_id= Peer.Id.unsafe_of_string peer_id} : t)) + ({ secret; public; peer_id = Peer.Id.unsafe_of_string peer_id } : t)) |> Or_error.ok_exn | Error e -> Error.tag e ~tag:"Other RPC error generateKeypair" |> Error.raise - let secret_key_base64 ({secret; _} : t) = to_b64_data secret + let secret_key_base64 ({ secret; _ } : t) = to_b64_data secret - let to_string ({secret; public; peer_id} : t) = + let to_string ({ secret; public; peer_id } : t) = String.concat ~sep:"," - [to_b64_data secret; to_b64_data public; Peer.Id.to_string peer_id] + [ to_b64_data secret; to_b64_data public; Peer.Id.to_string peer_id ] let of_string s = let parse_with_sep sep = match String.split s ~on:sep with - | [secret_b64; public_b64; peer_id] -> + | [ secret_b64; public_b64; peer_id ] -> let open Or_error.Let_syntax in let%map secret = of_b64_data (`String secret_b64) and public = of_b64_data (`String public_b64) in - ({secret; public; peer_id= Peer.Id.unsafe_of_string peer_id} : t) + ({ secret; public; peer_id = Peer.Id.unsafe_of_string peer_id } : t) | _ -> Or_error.errorf "%s is not a valid Keypair.to_string output" s in @@ -1175,7 +1199,7 @@ module Keypair = struct let with_comma = parse_with_sep ',' in if Or_error.is_error with_semicolon then with_comma else with_semicolon - let to_peer_id ({peer_id; _} : t) = peer_id + let to_peer_id ({ peer_id; _ } : t) = peer_id end module Multiaddr = struct @@ -1187,20 +1211,21 @@ module Multiaddr = struct let to_peer t = match String.split ~on:'/' t with - | [""; "ip4"; ip4_str; "tcp"; tcp_str; "p2p"; peer_id] -> ( - try - let host = Unix.Inet_addr.of_string ip4_str in - 
let libp2p_port = Int.of_string tcp_str in - Some (Network_peer.Peer.create host ~libp2p_port ~peer_id) - with _ -> None ) + | [ ""; "ip4"; ip4_str; "tcp"; tcp_str; "p2p"; peer_id ] -> ( + try + let host = Unix.Inet_addr.of_string ip4_str in + let libp2p_port = Int.of_string tcp_str in + Some (Network_peer.Peer.create host ~libp2p_port ~peer_id) + with _ -> None ) | _ -> None let valid_as_peer t = match String.split ~on:'/' t with - | [""; protocol; _; "tcp"; _; "p2p"; _] - when List.mem ["ip4"; "ip6"; "dns4"; "dns6"] protocol ~equal:String.equal - -> + | [ ""; protocol; _; "tcp"; _; "p2p"; _ ] + when List.mem + [ "ip4"; "ip6"; "dns4"; "dns6" ] + protocol ~equal:String.equal -> true | _ -> false @@ -1213,18 +1238,18 @@ module Multiaddr = struct else ( [%log' error (Logger.create ())] "Invalid peer $peer found in peers list" - ~metadata:[("peer", `String s)] ; - false ) ) + ~metadata:[ ("peer", `String s) ] ; + false )) end -type discovered_peer = {id: Peer.Id.t; maddrs: Multiaddr.t list} +type discovered_peer = { id : Peer.Id.t; maddrs : Multiaddr.t list } module Pubsub = struct let publish net ~topic ~data = match%map Helper.do_rpc net (module Helper.Rpcs.Publish) - {topic; data= Helper.Data.pack_data data} + { topic; data = Helper.Data.pack_data data } with | Ok "publish success" -> () @@ -1234,38 +1259,39 @@ module Pubsub = struct [%log' error net.logger] "error while publishing message on $topic: $err" ~metadata: - [("topic", `String topic); ("err", Error_json.error_to_yojson e)] + [ ("topic", `String topic); ("err", Error_json.error_to_yojson e) ] module Subscription = struct type 'a t = 'a Helper.subscription = - { net: Helper.t - ; topic: string - ; idx: int - ; mutable closed: bool - ; validator: + { net : Helper.t + ; topic : string + ; idx : int + ; mutable closed : bool + ; validator : 'a Envelope.Incoming.t -> Validation_callback.t -> unit Deferred.t - ; encode: 'a -> string - ; on_decode_failure: - [`Ignore | `Call of string Envelope.Incoming.t -> 
Error.t -> unit] - ; decode: string -> 'a Or_error.t - ; write_pipe: + ; encode : 'a -> string + ; on_decode_failure : + [ `Ignore | `Call of string Envelope.Incoming.t -> Error.t -> unit ] + ; decode : string -> 'a Or_error.t + ; write_pipe : ( 'a Envelope.Incoming.t , Strict_pipe.synchronous , unit Deferred.t ) Strict_pipe.Writer.t - ; read_pipe: 'a Envelope.Incoming.t Strict_pipe.Reader.t } + ; read_pipe : 'a Envelope.Incoming.t Strict_pipe.Reader.t + } - let publish {net; topic; encode; _} message = + let publish { net; topic; encode; _ } message = publish net ~topic ~data:(encode message) - let unsubscribe ({net; idx; write_pipe; _} as t) = + let unsubscribe ({ net; idx; write_pipe; _ } as t) = if not t.closed then ( t.closed <- true ; Strict_pipe.Writer.close write_pipe ; match%map Helper.do_rpc net (module Helper.Rpcs.Unsubscribe) - {subscription_idx= idx} + { subscription_idx = idx } with | Ok "unsubscribe success" -> Ok () @@ -1275,11 +1301,11 @@ module Pubsub = struct Error e ) else Deferred.Or_error.error_string "already unsubscribed" - let message_pipe {read_pipe; _} = read_pipe + let message_pipe { read_pipe; _ } = read_pipe end - let subscribe_raw (net : net) (topic : string) ~should_forward_message - ~encode ~decode ~on_decode_failure = + let subscribe_raw (net : net) (topic : string) ~should_forward_message ~encode + ~decode ~on_decode_failure = let subscription_idx = Helper.genseq net in let read_pipe, write_pipe = Strict_pipe.( @@ -1288,14 +1314,15 @@ module Pubsub = struct let sub = { Subscription.net ; topic - ; idx= subscription_idx - ; closed= false + ; idx = subscription_idx + ; closed = false ; encode ; on_decode_failure ; decode - ; validator= should_forward_message + ; validator = should_forward_message ; write_pipe - ; read_pipe } + ; read_pipe + } in (* Linear scan over all subscriptions. Should generally be small, probably not a problem. 
*) let already_exists_error = @@ -1304,7 +1331,7 @@ module Pubsub = struct else if String.equal data.topic topic then ( Strict_pipe.Writer.close write_pipe ; Some (Or_error.errorf "already subscribed to topic %s" topic) ) - else acc ) + else acc) in match already_exists_error with | Some err -> @@ -1318,13 +1345,12 @@ module Pubsub = struct | `Ok -> return (Ok ()) | `Duplicate -> - failwith - "fresh genseq was already present in subscription table?" + failwith "fresh genseq was already present in subscription table?" in match%map Helper.do_rpc net (module Helper.Rpcs.Subscribe) - {topic; subscription_idx} + { topic; subscription_idx } with | Ok "subscribe success" -> Ok sub @@ -1341,11 +1367,11 @@ module Pubsub = struct ~decode:(fun msg_str -> let b = Bigstring.of_string msg_str in Bigstring.read_bin_prot b bin_prot.Bin_prot.Type_class.reader - |> Or_error.map ~f:fst ) + |> Or_error.map ~f:fst) ~encode:(fun msg -> - Bin_prot.Utils.bin_dump ~header:true - bin_prot.Bin_prot.Type_class.writer msg - |> Bigstring.to_string ) + Bin_prot.Utils.bin_dump ~header:true bin_prot.Bin_prot.Type_class.writer + msg + |> Bigstring.to_string) ~should_forward_message ~on_decode_failure net topic let subscribe = @@ -1356,7 +1382,7 @@ end let me (net : Helper.t) = Ivar.read net.me_keypair let set_node_status net data = - match%map Helper.do_rpc net (module Helper.Rpcs.Set_node_status) {data} with + match%map Helper.do_rpc net (module Helper.Rpcs.Set_node_status) { data } with | Ok "setNodeStatus success" -> Ok () | Ok v -> @@ -1367,24 +1393,24 @@ let set_node_status net data = let get_peer_node_status net peer = Helper.do_rpc net (module Helper.Rpcs.Get_peer_node_status) - {peer_multiaddr= Peer.to_multiaddr_string peer} + { peer_multiaddr = Peer.to_multiaddr_string peer } let list_peers net = match%map Helper.do_rpc net (module Helper.Rpcs.List_peers) () with | Ok peers -> (* FIXME #4039: filter_map shouldn't be necessary *) - List.filter_map peers ~f:(fun {host; libp2p_port; peer_id} 
-> + List.filter_map peers ~f:(fun { host; libp2p_port; peer_id } -> if Int.equal libp2p_port 0 then None else Some (Peer.create (Unix.Inet_addr.of_string host) ~libp2p_port - ~peer_id:(Peer.Id.unsafe_of_string peer_id)) ) + ~peer_id:(Peer.Id.unsafe_of_string peer_id))) | Error error -> [%log' error net.logger] "Encountered $error while asking libp2p_helper for peers" - ~metadata:[("error", Error_json.error_to_yojson error)] ; + ~metadata:[ ("error", Error_json.error_to_yojson error) ] ; [] (* `on_new_peer` fires whenever a peer connects OR disconnects *) @@ -1392,30 +1418,31 @@ let configure net ~logger:_ ~me ~external_maddr ~maddrs ~network_id ~metrics_port ~on_peer_connected ~on_peer_disconnected ~unsafe_no_trust_ip ~flooding ~direct_peers ~peer_exchange ~mina_peer_exchange ~seed_peers ~initial_gating_config ~max_connections ~validation_queue_size = - net.Helper.peer_connected_callback - <- Some (fun peer_id -> on_peer_connected (Peer.Id.unsafe_of_string peer_id)) ; - net.Helper.peer_disconnected_callback - <- Some - (fun peer_id -> on_peer_disconnected (Peer.Id.unsafe_of_string peer_id)) ; + net.Helper.peer_connected_callback <- + Some (fun peer_id -> on_peer_connected (Peer.Id.unsafe_of_string peer_id)) ; + net.Helper.peer_disconnected_callback <- + Some + (fun peer_id -> on_peer_disconnected (Peer.Id.unsafe_of_string peer_id)) ; match%map Helper.do_rpc net (module Helper.Rpcs.Configure) - { privk= Keypair.secret_key_base64 me - ; statedir= net.conf_dir - ; ifaces= List.map ~f:Multiaddr.to_string maddrs - ; metrics_port= Option.value metrics_port ~default:"" - ; external_maddr= Multiaddr.to_string external_maddr + { privk = Keypair.secret_key_base64 me + ; statedir = net.conf_dir + ; ifaces = List.map ~f:Multiaddr.to_string maddrs + ; metrics_port = Option.value metrics_port ~default:"" + ; external_maddr = Multiaddr.to_string external_maddr ; network_id ; unsafe_no_trust_ip - ; flood= flooding - ; direct_peers= List.map ~f:Multiaddr.to_string direct_peers - ; 
seed_peers= List.map ~f:Multiaddr.to_string seed_peers + ; flood = flooding + ; direct_peers = List.map ~f:Multiaddr.to_string direct_peers + ; seed_peers = List.map ~f:Multiaddr.to_string seed_peers ; peer_exchange ; mina_peer_exchange ; max_connections ; validation_queue_size - ; gating_config= - Helper.gating_config_to_helper_format initial_gating_config } + ; gating_config = + Helper.gating_config_to_helper_format initial_gating_config + } with | Ok "configure success" -> Ivar.fill_if_empty net.me_keypair me ; @@ -1429,7 +1456,7 @@ let configure net ~logger:_ ~me ~external_maddr ~maddrs ~network_id let peers (net : net) = list_peers net let listen_on net iface = - match%map Helper.do_rpc net (module Helper.Rpcs.Listen) {iface} with + match%map Helper.do_rpc net (module Helper.Rpcs.Listen) { iface } with | Ok maddrs -> Ok maddrs | Error e -> @@ -1451,13 +1478,13 @@ let shutdown (net : net) = module Stream = struct type t = Helper.stream - let pipes ({incoming_r; outgoing_w; _} : t) = (incoming_r, outgoing_w) + let pipes ({ incoming_r; outgoing_w; _ } : t) = (incoming_r, outgoing_w) - let reset ({net; idx; _} : t) = + let reset ({ net; idx; _ } : t) = (* NOTE: do not close the pipes here. Reset_stream should end up notifying us that streamReadComplete. We can reset the stream (telling the remote peer to stop writing) and still be sending data ourselves. 
*) - match%map Helper.do_rpc net (module Helper.Rpcs.Reset_stream) {idx} with + match%map Helper.do_rpc net (module Helper.Rpcs.Reset_stream) { idx } with | Ok "resetStream success" -> Ok () | Ok v -> @@ -1465,15 +1492,15 @@ module Stream = struct | Error e -> Error e - let remote_peer ({peer; _} : t) = peer + let remote_peer ({ peer; _ } : t) = peer end module Protocol_handler = struct type t = Helper.protocol_handler - let handling_protocol ({protocol_name; _} : t) = protocol_name + let handling_protocol ({ protocol_name; _ } : t) = protocol_name - let is_closed ({closed; _} : t) = closed + let is_closed ({ closed; _ } : t) = closed let close_connections (net : net) for_protocol = Hashtbl.filter_inplace net.streams ~f:(fun stream -> @@ -1483,9 +1510,9 @@ module Protocol_handler = struct (* TODO: this probably needs to be more thorough than a reset. Also force the write pipe closed? *) (let%map _ = Stream.reset stream in ()) ; - false ) ) + false )) - let close ?(reset_existing_streams = false) ({net; protocol_name; _} : t) = + let close ?(reset_existing_streams = false) ({ net; protocol_name; _ } : t) = Hashtbl.remove net.protocol_handlers protocol_name ; let close_connections = if reset_existing_streams then close_connections else fun _ _ -> () @@ -1493,7 +1520,7 @@ module Protocol_handler = struct match%map Helper.do_rpc net (module Helper.Rpcs.Remove_stream_handler) - {protocol= protocol_name} + { protocol = protocol_name } with | Ok "removeStreamHandler success" -> close_connections net protocol_name @@ -1506,19 +1533,20 @@ module Protocol_handler = struct anyway: $err" ~metadata: [ ("protocol", `String protocol_name) - ; ("err", Error_json.error_to_yojson e) ] ; + ; ("err", Error_json.error_to_yojson e) + ] ; close_connections net protocol_name end let handle_protocol net ~on_handler_error ~protocol f = let ph : Protocol_handler.t = - {net; closed= false; on_handler_error; f; protocol_name= protocol} + { net; closed = false; on_handler_error; f; 
protocol_name = protocol } in if Hashtbl.find net.protocol_handlers protocol |> Option.is_some then Deferred.Or_error.errorf "already handling protocol %s" protocol else match%map - Helper.do_rpc net (module Helper.Rpcs.Add_stream_handler) {protocol} + Helper.do_rpc net (module Helper.Rpcs.Add_stream_handler) { protocol } with | Ok "addStreamHandler success" -> Hashtbl.add_exn net.protocol_handlers ~key:protocol ~data:ph ; @@ -1533,9 +1561,9 @@ let open_stream net ~protocol peer = Helper.( do_rpc net (module Rpcs.Open_stream) - {peer= Peer.Id.to_string peer; protocol}) + { peer = Peer.Id.to_string peer; protocol }) with - | Ok {stream_idx; peer} -> + | Ok { stream_idx; peer } -> let stream = Helper.make_stream net stream_idx protocol peer in Hashtbl.add_exn net.streams ~key:stream_idx ~data:stream ; Ok stream @@ -1547,7 +1575,7 @@ let add_peer net maddr ~seed = Helper.( do_rpc net (module Rpcs.Add_peer) - {multiaddr= Multiaddr.to_string maddr; seed}) + { multiaddr = Multiaddr.to_string maddr; seed }) with | Ok "addPeer success" -> Ok () @@ -1602,7 +1630,7 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids Hashtbl.iter outstanding_requests ~f:(fun iv -> Ivar.fill_if_empty iv (Or_error.error_string - "libp2p_helper process died before answering") ) ; + "libp2p_helper process died before answering")) ; Hashtbl.clear outstanding_requests ; if (not killed) @@ -1617,20 +1645,21 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids !"libp2p_helper process died unexpectedly: $exit_status" ~metadata: [ ( "exit_status" - , `String (Unix.Exit_or_signal.to_string_hum e) ) ] ; + , `String (Unix.Exit_or_signal.to_string_hum e) ) + ] ; Option.iter !termination_hack_ref ~f:(fun t -> - t.finished <- true ) ; + t.finished <- true) ; on_unexpected_termination () | Error err -> [%log fatal] !"Child processes library could not track libp2p_helper \ process: $err" - ~metadata:[("err", Error_json.error_to_yojson err)] ; + ~metadata:[ 
("err", Error_json.error_to_yojson err) ] ; Option.iter !termination_hack_ref ~f:(fun t -> - t.finished <- true ) ; + t.finished <- true) ; let%bind () = match !termination_hack_ref with - | Some {subprocess; _} -> + | Some { subprocess; _ } -> Deferred.ignore_m (Child_processes.kill subprocess) | None -> Deferred.unit @@ -1651,8 +1680,8 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids in [%log info] !"libp2p_helper process killed successfully: $exit_status" - ~metadata:[("exit_status", exit_status)] ; - Deferred.unit )) + ~metadata:[ ("exit_status", exit_status) ] ; + Deferred.unit)) with | Error e -> Deferred.Or_error.fail @@ -1668,21 +1697,21 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids { subprocess ; conf_dir ; logger - ; banned_ips= [] - ; connection_gating= - {banned_peers= []; trusted_peers= []; isolate= false} - ; me_keypair= Ivar.create () + ; banned_ips = [] + ; connection_gating = + { banned_peers = []; trusted_peers = []; isolate = false } + ; me_keypair = Ivar.create () ; outstanding_requests - ; subscriptions= Hashtbl.create (module Int) - ; streams= Hashtbl.create (module Int) - ; all_peers_seen= - ( if all_peers_seen_metric then Some Peers_no_ids.Set.empty - else None ) - ; peer_connected_callback= None - ; peer_disconnected_callback= None - ; protocol_handlers= Hashtbl.create (module String) - ; seqno= 1 - ; finished= false } + ; subscriptions = Hashtbl.create (module Int) + ; streams = Hashtbl.create (module Int) + ; all_peers_seen = + (if all_peers_seen_metric then Some Peers_no_ids.Set.empty else None) + ; peer_connected_callback = None + ; peer_disconnected_callback = None + ; protocol_handlers = Hashtbl.create (module String) + ; seqno = 1 + ; finished = false + } in termination_hack_ref := Some t ; Strict_pipe.Reader.iter (Child_processes.stderr_lines subprocess) @@ -1697,21 +1726,22 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids with _exn -> 
Logger.raw logger { r with - message= + message = "(go log message was not valid for logger; see $line)" - ; metadata= String.Map.singleton "line" (`String r.message) + ; metadata = String.Map.singleton "line" (`String r.message) } ) | Error err -> [%log error] ~metadata: [ ("line", `String line) - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] "failed to parse log line $line from helper stderr as json" | Ok (Error err) -> [%log debug] - ~metadata:[("line", `String line); ("error", `String err)] + ~metadata:[ ("line", `String line); ("error", `String err) ] "failed to parse log line $line from helper stderr" ) ; - Deferred.unit ) + Deferred.unit) |> don't_wait_for ; Strict_pipe.Reader.iter (Child_processes.stdout_lines subprocess) ~f:(fun line -> @@ -1721,7 +1751,7 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids Or_error.map v ~f:(fun v -> if Yojson.Safe.equal (member "upcall" v) `Null then Helper.handle_response t v - else Helper.handle_upcall t v ) + else Helper.handle_upcall t v) with | Ok (Ok ()) -> () @@ -1729,14 +1759,16 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids [%log error] ~metadata: [ ("line", `String line) - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] "failed to parse log line $line from helper stderr as json" | Ok (Error e) -> [%log error] "handling line from helper failed! 
$err" ~metadata: [ ("line", `String line) - ; ("err", Error_json.error_to_yojson e) ] ) ; - Deferred.unit ) + ; ("err", Error_json.error_to_yojson e) + ] ) ; + Deferred.unit) |> don't_wait_for ; ( if all_peers_seen_metric then let log_all_peers_interval = Time.Span.of_hr 2.0 in @@ -1747,10 +1779,9 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids Set.fold_right all_peers_seen ~init:(0, 0, [], []) ~f:(fun peer (num_batches, num_in_batch, batches, batch) -> if num_in_batch >= log_message_batch_size then - (num_batches + 1, 1, batch :: batches, [peer]) + (num_batches + 1, 1, batch :: batches, [ peer ]) else - (num_batches, num_in_batch + 1, batches, peer :: batch) - ) + (num_batches, num_in_batch + 1, batches, peer :: batch)) in let num_batches, batches = if num_in_batch > 0 then (num_batches + 1, batch :: batches) @@ -1765,7 +1796,7 @@ let create ~all_peers_seen_metric ~on_unexpected_termination ~logger ~pids ; ("num_batches", `Int num_batches) ; ( "peers" , `List (List.map ~f:Peers_no_ids.to_yojson batch) ) - ] ) ) ) ) ; + ]))) ) ; Deferred.Or_error.return t let%test_module "coda network tests" = @@ -1784,32 +1815,32 @@ let%test_module "coda network tests" = let%bind c_tmp = Unix.mkdtemp "p2p_helper_test_c" in let%bind a = create ~all_peers_seen_metric:false - ~logger:(Logger.extend logger [("name", `String "a")]) + ~logger:(Logger.extend logger [ ("name", `String "a") ]) ~conf_dir:a_tmp ~pids ~on_unexpected_termination:(fun () -> - raise Child_processes.Child_died ) + raise Child_processes.Child_died) >>| Or_error.ok_exn in let%bind b = create ~all_peers_seen_metric:false - ~logger:(Logger.extend logger [("name", `String "b")]) + ~logger:(Logger.extend logger [ ("name", `String "b") ]) ~conf_dir:b_tmp ~pids ~on_unexpected_termination:(fun () -> - raise Child_processes.Child_died ) + raise Child_processes.Child_died) >>| Or_error.ok_exn in let%bind c = create ~all_peers_seen_metric:false - ~logger:(Logger.extend logger [("name", `String 
"c")]) + ~logger:(Logger.extend logger [ ("name", `String "c") ]) ~conf_dir:c_tmp ~pids ~on_unexpected_termination:(fun () -> - raise Child_processes.Child_died ) + raise Child_processes.Child_died) >>| Or_error.ok_exn in let%bind kp_a = Keypair.random a in let%bind kp_b = Keypair.random b in let%bind kp_c = Keypair.random c in - let maddrs = ["/ip4/127.0.0.1/tcp/0"] in + let maddrs = [ "/ip4/127.0.0.1/tcp/0" ] in let%bind () = configure a ~logger ~external_maddr:(List.hd_exn maddrs) ~me:kp_a ~maddrs ~network_id ~peer_exchange:true ~mina_peer_exchange:true @@ -1818,7 +1849,7 @@ let%test_module "coda network tests" = ~unsafe_no_trust_ip:true ~max_connections:50 ~validation_queue_size:150 ~initial_gating_config: - {trusted_peers= []; banned_peers= []; isolate= false} + { trusted_peers = []; banned_peers = []; isolate = false } >>| Or_error.ok_exn in let%bind raw_seed_peers = listening_addrs a >>| Or_error.ok_exn in @@ -1827,26 +1858,26 @@ let%test_module "coda network tests" = (List.hd_exn raw_seed_peers) (Keypair.to_peer_id kp_a) in - [%log error] ~metadata:[("peer", `String seed_peer)] "Seed_peer: $peer" ; + [%log error] ~metadata:[ ("peer", `String seed_peer) ] "Seed_peer: $peer" ; let%bind () = configure b ~logger ~external_maddr:(List.hd_exn maddrs) ~me:kp_b ~maddrs ~network_id ~peer_exchange:true ~mina_peer_exchange:true - ~direct_peers:[] ~seed_peers:[seed_peer] ~on_peer_connected:Fn.ignore - ~on_peer_disconnected:Fn.ignore ~flooding:false ~metrics_port:None - ~unsafe_no_trust_ip:true ~max_connections:50 - ~validation_queue_size:150 + ~direct_peers:[] ~seed_peers:[ seed_peer ] + ~on_peer_connected:Fn.ignore ~on_peer_disconnected:Fn.ignore + ~flooding:false ~metrics_port:None ~unsafe_no_trust_ip:true + ~max_connections:50 ~validation_queue_size:150 ~initial_gating_config: - {trusted_peers= []; banned_peers= []; isolate= false} + { trusted_peers = []; banned_peers = []; isolate = false } >>| Or_error.ok_exn and () = configure c ~logger 
~external_maddr:(List.hd_exn maddrs) ~me:kp_c ~maddrs ~network_id ~peer_exchange:true ~mina_peer_exchange:true - ~direct_peers:[] ~seed_peers:[seed_peer] ~on_peer_connected:Fn.ignore - ~on_peer_disconnected:Fn.ignore ~flooding:false ~metrics_port:None - ~unsafe_no_trust_ip:true ~max_connections:50 - ~validation_queue_size:150 + ~direct_peers:[] ~seed_peers:[ seed_peer ] + ~on_peer_connected:Fn.ignore ~on_peer_disconnected:Fn.ignore + ~flooding:false ~metrics_port:None ~unsafe_no_trust_ip:true + ~max_connections:50 ~validation_queue_size:150 ~initial_gating_config: - {trusted_peers= []; banned_peers= []; isolate= false} + { trusted_peers = []; banned_peers = []; isolate = false } >>| Or_error.ok_exn in let%bind b_advert = begin_advertising b in @@ -1951,7 +1982,7 @@ let%test_module "coda network tests" = let r, w = Stream.pipes stream in let%map () = Pipe.transfer r w ~f:Fn.id in Pipe.close w ; - handler_finished := true ) + handler_finished := true) |> Deferred.Or_error.ok_exn in let%bind stream = diff --git a/src/lib/mina_net2/mina_net2.mli b/src/lib/mina_net2/mina_net2.mli index ca3794bbcd8..fbd297eff15 100644 --- a/src/lib/mina_net2/mina_net2.mli +++ b/src/lib/mina_net2/mina_net2.mli @@ -54,7 +54,7 @@ open Network_peer type net module Validation_callback : sig - type validation_result = [`Accept | `Reject | `Ignore] [@@deriving equal] + type validation_result = [ `Accept | `Reject | `Ignore ] [@@deriving equal] type t @@ -130,7 +130,7 @@ module Multiaddr : sig val of_file_contents : contents:string -> t list end -type discovered_peer = {id: Peer.Id.t; maddrs: Multiaddr.t list} +type discovered_peer = { id : Peer.Id.t; maddrs : Multiaddr.t list } module Pubsub : sig (** A subscription to a pubsub topic. 
*) @@ -176,9 +176,10 @@ module Pubsub : sig val subscribe : net -> string - -> should_forward_message:( string Envelope.Incoming.t - -> Validation_callback.t - -> unit Deferred.t) + -> should_forward_message: + ( string Envelope.Incoming.t + -> Validation_callback.t + -> unit Deferred.t) -> string Subscription.t Deferred.Or_error.t (** Like [subscribe], but knows how to stringify/destringify @@ -195,13 +196,11 @@ module Pubsub : sig val subscribe_encode : net -> string - -> should_forward_message:( 'a Envelope.Incoming.t - -> Validation_callback.t - -> unit Deferred.t) + -> should_forward_message: + ('a Envelope.Incoming.t -> Validation_callback.t -> unit Deferred.t) -> bin_prot:'a Bin_prot.Type_class.t - -> on_decode_failure:[ `Ignore - | `Call of - string Envelope.Incoming.t -> Error.t -> unit ] + -> on_decode_failure: + [ `Ignore | `Call of string Envelope.Incoming.t -> Error.t -> unit ] -> 'a Subscription.t Deferred.Or_error.t end @@ -223,7 +222,7 @@ val create : or peer IDs in [banned_peers], except for those listed in [trusted_peers]. If [isolate] is true, only connections to [trusted_peers] are allowed. *) type connection_gating = - {banned_peers: Peer.t list; trusted_peers: Peer.t list; isolate: bool} + { banned_peers : Peer.t list; trusted_peers : Peer.t list; isolate : bool } (** Configure the network connection. 
* @@ -351,7 +350,7 @@ val open_stream : *) val handle_protocol : net - -> on_handler_error:[`Raise | `Ignore | `Call of Stream.t -> exn -> unit] + -> on_handler_error:[ `Raise | `Ignore | `Call of Stream.t -> exn -> unit ] -> protocol:string -> (Stream.t -> unit Deferred.t) -> Protocol_handler.t Deferred.Or_error.t diff --git a/src/lib/mina_networking/mina_networking.ml b/src/lib/mina_networking/mina_networking.ml index 97a203ac17c..898d08033c6 100644 --- a/src/lib/mina_networking/mina_networking.ml +++ b/src/lib/mina_networking/mina_networking.ml @@ -12,36 +12,40 @@ let refused_answer_query_string = "Refused to answer_query" exception No_initial_peers type Structured_log_events.t += - | Block_received of {state_hash: State_hash.t; sender: Envelope.Sender.t} - [@@deriving register_event {msg= "Received a block from $sender"}] + | Block_received of { state_hash : State_hash.t; sender : Envelope.Sender.t } + [@@deriving register_event { msg = "Received a block from $sender" }] type Structured_log_events.t += | Snark_work_received of - { work: Snark_pool.Resource_pool.Diff.compact - ; sender: Envelope.Sender.t } + { work : Snark_pool.Resource_pool.Diff.compact + ; sender : Envelope.Sender.t + } [@@deriving - register_event {msg= "Received Snark-pool diff $work from $sender"}] + register_event { msg = "Received Snark-pool diff $work from $sender" }] type Structured_log_events.t += | Transactions_received of - { txns: Transaction_pool.Resource_pool.Diff.t - ; sender: Envelope.Sender.t } + { txns : Transaction_pool.Resource_pool.Diff.t + ; sender : Envelope.Sender.t + } [@@deriving - register_event {msg= "Received transaction-pool diff $txns from $sender"}] + register_event { msg = "Received transaction-pool diff $txns from $sender" }] -type Structured_log_events.t += Gossip_new_state of {state_hash: State_hash.t} - [@@deriving register_event {msg= "Broadcasting new state over gossip net"}] +type Structured_log_events.t += + | Gossip_new_state of { state_hash : 
State_hash.t } + [@@deriving register_event { msg = "Broadcasting new state over gossip net" }] type Structured_log_events.t += | Gossip_transaction_pool_diff of - { txns: Transaction_pool.Resource_pool.Diff.t } + { txns : Transaction_pool.Resource_pool.Diff.t } [@@deriving - register_event {msg= "Broadcasting transaction pool diff over gossip net"}] + register_event + { msg = "Broadcasting transaction pool diff over gossip net" }] type Structured_log_events.t += - | Gossip_snark_pool_diff of {work: Snark_pool.Resource_pool.Diff.compact} + | Gossip_snark_pool_diff of { work : Snark_pool.Resource_pool.Diff.compact } [@@deriving - register_event {msg= "Broadcasting snark pool diff over gossip net"}] + register_event { msg = "Broadcasting snark pool diff over gossip net" }] (* INSTRUCTIONS FOR ADDING A NEW RPC: * - define a new module under the Rpcs module @@ -50,7 +54,7 @@ type Structured_log_events.t += * - add a pattern matching case to Rpcs.implementation_of_rpc mapping the * new constructor to the new module for your RPC * - add a match case to `match_handler`, below -*) + *) module Rpcs = struct (* for versioning of the types here, see @@ -88,10 +92,10 @@ module Rpcs = struct module V1 = struct module T = struct - type query = unit [@@deriving bin_io, version {rpc}] + type query = unit [@@deriving bin_io, version { rpc }] type response = Network_peer.Peer.Stable.V1.t list - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -103,7 +107,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -144,7 +149,7 @@ module Rpcs = struct module V1 = struct module T = struct - type query = State_hash.Stable.V1.t [@@deriving bin_io, version {rpc}] + type query = State_hash.Stable.V1.t [@@deriving bin_io, version { rpc }] type response = ( Staged_ledger.Scan_state.Stable.V1.t @@ -152,7 +157,7 
@@ module Rpcs = struct * Pending_coinbase.Stable.V1.t * Mina_state.Protocol_state.Value.Stable.V1.t list ) option - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -164,7 +169,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -201,11 +207,10 @@ module Rpcs = struct module V1 = struct module T = struct type query = Ledger_hash.Stable.V1.t * Sync_ledger.Query.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] - type response = - Sync_ledger.Answer.Stable.V1.t Core.Or_error.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + type response = Sync_ledger.Answer.Stable.V1.t Core.Or_error.Stable.V1.t + [@@deriving bin_io, sexp, version { rpc }] let query_of_caller_model = Fn.id @@ -217,7 +222,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -254,10 +260,10 @@ module Rpcs = struct module V1 = struct module T = struct type query = State_hash.Stable.V1.t list - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] type response = External_transition.Stable.V1.t list option - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -269,7 +275,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -306,11 +313,11 @@ module Rpcs = struct module V1 = struct module T = struct type query = State_hash.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] type response = (State_hash.Stable.V1.t * State_body_hash.Stable.V1.t list) option - [@@deriving 
bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -322,7 +329,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -358,10 +366,10 @@ module Rpcs = struct module V1 = struct module T = struct - type query = unit [@@deriving bin_io, sexp, version {rpc}] + type query = unit [@@deriving bin_io, sexp, version { rpc }] type response = State_hash.Stable.V1.t list - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -373,7 +381,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -420,7 +429,7 @@ module Rpcs = struct ( Consensus.Data.Consensus_state.Value.Stable.V1.t , State_hash.Stable.V1.t ) With_hash.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] type response = ( External_transition.Stable.V1.t @@ -428,7 +437,7 @@ module Rpcs = struct ) Proof_carrying_data.Stable.V1.t option - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -440,7 +449,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -478,9 +488,9 @@ module Rpcs = struct module V1 = struct module T = struct type query = Core.Time.Stable.V1.t - [@@deriving bin_io, sexp, version {rpc}] + [@@deriving bin_io, sexp, version { rpc }] - type response = unit [@@deriving bin_io, version {rpc}] + type response = unit [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -492,7 +502,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + 
Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -532,7 +543,7 @@ module Rpcs = struct module V1 = struct module T = struct - type query = unit [@@deriving bin_io, sexp, version {rpc}] + type query = unit [@@deriving bin_io, sexp, version { rpc }] type response = ( External_transition.Stable.V1.t @@ -540,7 +551,7 @@ module Rpcs = struct ) Proof_carrying_data.Stable.V1.t option - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -552,7 +563,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -569,7 +581,7 @@ module Rpcs = struct module Stable = struct module V2 = struct type t = - { node_ip_addr: Core.Unix.Inet_addr.Stable.V1.t + { node_ip_addr : Core.Unix.Inet_addr.Stable.V1.t [@to_yojson fun ip_addr -> `String (Unix.Inet_addr.to_string ip_addr)] [@of_yojson @@ -578,24 +590,25 @@ module Rpcs = struct Ok (Unix.Inet_addr.of_string s) | _ -> Error "expected string"] - ; node_peer_id: Network_peer.Peer.Id.Stable.V1.t + ; node_peer_id : Network_peer.Peer.Id.Stable.V1.t [@to_yojson fun peer_id -> `String peer_id] [@of_yojson function `String s -> Ok s | _ -> Error "expected string"] - ; sync_status: Sync_status.Stable.V1.t - ; peers: Network_peer.Peer.Stable.V1.t list - ; block_producers: + ; sync_status : Sync_status.Stable.V1.t + ; peers : Network_peer.Peer.Stable.V1.t list + ; block_producers : Signature_lib.Public_key.Compressed.Stable.V1.t list - ; protocol_state_hash: State_hash.Stable.V1.t - ; ban_statuses: + ; protocol_state_hash : State_hash.Stable.V1.t + ; ban_statuses : ( Network_peer.Peer.Stable.V1.t * Trust_system.Peer_status.Stable.V1.t ) list - ; k_block_hashes_and_timestamps: + ; k_block_hashes_and_timestamps : (State_hash.Stable.V1.t * string) list - ; git_commit: string - ; uptime_minutes: int - ; block_height_opt: int option 
[@default None]} + ; git_commit : string + ; uptime_minutes : int + ; block_height_opt : int option [@default None] + } [@@deriving to_yojson, of_yojson] let to_latest = Fn.id @@ -603,7 +616,7 @@ module Rpcs = struct module V1 = struct type t = - { node_ip_addr: Core.Unix.Inet_addr.Stable.V1.t + { node_ip_addr : Core.Unix.Inet_addr.Stable.V1.t [@to_yojson fun ip_addr -> `String (Unix.Inet_addr.to_string ip_addr)] [@of_yojson @@ -612,38 +625,40 @@ module Rpcs = struct Ok (Unix.Inet_addr.of_string s) | _ -> Error "expected string"] - ; node_peer_id: Network_peer.Peer.Id.Stable.V1.t + ; node_peer_id : Network_peer.Peer.Id.Stable.V1.t [@to_yojson fun peer_id -> `String peer_id] [@of_yojson function `String s -> Ok s | _ -> Error "expected string"] - ; sync_status: Sync_status.Stable.V1.t - ; peers: Network_peer.Peer.Stable.V1.t list - ; block_producers: + ; sync_status : Sync_status.Stable.V1.t + ; peers : Network_peer.Peer.Stable.V1.t list + ; block_producers : Signature_lib.Public_key.Compressed.Stable.V1.t list - ; protocol_state_hash: State_hash.Stable.V1.t - ; ban_statuses: + ; protocol_state_hash : State_hash.Stable.V1.t + ; ban_statuses : ( Network_peer.Peer.Stable.V1.t * Trust_system.Peer_status.Stable.V1.t ) list - ; k_block_hashes_and_timestamps: + ; k_block_hashes_and_timestamps : (State_hash.Stable.V1.t * string) list - ; git_commit: string - ; uptime_minutes: int } + ; git_commit : string + ; uptime_minutes : int + } [@@deriving to_yojson, of_yojson] let to_latest status : Latest.t = - { node_ip_addr= status.node_ip_addr - ; node_peer_id= status.node_peer_id - ; sync_status= status.sync_status - ; peers= status.peers - ; block_producers= status.block_producers - ; protocol_state_hash= status.protocol_state_hash - ; ban_statuses= status.ban_statuses - ; k_block_hashes_and_timestamps= + { node_ip_addr = status.node_ip_addr + ; node_peer_id = status.node_peer_id + ; sync_status = status.sync_status + ; peers = status.peers + ; block_producers = 
status.block_producers + ; protocol_state_hash = status.protocol_state_hash + ; ban_statuses = status.ban_statuses + ; k_block_hashes_and_timestamps = status.k_block_hashes_and_timestamps - ; git_commit= status.git_commit - ; uptime_minutes= status.uptime_minutes - ; block_height_opt= None } + ; git_commit = status.git_commit + ; uptime_minutes = status.uptime_minutes + ; block_height_opt = None + } end end] end @@ -670,7 +685,7 @@ module Rpcs = struct | Ok status -> Node_status.Stable.Latest.to_yojson status | Error err -> - `Assoc [("error", Error_json.error_to_yojson err)] + `Assoc [ ("error", Error_json.error_to_yojson err) ] include Perf_histograms.Rpc.Plain.Extend (struct include M @@ -679,11 +694,10 @@ module Rpcs = struct module V2 = struct module T = struct - type query = unit [@@deriving bin_io, sexp, version {rpc}] + type query = unit [@@deriving bin_io, sexp, version { rpc }] - type response = - Node_status.Stable.V2.t Core_kernel.Or_error.Stable.V1.t - [@@deriving bin_io, version {rpc}] + type response = Node_status.Stable.V2.t Core_kernel.Or_error.Stable.V1.t + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -695,7 +709,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -707,11 +722,10 @@ module Rpcs = struct module V1 = struct module T = struct - type query = unit [@@deriving bin_io, sexp, version {rpc}] + type query = unit [@@deriving bin_io, sexp, version { rpc }] - type response = - Node_status.Stable.V1.t Core_kernel.Or_error.Stable.V1.t - [@@deriving bin_io, version {rpc}] + type response = Node_status.Stable.V1.t Core_kernel.Or_error.Stable.V1.t + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -722,17 +736,18 @@ module Rpcs = struct Error err | Ok (status : Node_status.Stable.Latest.t) -> Ok - { Node_status.Stable.V1.node_ip_addr= status.node_ip_addr - ; 
node_peer_id= status.node_peer_id - ; sync_status= status.sync_status - ; peers= status.peers - ; block_producers= status.block_producers - ; protocol_state_hash= status.protocol_state_hash - ; ban_statuses= status.ban_statuses - ; k_block_hashes_and_timestamps= + { Node_status.Stable.V1.node_ip_addr = status.node_ip_addr + ; node_peer_id = status.node_peer_id + ; sync_status = status.sync_status + ; peers = status.peers + ; block_producers = status.block_producers + ; protocol_state_hash = status.protocol_state_hash + ; ban_statuses = status.ban_statuses + ; k_block_hashes_and_timestamps = status.k_block_hashes_and_timestamps - ; git_commit= status.git_commit - ; uptime_minutes= status.uptime_minutes } + ; git_commit = status.git_commit + ; uptime_minutes = status.uptime_minutes + } let caller_model_of_response = function | Error err -> @@ -742,7 +757,8 @@ module Rpcs = struct end module T' = - Perf_histograms.Rpc.Plain.Decorate_bin_io (struct + Perf_histograms.Rpc.Plain.Decorate_bin_io + (struct include M include Master end) @@ -781,14 +797,15 @@ module Rpcs = struct type rpc_handler = | Rpc_handler : - { rpc: ('q, 'r) rpc - ; f: ('q, 'r) Rpc_intf.rpc_fn - ; cost: 'q -> int - ; budget: int * [`Per of Time.Span.t] } + { rpc : ('q, 'r) rpc + ; f : ('q, 'r) Rpc_intf.rpc_fn + ; cost : 'q -> int + ; budget : int * [ `Per of Time.Span.t ] + } -> rpc_handler - let implementation_of_rpc : type q r. - (q, r) rpc -> (q, r) Rpc_intf.rpc_implementation = function + let implementation_of_rpc : + type q r. (q, r) rpc -> (q, r) Rpc_intf.rpc_implementation = function | Get_some_initial_peers -> (module Get_some_initial_peers) | Get_staged_ledger_aux_and_pending_coinbases_at_hash -> @@ -810,12 +827,13 @@ module Rpcs = struct | Consensus_rpc rpc -> Consensus.Hooks.Rpcs.implementation_of_rpc rpc - let match_handler : type q r. + let match_handler : + type q r. 
rpc_handler -> (q, r) rpc -> do_:((q, r) Rpc_intf.rpc_fn -> 'a) -> 'a option = - fun (Rpc_handler {rpc= impl_rpc; f; cost; budget}) rpc ~do_ -> + fun (Rpc_handler { rpc = impl_rpc; f; cost; budget }) rpc ~do_ -> match (rpc, impl_rpc) with | Get_some_initial_peers, Get_some_initial_peers -> Some (do_ f) @@ -836,7 +854,7 @@ module Rpcs = struct Some (do_ f) | Consensus_rpc rpc_a, Consensus_rpc rpc_b -> Consensus.Hooks.Rpcs.match_handler - (Rpc_handler {rpc= rpc_b; f; cost; budget}) + (Rpc_handler { rpc = rpc_b; f; cost; budget }) rpc_a ~do_ (* TODO: Why is there a catch-all here? *) | _ -> @@ -847,45 +865,47 @@ module Gossip_net = Gossip_net.Make (Rpcs) module Config = struct type log_gossip_heard = - {snark_pool_diff: bool; transaction_pool_diff: bool; new_state: bool} + { snark_pool_diff : bool; transaction_pool_diff : bool; new_state : bool } [@@deriving make] type t = - { logger: Logger.t - ; trust_system: Trust_system.t - ; time_controller: Block_time.Controller.t - ; consensus_local_state: Consensus.Data.Local_state.t - ; genesis_ledger_hash: Ledger_hash.t - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; creatable_gossip_net: Gossip_net.Any.creatable - ; is_seed: bool - ; log_gossip_heard: log_gossip_heard } + { logger : Logger.t + ; trust_system : Trust_system.t + ; time_controller : Block_time.Controller.t + ; consensus_local_state : Consensus.Data.Local_state.t + ; genesis_ledger_hash : Ledger_hash.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; creatable_gossip_net : Gossip_net.Any.creatable + ; is_seed : bool + ; log_gossip_heard : log_gossip_heard + } [@@deriving make] end type t = - { logger: Logger.t - ; trust_system: Trust_system.t - ; gossip_net: Gossip_net.Any.t - ; states: + { logger : Logger.t + ; trust_system : Trust_system.t + ; gossip_net : Gossip_net.Any.t + ; states : ( External_transition.t Envelope.Incoming.t * Block_time.t * Mina_net2.Validation_callback.t ) Strict_pipe.Reader.t - ; 
transaction_pool_diffs: + ; transaction_pool_diffs : ( Transaction_pool.Resource_pool.Diff.t Envelope.Incoming.t * Mina_net2.Validation_callback.t ) Strict_pipe.Reader.t - ; snark_pool_diffs: + ; snark_pool_diffs : ( Snark_pool.Resource_pool.Diff.t Envelope.Incoming.t * Mina_net2.Validation_callback.t ) Strict_pipe.Reader.t - ; online_status: [`Offline | `Online] Broadcast_pipe.Reader.t - ; first_received_message_signal: unit Ivar.t } + ; online_status : [ `Offline | `Online ] Broadcast_pipe.Reader.t + ; first_received_message_signal : unit Ivar.t + } [@@deriving fields] let offline_time - {Genesis_constants.Constraint_constants.block_window_duration_ms; _} = + { Genesis_constants.Constraint_constants.block_window_duration_ms; _ } = (* This is a bit of a hack, see #3232. *) let inactivity_ms = block_window_duration_ms * 8 in Block_time.Span.of_ms @@ Int64.of_int inactivity_ms @@ -894,10 +914,9 @@ let setup_timer ~constraint_constants time_controller sync_state_broadcaster = Block_time.Timeout.create time_controller (offline_time constraint_constants) ~f:(fun _ -> Broadcast_pipe.Writer.write sync_state_broadcaster `Offline - |> don't_wait_for ) + |> don't_wait_for) -let online_broadcaster ~constraint_constants time_controller received_messages - = +let online_broadcaster ~constraint_constants time_controller received_messages = let online_reader, online_writer = Broadcast_pipe.create `Offline in let init = Block_time.Timeout.create time_controller @@ -907,7 +926,7 @@ let online_broadcaster ~constraint_constants time_controller received_messages Strict_pipe.Reader.fold received_messages ~init ~f:(fun old_timeout _ -> let%map () = Broadcast_pipe.Writer.write online_writer `Online in Block_time.Timeout.cancel time_controller old_timeout () ; - setup_timer ~constraint_constants time_controller online_writer ) + setup_timer ~constraint_constants time_controller online_writer) |> Deferred.ignore_m |> don't_wait_for ; online_reader @@ -968,7 +987,7 @@ let create (config : 
Config.t) in let validate_protocol_versions ~rpc_name sender external_transition = let open Trust_system.Actions in - let External_transition.{valid_current; valid_next; matches_daemon} = + let External_transition.{ valid_current; valid_next; matches_daemon } = External_transition.protocol_version_status external_transition in let%bind () = @@ -984,7 +1003,8 @@ let create (config : Config.t) , `String (Protocol_version.to_string (External_transition.current_protocol_version - external_transition)) ) ] ) ) + external_transition)) ) + ] ) ) in Trust_system.record_envelope_sender config.trust_system config.logger sender actions @@ -995,15 +1015,16 @@ let create (config : Config.t) let actions = ( Sent_invalid_protocol_version , Some - ( "$rpc_name: external transition with invalid proposed \ - protocol version" + ( "$rpc_name: external transition with invalid proposed protocol \ + version" , [ ("rpc_name", `String rpc_name) ; ( "proposed_protocol_version" , `String (Protocol_version.to_string (Option.value_exn (External_transition.proposed_protocol_version_opt - external_transition))) ) ] ) ) + external_transition))) ) + ] ) ) in Trust_system.record_envelope_sender config.trust_system config.logger sender actions @@ -1023,8 +1044,8 @@ let create (config : Config.t) (External_transition.current_protocol_version external_transition)) ) ; ( "daemon_current_protocol_version" - , `String Protocol_version.(to_string @@ get_current ()) ) ] - ) ) + , `String Protocol_version.(to_string @@ get_current ()) ) + ] ) ) in Trust_system.record_envelope_sender config.trust_system config.logger sender actions @@ -1036,7 +1057,7 @@ let create (config : Config.t) let get_staged_ledger_aux_and_pending_coinbases_at_hash_rpc conn ~version:_ hash = let action_msg = "Staged ledger and pending coinbases at hash: $hash" in - let msg_args = [("hash", State_hash.to_yojson hash)] in + let msg_args = [ ("hash", State_hash.to_yojson hash) ] in let%bind result, sender = run_for_rpc_result conn hash 
~f:get_staged_ledger_aux_and_pending_coinbases_at_hash action_msg @@ -1049,7 +1070,7 @@ let create (config : Config.t) let%bind result, sender = run_for_rpc_result conn sync_query ~f:answer_sync_ledger_query "Answer_sync_ledger_query: $query" - [("query", Sync_ledger.Query.to_yojson query)] + [ ("query", Sync_ledger.Query.to_yojson query) ] in let%bind () = match result with @@ -1070,23 +1091,24 @@ let create (config : Config.t) ; ( "query" , Syncable_ledger.Query.to_yojson Ledger.Addr.to_yojson query ) - ; ("error", Error_json.error_to_yojson err) ] ) )) + ; ("error", Error_json.error_to_yojson err) + ] ) )) else return () in return result in - let md p = [("peer", Peer.to_yojson p)] in + let md p = [ ("peer", Peer.to_yojson p) ] in let get_ancestry_rpc conn ~version:_ query = [%log debug] "Sending root proof to $peer" ~metadata:(md conn) ; let action_msg = "Get_ancestry query: $query" in - let msg_args = [("query", Rpcs.Get_ancestry.query_to_yojson query)] in + let msg_args = [ ("query", Rpcs.Get_ancestry.query_to_yojson query) ] in let%bind result, sender = run_for_rpc_result conn query ~f:get_ancestry action_msg msg_args in match result with | None -> record_unknown_item result sender action_msg msg_args - | Some {proof= _, ext_trans; _} -> + | Some { proof = _, ext_trans; _ } -> let%map valid_protocol_versions = validate_protocol_versions ~rpc_name:"Get_ancestry" sender ext_trans in @@ -1095,7 +1117,7 @@ let create (config : Config.t) let get_some_initial_peers_rpc (conn : Peer.t) ~version:_ () = [%log trace] "Sending some initial peers to $peer" ~metadata:(md conn) ; let action_msg = "Get_some_initial_peers query: $query" in - let msg_args = [("query", `Assoc [])] in + let msg_args = [ ("query", `Assoc []) ] in let%map result, _sender = run_for_rpc_result conn () ~f:get_some_initial_peers action_msg msg_args in @@ -1104,14 +1126,14 @@ let create (config : Config.t) let get_best_tip_rpc conn ~version:_ (() : unit) = [%log debug] "Sending best_tip to $peer" 
~metadata:(md conn) ; let action_msg = "Get_best_tip. query: $query" in - let msg_args = [("query", Rpcs.Get_best_tip.query_to_yojson ())] in + let msg_args = [ ("query", Rpcs.Get_best_tip.query_to_yojson ()) ] in let%bind result, sender = run_for_rpc_result conn () ~f:get_best_tip action_msg msg_args in match result with | None -> record_unknown_item result sender action_msg msg_args - | Some {data= data_ext_trans; proof= _, proof_ext_trans} -> + | Some { data = data_ext_trans; proof = _, proof_ext_trans } -> let%bind valid_data_protocol_versions = validate_protocol_versions ~rpc_name:"Get_best_tip (data)" sender data_ext_trans @@ -1128,7 +1150,7 @@ let create (config : Config.t) [%log info] "Sending transition_chain_proof to $peer" ~metadata:(md conn) ; let action_msg = "Get_transition_chain_proof query: $query" in let msg_args = - [("query", Rpcs.Get_transition_chain_proof.query_to_yojson query)] + [ ("query", Rpcs.Get_transition_chain_proof.query_to_yojson query) ] in let%bind result, sender = run_for_rpc_result conn query ~f:get_transition_chain_proof action_msg @@ -1140,7 +1162,7 @@ let create (config : Config.t) [%log info] "Sending transition_knowledge to $peer" ~metadata:(md conn) ; let action_msg = "Get_transition_knowledge query: $query" in let msg_args = - [("query", Rpcs.Get_transition_knowledge.query_to_yojson query)] + [ ("query", Rpcs.Get_transition_knowledge.query_to_yojson query) ] in run_for_rpc_result conn query ~f:get_transition_knowledge action_msg msg_args @@ -1150,7 +1172,7 @@ let create (config : Config.t) [%log info] "Sending transition_chain to $peer" ~metadata:(md conn) ; let action_msg = "Get_transition_chain query: $query" in let msg_args = - [("query", Rpcs.Get_transition_chain.query_to_yojson query)] + [ ("query", Rpcs.Get_transition_chain.query_to_yojson query) ] in let%bind result, sender = run_for_rpc_result conn query ~f:get_transition_chain action_msg msg_args @@ -1165,8 +1187,7 @@ let create (config : Config.t) 
(validate_protocol_versions ~rpc_name:"Get_transition_chain" sender) in - if List.for_all valid_protocol_versions ~f:(Bool.equal true) then - result + if List.for_all valid_protocol_versions ~f:(Bool.equal true) then result else None in let ban_notify_rpc conn ~version:_ ban_until = @@ -1175,7 +1196,8 @@ let create (config : Config.t) ~metadata: [ ("peer", Peer.to_yojson conn) ; ( "ban_until" - , `String (Time.to_string_abs ~zone:Time.Zone.utc ban_until) ) ] ; + , `String (Time.to_string_abs ~zone:Time.Zone.utc ban_until) ) + ] ; (* no computation to do; we're just getting notification *) Deferred.unit in @@ -1184,60 +1206,69 @@ let create (config : Config.t) let open Time.Span in let unit _ = 1 in [ Rpc_handler - { rpc= Get_some_initial_peers - ; f= get_some_initial_peers_rpc - ; budget= (1, `Per minute) - ; cost= unit } + { rpc = Get_some_initial_peers + ; f = get_some_initial_peers_rpc + ; budget = (1, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Get_staged_ledger_aux_and_pending_coinbases_at_hash - ; f= get_staged_ledger_aux_and_pending_coinbases_at_hash_rpc - ; budget= (4, `Per minute) - ; cost= unit } + { rpc = Get_staged_ledger_aux_and_pending_coinbases_at_hash + ; f = get_staged_ledger_aux_and_pending_coinbases_at_hash_rpc + ; budget = (4, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Answer_sync_ledger_query - ; f= answer_sync_ledger_query_rpc - ; budget= - (Int.pow 2 17, `Per minute) - (* Not that confident about this one. *) - ; cost= unit } + { rpc = Answer_sync_ledger_query + ; f = answer_sync_ledger_query_rpc + ; budget = + (Int.pow 2 17, `Per minute) (* Not that confident about this one. 
*) + ; cost = unit + } ; Rpc_handler - { rpc= Get_best_tip - ; f= get_best_tip_rpc - ; budget= (3, `Per minute) - ; cost= unit } + { rpc = Get_best_tip + ; f = get_best_tip_rpc + ; budget = (3, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Get_ancestry - ; f= get_ancestry_rpc - ; budget= (5, `Per minute) - ; cost= unit } + { rpc = Get_ancestry + ; f = get_ancestry_rpc + ; budget = (5, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Get_transition_knowledge - ; f= get_transition_knowledge_rpc - ; budget= (1, `Per minute) - ; cost= unit } + { rpc = Get_transition_knowledge + ; f = get_transition_knowledge_rpc + ; budget = (1, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Get_transition_chain - ; f= get_transition_chain_rpc - ; budget= (1, `Per second) (* Not that confident about this one. *) - ; cost= (fun x -> Int.max 1 (List.length x)) } + { rpc = Get_transition_chain + ; f = get_transition_chain_rpc + ; budget = (1, `Per second) (* Not that confident about this one. 
*) + ; cost = (fun x -> Int.max 1 (List.length x)) + } ; Rpc_handler - { rpc= Get_transition_chain_proof - ; f= get_transition_chain_proof_rpc - ; budget= (3, `Per minute) - ; cost= unit } + { rpc = Get_transition_chain_proof + ; f = get_transition_chain_proof_rpc + ; budget = (3, `Per minute) + ; cost = unit + } ; Rpc_handler - { rpc= Ban_notify - ; f= ban_notify_rpc - ; budget= (1, `Per minute) - ; cost= unit } ] + { rpc = Ban_notify + ; f = ban_notify_rpc + ; budget = (1, `Per minute) + ; cost = unit + } + ] @ Consensus.Hooks.Rpcs.( List.map (rpc_handlers ~logger:config.logger ~local_state:config.consensus_local_state ~genesis_ledger_hash: (Frozen_ledger_hash.of_ledger_hash config.genesis_ledger_hash)) - ~f:(fun (Rpc_handler {rpc; f; cost; budget}) -> - Rpcs.(Rpc_handler {rpc= Consensus_rpc rpc; f; cost; budget}) )) + ~f:(fun (Rpc_handler { rpc; f; cost; budget }) -> + Rpcs.(Rpc_handler { rpc = Consensus_rpc rpc; f; cost; budget }))) in let%map gossip_net = Gossip_net.Any.create config.creatable_gossip_net rpc_handlers @@ -1248,7 +1279,7 @@ let create (config : Config.t) let fake_time = Time.now () in Clock.every' (Time.Span.of_min 1.) (fun () -> match%bind - get_node_status {data= (); sender= Local; received_at= fake_time} + get_node_status { data = (); sender = Local; received_at = fake_time } with | Error _ -> Deferred.unit @@ -1256,7 +1287,7 @@ let create (config : Config.t) Gossip_net.Any.set_node_status gossip_net ( Rpcs.Get_node_status.Node_status.to_yojson data |> Yojson.Safe.to_string ) - >>| ignore ) ; + >>| ignore) ; don't_wait_for (Gossip_net.Any.on_first_connect gossip_net ~f:(fun () -> (* After first_connect this list will only be empty if we filtered out all the peers due to mismatched chain id. 
*) @@ -1266,7 +1297,7 @@ let create (config : Config.t) [%log fatal] "Failed to connect to any initial peers, possible chain id \ mismatch" ; - raise No_initial_peers )) )) ; + raise No_initial_peers )))) ; (* TODO: Think about buffering: I.e., what do we do when too many messages are coming in, or going out. For example, some things you really want to not drop (like your outgoing @@ -1297,10 +1328,12 @@ let create (config : Config.t) if config.log_gossip_heard.new_state then [%str_log info] ~metadata: - [("external_transition", External_transition.to_yojson state)] + [ ("external_transition", External_transition.to_yojson state) + ] (Block_received - { state_hash= External_transition.state_hash state - ; sender= Envelope.Incoming.sender envelope }) ; + { state_hash = External_transition.state_hash state + ; sender = Envelope.Incoming.sender envelope + }) ; `Fst ( Envelope.Incoming.map envelope ~f:(fun _ -> state) , Block_time.now config.time_controller @@ -1311,7 +1344,7 @@ let create (config : Config.t) ~f:(fun work -> [%str_log debug] (Snark_work_received - {work; sender= Envelope.Incoming.sender envelope}) ) ; + { work; sender = Envelope.Incoming.sender envelope })) ; Mina_metrics.( Counter.inc_one Snark_work.completed_snark_work_received_gossip) ; `Snd (Envelope.Incoming.map envelope ~f:(fun _ -> diff), valid_cb) @@ -1319,24 +1352,24 @@ let create (config : Config.t) if config.log_gossip_heard.transaction_pool_diff then [%str_log debug] (Transactions_received - {txns= diff; sender= Envelope.Incoming.sender envelope}) ; - `Trd (Envelope.Incoming.map envelope ~f:(fun _ -> diff), valid_cb) - ) + { txns = diff; sender = Envelope.Incoming.sender envelope }) ; + `Trd (Envelope.Incoming.map envelope ~f:(fun _ -> diff), valid_cb)) in { gossip_net - ; logger= config.logger - ; trust_system= config.trust_system + ; logger = config.logger + ; trust_system = config.trust_system ; states ; snark_pool_diffs ; transaction_pool_diffs ; online_status - ; 
first_received_message_signal } + ; first_received_message_signal + } (* lift and expose select gossip net functions *) include struct open Gossip_net.Any - let lift f {gossip_net; _} = f gossip_net + let lift f { gossip_net; _ } = f gossip_net let peers = lift peers @@ -1345,13 +1378,12 @@ include struct let%bind s = get_peer_node_status t.gossip_net peer in Or_error.try_with (fun () -> match - Rpcs.Get_node_status.Node_status.of_yojson - (Yojson.Safe.from_string s) + Rpcs.Get_node_status.Node_status.of_yojson (Yojson.Safe.from_string s) with | Ok x -> x | Error e -> - failwith e ) + failwith e) |> Deferred.return let add_peer = lift add_peer @@ -1362,13 +1394,13 @@ include struct let random_peers = lift random_peers - let query_peer ?heartbeat_timeout ?timeout {gossip_net; _} = + let query_peer ?heartbeat_timeout ?timeout { gossip_net; _ } = query_peer ?heartbeat_timeout ?timeout gossip_net - let query_peer' ?how ?heartbeat_timeout ?timeout {gossip_net; _} = + let query_peer' ?how ?heartbeat_timeout ?timeout { gossip_net; _ } = query_peer' ?how ?heartbeat_timeout ?timeout gossip_net - let restart_helper {gossip_net; _} = restart_helper gossip_net + let restart_helper { gossip_net; _ } = restart_helper gossip_net (* these cannot be directly lifted due to the value restriction *) let on_first_connect t = lift on_first_connect t @@ -1384,35 +1416,35 @@ include struct lift set_connection_gating t config end -let on_first_received_message {first_received_message_signal; _} ~f = +let on_first_received_message { first_received_message_signal; _ } ~f = Ivar.read first_received_message_signal >>| f -let fill_first_received_message_signal {first_received_message_signal; _} = +let fill_first_received_message_signal { first_received_message_signal; _ } = Ivar.fill_if_empty first_received_message_signal () (* TODO: Have better pushback behavior *) let broadcast t ~log_msg msg = [%str_log' trace t.logger] - ~metadata:[("message", Gossip_net.Message.msg_to_yojson msg)] + 
~metadata:[ ("message", Gossip_net.Message.msg_to_yojson msg) ] log_msg ; Gossip_net.Any.broadcast t.gossip_net msg let broadcast_state t state = let msg = Gossip_net.Message.New_state (With_hash.data state) in [%str_log' info t.logger] - ~metadata:[("message", Gossip_net.Message.msg_to_yojson msg)] - (Gossip_new_state {state_hash= With_hash.hash state}) ; + ~metadata:[ ("message", Gossip_net.Message.msg_to_yojson msg) ] + (Gossip_new_state { state_hash = With_hash.hash state }) ; Gossip_net.Any.broadcast t.gossip_net msg let broadcast_transaction_pool_diff t diff = broadcast t (Gossip_net.Message.Transaction_pool_diff diff) - ~log_msg:(Gossip_transaction_pool_diff {txns= diff}) + ~log_msg:(Gossip_transaction_pool_diff { txns = diff }) let broadcast_snark_pool_diff t diff = broadcast t (Gossip_net.Message.Snark_pool_diff diff) ~log_msg: (Gossip_snark_pool_diff - { work= + { work = Option.value_exn (Snark_pool.Resource_pool.Diff.to_compact diff) }) @@ -1428,7 +1460,7 @@ let find_map' xs ~f = (* TODO: Validation applicative here *) if List.for_all ds ~f:Or_error.is_error then return (Or_error.error_string "all none") - else Deferred.never () ) + else Deferred.never ()) in Deferred.any (none_worked :: List.map ~f:(filter ~f:Or_error.is_ok) ds) @@ -1439,13 +1471,13 @@ let make_rpc_request ?heartbeat_timeout ?timeout ~rpc ~label t peer input = match%map query_peer ?heartbeat_timeout ?timeout t peer.Peer.peer_id rpc input with - | Connected {data= Ok (Some response); _} -> + | Connected { data = Ok (Some response); _ } -> Ok response - | Connected {data= Ok None; _} -> + | Connected { data = Ok None; _ } -> Or_error.errorf !"Peer %{sexp:Network_peer.Peer.Id.t} doesn't have the requested %s" peer.peer_id label - | Connected {data= Error e; _} -> + | Connected { data = Error e; _ } -> Error e | Failed_to_connect e -> Error (Error.tag e ~tag:"failed-to-connect") @@ -1481,7 +1513,7 @@ let try_non_preferred_peers (type b) t input peers ~rpc : query_peer t peer.Peer.peer_id 
rpc input in match response_or_error with - | Connected ({data= Ok (Some data); _} as envelope) -> + | Connected ({ data = Ok (Some data); _ } as envelope) -> let%bind () = Trust_system.( record t.trust_system t.logger peer @@ -1491,10 +1523,10 @@ let try_non_preferred_peers (type b) t input peers ~rpc : )) in return (Ok (Envelope.Incoming.map envelope ~f:(Fn.const data))) - | Connected {data= Ok None; _} -> + | Connected { data = Ok None; _ } -> loop remaining_peers (2 * num_peers) | _ -> - loop remaining_peers (2 * num_peers) ) + loop remaining_peers (2 * num_peers)) in loop peers 1 @@ -1505,7 +1537,7 @@ let rpc_peer_then_random (type b) t peer_id input ~rpc : try_non_preferred_peers t input peers ~rpc in match%bind query_peer t peer_id rpc input with - | Connected {data= Ok (Some response); sender; _} -> + | Connected { data = Ok (Some response); sender; _ } -> let%bind () = match sender with | Local -> @@ -1518,7 +1550,7 @@ let rpc_peer_then_random (type b) t peer_id input ~rpc : , Some ("Preferred peer returned valid response", []) )) in return (Ok (Envelope.Incoming.wrap ~data:response ~sender)) - | Connected {data= Ok None; sender; _} -> + | Connected { data = Ok None; sender; _ } -> let%bind () = match sender with | Remote peer -> @@ -1532,7 +1564,7 @@ let rpc_peer_then_random (type b) t peer_id input ~rpc : return () in retry () - | Connected {data= Error e; sender; _} -> + | Connected { data = Error e; sender; _ } -> (* FIXME #4094: determine if more specific actions apply here *) let%bind () = match sender with @@ -1543,7 +1575,7 @@ let rpc_peer_then_random (type b) t peer_id input ~rpc : ( Outgoing_connection_error , Some ( "Error while doing RPC" - , [("error", Error_json.error_to_yojson e)] ) )) + , [ ("error", Error_json.error_to_yojson e) ] ) )) | Local -> return () in @@ -1611,9 +1643,9 @@ let glue_sync_ledger : query_peer ~heartbeat_timeout ~timeout:(Time.Span.of_sec 10.) 
t peer.Peer.peer_id Rpcs.Answer_sync_ledger_query (h, Num_accounts) with - | Connected {data= Ok _; _} -> + | Connected { data = Ok _; _ } -> `Call (fun (h', _) -> Ledger_hash.equal h' h) - | Failed_to_connect _ | Connected {data= Error _; _} -> + | Failed_to_connect _ | Connected { data = Error _; _ } -> `Some [] in let%bind _ = Linear_pipe.values_available query_reader in @@ -1628,9 +1660,8 @@ let glue_sync_ledger : ~get:(fun (peer : Peer.t) qs -> List.iter qs ~f:(fun (h, _) -> if - not - (Ledger_hash.equal h (Broadcast_pipe.Reader.peek root_hash_r)) - then don't_wait_for (Broadcast_pipe.Writer.write root_hash_w h) ) ; + not (Ledger_hash.equal h (Broadcast_pipe.Reader.peek root_hash_r)) + then don't_wait_for (Broadcast_pipe.Writer.write root_hash_w h)) ; let%map rs = query_peer' ~how:`Parallel ~heartbeat_timeout ~timeout:(Time.Span.of_sec (Float.of_int (List.length qs) *. 2.)) @@ -1640,18 +1671,18 @@ let glue_sync_ledger : | Failed_to_connect e -> Error e | Connected res -> ( - match res.data with - | Error e -> - Error e - | Ok rs -> ( - match List.zip qs rs with - | Unequal_lengths -> - Or_error.error_string "mismatched lengths" - | Ok ps -> - Ok - (List.filter_map ps ~f:(fun (q, r) -> - match r with Ok r -> Some (q, r) | Error _ -> None )) ) - ) ) + match res.data with + | Error e -> + Error e + | Ok rs -> ( + match List.zip qs rs with + | Unequal_lengths -> + Or_error.error_string "mismatched lengths" + | Ok ps -> + Ok + (List.filter_map ps ~f:(fun (q, r) -> + match r with Ok r -> Some (q, r) | Error _ -> None)) + ) )) in don't_wait_for (let%bind downloader = downloader in @@ -1664,4 +1695,4 @@ let glue_sync_ledger : Deferred.unit | Ok (a, _) -> Linear_pipe.write_if_open response_writer - (fst q, snd q, {a with data= snd a.data}) )) + (fst q, snd q, { a with data = snd a.data }))) diff --git a/src/lib/mina_networking/mina_networking.mli b/src/lib/mina_networking/mina_networking.mli index e39fedaf572..fa15a533437 100644 --- 
a/src/lib/mina_networking/mina_networking.mli +++ b/src/lib/mina_networking/mina_networking.mli @@ -9,17 +9,19 @@ open Network_peer exception No_initial_peers type Structured_log_events.t += - | Block_received of {state_hash: State_hash.t; sender: Envelope.Sender.t} + | Block_received of { state_hash : State_hash.t; sender : Envelope.Sender.t } | Snark_work_received of - { work: Snark_pool.Resource_pool.Diff.compact - ; sender: Envelope.Sender.t } + { work : Snark_pool.Resource_pool.Diff.compact + ; sender : Envelope.Sender.t + } | Transactions_received of - { txns: Transaction_pool.Resource_pool.Diff.t - ; sender: Envelope.Sender.t } - | Gossip_new_state of {state_hash: State_hash.t} + { txns : Transaction_pool.Resource_pool.Diff.t + ; sender : Envelope.Sender.t + } + | Gossip_new_state of { state_hash : State_hash.t } | Gossip_transaction_pool_diff of - { txns: Transaction_pool.Resource_pool.Diff.t } - | Gossip_snark_pool_diff of {work: Snark_pool.Resource_pool.Diff.compact} + { txns : Transaction_pool.Resource_pool.Diff.t } + | Gossip_snark_pool_diff of { work : Snark_pool.Resource_pool.Diff.compact } [@@deriving register_event] val refused_answer_query_string : string @@ -93,22 +95,23 @@ module Rpcs : sig module Stable : sig module V2 : sig type t = - { node_ip_addr: Core.Unix.Inet_addr.Stable.V1.t - ; node_peer_id: Peer.Id.Stable.V1.t - ; sync_status: Sync_status.Stable.V1.t - ; peers: Network_peer.Peer.Stable.V1.t list - ; block_producers: + { node_ip_addr : Core.Unix.Inet_addr.Stable.V1.t + ; node_peer_id : Peer.Id.Stable.V1.t + ; sync_status : Sync_status.Stable.V1.t + ; peers : Network_peer.Peer.Stable.V1.t list + ; block_producers : Signature_lib.Public_key.Compressed.Stable.V1.t list - ; protocol_state_hash: State_hash.Stable.V1.t - ; ban_statuses: + ; protocol_state_hash : State_hash.Stable.V1.t + ; ban_statuses : ( Network_peer.Peer.Stable.V1.t * Trust_system.Peer_status.Stable.V1.t ) list - ; k_block_hashes_and_timestamps: + ; 
k_block_hashes_and_timestamps : (State_hash.Stable.V1.t * string) list - ; git_commit: string - ; uptime_minutes: int - ; block_height_opt: int option } + ; git_commit : string + ; uptime_minutes : int + ; block_height_opt : int option + } end end] end @@ -157,19 +160,20 @@ module Gossip_net : Gossip_net.S with module Rpc_intf := Rpcs module Config : sig type log_gossip_heard = - {snark_pool_diff: bool; transaction_pool_diff: bool; new_state: bool} + { snark_pool_diff : bool; transaction_pool_diff : bool; new_state : bool } [@@deriving make] type t = - { logger: Logger.t - ; trust_system: Trust_system.t - ; time_controller: Block_time.Controller.t - ; consensus_local_state: Consensus.Data.Local_state.t - ; genesis_ledger_hash: Ledger_hash.t - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; creatable_gossip_net: Gossip_net.Any.creatable - ; is_seed: bool - ; log_gossip_heard: log_gossip_heard } + { logger : Logger.t + ; trust_system : Trust_system.t + ; time_controller : Block_time.Controller.t + ; consensus_local_state : Consensus.Data.Local_state.t + ; genesis_ledger_hash : Ledger_hash.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; creatable_gossip_net : Gossip_net.Any.creatable + ; is_seed : bool + ; log_gossip_heard : log_gossip_heard + } [@@deriving make] end @@ -189,8 +193,7 @@ val get_peer_node_status : -> Network_peer.Peer.t -> Rpcs.Get_node_status.Node_status.t Deferred.Or_error.t -val add_peer : - t -> Network_peer.Peer.t -> seed:bool -> unit Deferred.Or_error.t +val add_peer : t -> Network_peer.Peer.t -> seed:bool -> unit Deferred.Or_error.t val on_first_received_message : t -> f:(unit -> 'a) -> 'a Deferred.t @@ -200,7 +203,7 @@ val on_first_connect : t -> f:(unit -> 'a) -> 'a Deferred.t val on_first_high_connectivity : t -> f:(unit -> 'a) -> 'a Deferred.t -val online_status : t -> [`Online | `Offline] Broadcast_pipe.Reader.t +val online_status : t -> [ `Online | `Offline ] Broadcast_pipe.Reader.t val 
random_peers : t -> int -> Network_peer.Peer.t list Deferred.t @@ -308,37 +311,33 @@ val ban_notification_reader : val create : Config.t - -> get_some_initial_peers:( Rpcs.Get_some_initial_peers.query - Envelope.Incoming.t - -> Rpcs.Get_some_initial_peers.response Deferred.t) - -> get_staged_ledger_aux_and_pending_coinbases_at_hash:( Rpcs - .Get_staged_ledger_aux_and_pending_coinbases_at_hash - .query - Envelope.Incoming - .t - -> Rpcs - .Get_staged_ledger_aux_and_pending_coinbases_at_hash - .response - Deferred.t) - -> answer_sync_ledger_query:( Rpcs.Answer_sync_ledger_query.query - Envelope.Incoming.t - -> Rpcs.Answer_sync_ledger_query.response - Deferred.t) - -> get_ancestry:( Rpcs.Get_ancestry.query Envelope.Incoming.t - -> Rpcs.Get_ancestry.response Deferred.t) - -> get_best_tip:( Rpcs.Get_best_tip.query Envelope.Incoming.t - -> Rpcs.Get_best_tip.response Deferred.t) - -> get_node_status:( Rpcs.Get_node_status.query Envelope.Incoming.t - -> Rpcs.Get_node_status.response Deferred.t) - -> get_transition_chain_proof:( Rpcs.Get_transition_chain_proof.query - Envelope.Incoming.t - -> Rpcs.Get_transition_chain_proof.response - Deferred.t) - -> get_transition_chain:( Rpcs.Get_transition_chain.query - Envelope.Incoming.t - -> Rpcs.Get_transition_chain.response Deferred.t) - -> get_transition_knowledge:( Rpcs.Get_transition_knowledge.query - Envelope.Incoming.t - -> Rpcs.Get_transition_knowledge.response - Deferred.t) + -> get_some_initial_peers: + ( Rpcs.Get_some_initial_peers.query Envelope.Incoming.t + -> Rpcs.Get_some_initial_peers.response Deferred.t) + -> get_staged_ledger_aux_and_pending_coinbases_at_hash: + ( Rpcs.Get_staged_ledger_aux_and_pending_coinbases_at_hash.query + Envelope.Incoming.t + -> Rpcs.Get_staged_ledger_aux_and_pending_coinbases_at_hash.response + Deferred.t) + -> answer_sync_ledger_query: + ( Rpcs.Answer_sync_ledger_query.query Envelope.Incoming.t + -> Rpcs.Answer_sync_ledger_query.response Deferred.t) + -> get_ancestry: + ( 
Rpcs.Get_ancestry.query Envelope.Incoming.t + -> Rpcs.Get_ancestry.response Deferred.t) + -> get_best_tip: + ( Rpcs.Get_best_tip.query Envelope.Incoming.t + -> Rpcs.Get_best_tip.response Deferred.t) + -> get_node_status: + ( Rpcs.Get_node_status.query Envelope.Incoming.t + -> Rpcs.Get_node_status.response Deferred.t) + -> get_transition_chain_proof: + ( Rpcs.Get_transition_chain_proof.query Envelope.Incoming.t + -> Rpcs.Get_transition_chain_proof.response Deferred.t) + -> get_transition_chain: + ( Rpcs.Get_transition_chain.query Envelope.Incoming.t + -> Rpcs.Get_transition_chain.response Deferred.t) + -> get_transition_knowledge: + ( Rpcs.Get_transition_knowledge.query Envelope.Incoming.t + -> Rpcs.Get_transition_knowledge.response Deferred.t) -> t Deferred.t diff --git a/src/lib/mina_numbers/intf.ml b/src/lib/mina_numbers/intf.ml index d31ffe39d82..2b3555a9078 100644 --- a/src/lib/mina_numbers/intf.ml +++ b/src/lib/mina_numbers/intf.ml @@ -1,13 +1,11 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Fold_lib open Tuple_lib open Unsigned -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_bits @@ -50,7 +48,7 @@ module type S_unchecked = sig val to_int : t -> int (* Someday: I think this only does ones greater than zero, but it doesn't really matter for - selecting the nonce *) + selecting the nonce *) val random : unit -> t @@ -69,8 +67,7 @@ module type S_unchecked = sig val fold : t -> bool Triple.t Fold.t end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module type S_checked = sig type unchecked @@ -97,7 +94,7 @@ module type S_checked = sig - If the argument to [`Underflow] is false, [x >= y] and the returned integer value is equal to [x - y] *) - val sub_or_zero : t -> t -> ([`Underflow of Boolean.var] * t, _) Checked.t + val sub_or_zero : t -> t -> ([ `Underflow of Boolean.var ] * t, _) Checked.t (** [sub ~m x y] computes [x - y] and ensures that [0 <= x - y] *) val sub : t -> t -> (t, 
_) Checked.t @@ -172,7 +169,8 @@ module type UInt32 = sig val to_uint32 : t -> uint32 val of_uint32 : uint32 -> t -end [@@warning "-32"] +end +[@@warning "-32"] module type UInt64 = sig [%%versioned: @@ -188,26 +186,26 @@ module type UInt64 = sig val to_uint64 : t -> uint64 val of_uint64 : uint64 -> t -end [@@warning "-32"] +end +[@@warning "-32"] module type F = functor - (N :sig - - type t [@@deriving bin_io, sexp, compare, hash] + (N : sig + type t [@@deriving bin_io, sexp, compare, hash] - include Unsigned_extended.S with type t := t + include Unsigned_extended.S with type t := t - val random : unit -> t - end) + val random : unit -> t + end) (Bits : Bits_intf.Convertible_bits with type t := N.t) -> S with type t := N.t and module Bits := Bits -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module type F_checked = functor (N : Unsigned_extended.S) (Bits : Bits_intf.Convertible_bits with type t := N.t) - -> S_checked with type unchecked := N.t [@@warning "-67"] + -> S_checked with type unchecked := N.t +[@@warning "-67"] [%%endif] diff --git a/src/lib/mina_numbers/nat.ml b/src/lib/mina_numbers/nat.ml index 15fc43916e5..4d86f8ad652 100644 --- a/src/lib/mina_numbers/nat.ml +++ b/src/lib/mina_numbers/nat.ml @@ -1,13 +1,11 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Fold_lib include Intf module Intf = Intf -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_bits @@ -38,7 +36,7 @@ struct let to_input t = Checked.map (to_bits t) ~f:(fun bits -> Random_oracle.Input.bitstring - (Bitstring_lib.Bitstring.Lsb_first.to_list bits) ) + (Bitstring_lib.Bitstring.Lsb_first.to_list bits)) let constant n = Integer.constant ~length:N.length_in_bits ~m (N.to_bigint n) @@ -58,11 +56,11 @@ struct let bs = List.take (Field.unpack x) N.length_in_bits in (* TODO: Make this efficient *) List.foldi bs ~init:N.zero ~f:(fun i acc b -> - if b then N.(logor (shift_left one i) acc) else acc ) + if b then N.(logor 
(shift_left one i) acc) else acc) in Typ.Read.map (Field.typ.read (Integer.to_field v)) ~f:of_field_elt in - {alloc; store; check; read} + { alloc; store; check; read } type t = var @@ -79,12 +77,12 @@ struct let succ_if t c = make_checked (fun () -> let t = Integer.succ_if ~m t c in - t ) + t) let succ t = make_checked (fun () -> let t = Integer.succ ~m t in - t ) + t) let op op a b = make_checked (fun () -> op ~m a b) @@ -143,8 +141,7 @@ struct let sub x y = if x < y then None else Some (N.sub x y) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] module Checked = Make_checked (N) (Bits) @@ -198,7 +195,8 @@ module Make32 () : UInt32 = struct end end] - include Make (struct + include Make + (struct include UInt32 let random () = @@ -230,7 +228,8 @@ module Make64 () : UInt64 = struct end end] - include Make (struct + include Make + (struct include UInt64 let random () = diff --git a/src/lib/mina_plugins/mina_plugins.ml b/src/lib/mina_plugins/mina_plugins.ml index b3895f3a44b..e6cdb16ac91 100644 --- a/src/lib/mina_plugins/mina_plugins.ml +++ b/src/lib/mina_plugins/mina_plugins.ml @@ -12,15 +12,16 @@ let init_plugins ~logger coda plugin_paths = mina_lib' := Some coda ; List.iter plugin_paths ~f:(fun path -> [%log info] "Initializing plugin from $path" - ~metadata:[("path", `String path)] ; + ~metadata:[ ("path", `String path) ] ; try Dynlink.loadfile path ; [%log info] "Plugin successfully loaded from $path" - ~metadata:[("path", `String path)] + ~metadata:[ ("path", `String path) ] with Dynlink.Error err as exn -> [%log error] "Failed to load plugin from $path: $error" ~metadata: [ ("path", `String path) - ; ("error", `String (Dynlink.error_message err)) ] ; - raise exn ) ; + ; ("error", `String (Dynlink.error_message err)) + ] ; + raise exn) ; mina_lib' := None diff --git a/src/lib/mina_state/ancestor.ml b/src/lib/mina_state/ancestor.ml index bf0a9a18269..6227fcf4dcf 100644 --- a/src/lib/mina_state/ancestor.ml +++ b/src/lib/mina_state/ancestor.ml @@ 
-5,7 +5,7 @@ module Input = struct [%%versioned module Stable = struct module V1 = struct - type t = {descendant: State_hash.Stable.V1.t; generations: int} + type t = { descendant : State_hash.Stable.V1.t; generations : int } [@@deriving sexp] let to_latest = Fn.id @@ -33,11 +33,11 @@ let verify = | h :: hs -> let acc = Protocol_state.hash_abstract ~hash_body:Fn.id - {previous_state_hash= acc; body= h} + { previous_state_hash = acc; body = h } in go acc hs in - fun ({descendant; generations} : Input.t) (ancestor : Output.t) + fun ({ descendant; generations } : Input.t) (ancestor : Output.t) (proof : Proof.t) -> List.length proof = generations && State_hash.equal descendant (go ancestor proof) @@ -84,13 +84,13 @@ end = struct let length = Mina_numbers.Length.succ length in let full_state_hash = Protocol_state.hash_abstract ~hash_body:Fn.id - {previous_state_hash= acc; body} + { previous_state_hash = acc; body } in go ((acc, full_state_hash, length, body) :: hs) full_state_hash length bs in - fun (t : t) ({descendant; generations} : Input.t) (ancestor : Output.t) + fun (t : t) ({ descendant; generations } : Input.t) (ancestor : Output.t) ~ancestor_length (proof : Proof.t) -> let open Or_error.Let_syntax in let%bind () = @@ -99,9 +99,9 @@ end = struct let h, to_add = go [] ancestor ancestor_length proof in let%map () = check (State_hash.equal h descendant) "Bad merkle proof" in List.iter to_add ~f:(fun (prev, h, length, body) -> - add t ~prev_hash:prev ~hash:h ~length ~body_hash:body ) + add t ~prev_hash:prev ~hash:h ~length ~body_hash:body) - let prove (t : t) {Input.descendant; generations} : + let prove (t : t) { Input.descendant; generations } : (Output.t * Proof.t) option = T.ancestor_of_depth t ~depth:generations ~source:descendant end @@ -121,16 +121,16 @@ let%test_unit "completeness" = let length = Length.succ length in let h = Protocol_state.hash_abstract ~hash_body:Fn.id - {previous_state_hash= prev; body} + { previous_state_hash = prev; body } in Prover.add 
prover ~prev_hash:prev ~hash:h ~length ~body_hash:body ; - ((h, length), h) ) + ((h, length), h)) in List.iteri hashes ~f:(fun i h -> - let input = {Input.generations= i + 1; descendant= h} in + let input = { Input.generations = i + 1; descendant = h } in let a, proof = - Prover.prove prover {generations= i + 1; descendant= h} + Prover.prove prover { generations = i + 1; descendant = h } |> Option.value_exn ?here:None ?error:None ?message:None in [%test_eq: State_hash.t] a ancestor ; - assert (verify input a proof) ) ) + assert (verify input a proof))) diff --git a/src/lib/mina_state/blockchain_state.ml b/src/lib/mina_state/blockchain_state.ml index 786abffe386..6f123897de1 100644 --- a/src/lib/mina_state/blockchain_state.ml +++ b/src/lib/mina_state/blockchain_state.ml @@ -7,11 +7,12 @@ module Poly = struct module Stable = struct module V1 = struct type ('staged_ledger_hash, 'snarked_ledger_hash, 'token_id, 'time) t = - { staged_ledger_hash: 'staged_ledger_hash - ; snarked_ledger_hash: 'snarked_ledger_hash - ; genesis_ledger_hash: 'snarked_ledger_hash - ; snarked_next_available_token: 'token_id - ; timestamp: 'time } + { staged_ledger_hash : 'staged_ledger_hash + ; snarked_ledger_hash : 'snarked_ledger_hash + ; genesis_ledger_hash : 'snarked_ledger_hash + ; snarked_next_available_token : 'token_id + ; timestamp : 'time + } [@@deriving sexp, fields, equal, compare, hash, yojson, hlist] end end] @@ -57,7 +58,8 @@ let create_value ~staged_ledger_hash ~snarked_ledger_hash ~genesis_ledger_hash ; snarked_ledger_hash ; genesis_ledger_hash ; snarked_next_available_token - ; timestamp } + ; timestamp + } let data_spec = let open Data_spec in @@ -65,7 +67,8 @@ let data_spec = ; Frozen_ledger_hash.typ ; Frozen_ledger_hash.typ ; Token_id.typ - ; Block_time.Unpacked.typ ] + ; Block_time.Unpacked.typ + ] let typ : (var, Value.t) Typ.t = Typ.of_hlistable data_spec ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist @@ -76,7 +79,8 @@ let var_to_input ; snarked_ledger_hash ; 
genesis_ledger_hash ; snarked_next_available_token - ; timestamp } : + ; timestamp + } : var) = let open Random_oracle.Input in let%map.Checked snarked_next_available_token = @@ -89,14 +93,16 @@ let var_to_input ; snarked_next_available_token ; bitstring (Bitstring_lib.Bitstring.Lsb_first.to_list - (Block_time.Unpacked.var_to_bits timestamp)) ] + (Block_time.Unpacked.var_to_bits timestamp)) + ] let to_input ({ staged_ledger_hash ; snarked_ledger_hash ; genesis_ledger_hash ; snarked_next_available_token - ; timestamp } : + ; timestamp + } : Value.t) = let open Random_oracle.Input in List.reduce_exn ~f:append @@ -104,9 +110,10 @@ let to_input ; field (snarked_ledger_hash :> Field.t) ; field (genesis_ledger_hash :> Field.t) ; Token_id.to_input snarked_next_available_token - ; bitstring (Block_time.Bits.to_bits timestamp) ] + ; bitstring (Block_time.Bits.to_bits timestamp) + ] -let set_timestamp t timestamp = {t with Poly.timestamp} +let set_timestamp t timestamp = { t with Poly.timestamp } let negative_one ~(constraint_constants : Genesis_constants.Constraint_constants.t) @@ -115,12 +122,13 @@ let negative_one let genesis_ledger_hash = Frozen_ledger_hash.of_ledger_hash genesis_ledger_hash in - { staged_ledger_hash= + { staged_ledger_hash = Staged_ledger_hash.genesis ~constraint_constants ~genesis_ledger_hash - ; snarked_ledger_hash= genesis_ledger_hash + ; snarked_ledger_hash = genesis_ledger_hash ; genesis_ledger_hash ; snarked_next_available_token - ; timestamp= consensus_constants.genesis_state_timestamp } + ; timestamp = consensus_constants.genesis_state_timestamp + } (* negative_one and genesis blockchain states are equivalent *) let genesis = negative_one @@ -133,18 +141,19 @@ let display ; snarked_ledger_hash ; genesis_ledger_hash ; snarked_next_available_token - ; timestamp } = - { Poly.staged_ledger_hash= + ; timestamp + } = + { Poly.staged_ledger_hash = Visualization.display_prefix_of_string @@ Ledger_hash.to_string @@ Staged_ledger_hash.ledger_hash 
staged_ledger_hash - ; snarked_ledger_hash= + ; snarked_ledger_hash = Visualization.display_prefix_of_string @@ Frozen_ledger_hash.to_string snarked_ledger_hash - ; genesis_ledger_hash= + ; genesis_ledger_hash = Visualization.display_prefix_of_string @@ Frozen_ledger_hash.to_string genesis_ledger_hash - ; snarked_next_available_token= + ; snarked_next_available_token = Token_id.to_string snarked_next_available_token - ; timestamp= + ; timestamp = Time.to_string_trimmed ~zone:Time.Zone.utc (Block_time.to_time timestamp) } diff --git a/src/lib/mina_state/blockchain_state.mli b/src/lib/mina_state/blockchain_state.mli index c8e0dc1a3aa..3a6204f277f 100644 --- a/src/lib/mina_state/blockchain_state.mli +++ b/src/lib/mina_state/blockchain_state.mli @@ -7,11 +7,12 @@ module Poly : sig module Stable : sig module V1 : sig type ('staged_ledger_hash, 'snarked_ledger_hash, 'token_id, 'time) t = - { staged_ledger_hash: 'staged_ledger_hash - ; snarked_ledger_hash: 'snarked_ledger_hash - ; genesis_ledger_hash: 'snarked_ledger_hash - ; snarked_next_available_token: 'token_id - ; timestamp: 'time } + { staged_ledger_hash : 'staged_ledger_hash + ; snarked_ledger_hash : 'snarked_ledger_hash + ; genesis_ledger_hash : 'snarked_ledger_hash + ; snarked_next_available_token : 'token_id + ; timestamp : 'time + } [@@deriving sexp, equal, compare, fields, yojson] end end] @@ -34,13 +35,13 @@ end include Snarkable.S - with type var = - ( Staged_ledger_hash.var - , Frozen_ledger_hash.var - , Token_id.var - , Block_time.Unpacked.var ) - Poly.t - and type value := Value.t + with type var = + ( Staged_ledger_hash.var + , Frozen_ledger_hash.var + , Token_id.var + , Block_time.Unpacked.var ) + Poly.t + and type value := Value.t val staged_ledger_hash : ('staged_ledger_hash, _, _, _) Poly.t -> 'staged_ledger_hash diff --git a/src/lib/mina_state/consensus_state_hooks.mli b/src/lib/mina_state/consensus_state_hooks.mli index 6ae5d4ad9eb..4093fbba8b9 100644 --- 
a/src/lib/mina_state/consensus_state_hooks.mli +++ b/src/lib/mina_state/consensus_state_hooks.mli @@ -3,11 +3,11 @@ open Data include Intf.State_hooks - with type blockchain_state := Blockchain_state.Value.t - and type protocol_state := Protocol_state.Value.t - and type protocol_state_var := Protocol_state.var - and type snark_transition_var := Snark_transition.var - and type consensus_state := Consensus_state.Value.t - and type consensus_state_var := Consensus_state.var - and type consensus_transition := Consensus_transition.Value.t - and type block_data := Block_data.t + with type blockchain_state := Blockchain_state.Value.t + and type protocol_state := Protocol_state.Value.t + and type protocol_state_var := Protocol_state.var + and type snark_transition_var := Snark_transition.var + and type consensus_state := Consensus_state.Value.t + and type consensus_state_var := Consensus_state.var + and type consensus_transition := Consensus_transition.Value.t + and type block_data := Block_data.t diff --git a/src/lib/mina_state/genesis_protocol_state.ml b/src/lib/mina_state/genesis_protocol_state.ml index bd67fff280e..d78101344d4 100644 --- a/src/lib/mina_state/genesis_protocol_state.ml +++ b/src/lib/mina_state/genesis_protocol_state.ml @@ -27,7 +27,7 @@ let t ~genesis_ledger ~genesis_epoch_data ~constraint_constants ~previous_state_hash: (Option.value_map constraint_constants.fork ~default:negative_one_protocol_state_hash - ~f:(fun {previous_state_hash; _} -> previous_state_hash)) + ~f:(fun { previous_state_hash; _ } -> previous_state_hash)) ~blockchain_state: (Blockchain_state.genesis ~constraint_constants ~consensus_constants ~genesis_ledger_hash ~snarked_next_available_token) diff --git a/src/lib/mina_state/protocol_state.ml b/src/lib/mina_state/protocol_state.ml index 10d1c7e1a75..a8521de3500 100644 --- a/src/lib/mina_state/protocol_state.ml +++ b/src/lib/mina_state/protocol_state.ml @@ -1,11 +1,9 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open 
Core_kernel open Mina_base -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -16,17 +14,17 @@ module Poly = struct module Stable = struct module V1 = struct type ('state_hash, 'body) t = - {previous_state_hash: 'state_hash; body: 'body} + { previous_state_hash : 'state_hash; body : 'body } [@@deriving equal, ord, hash, sexp, yojson, hlist] end end] end let hash_abstract ~hash_body - ({previous_state_hash; body} : (State_hash.t, _) Poly.t) = + ({ previous_state_hash; body } : (State_hash.t, _) Poly.t) = let body : State_body_hash.t = hash_body body in Random_oracle.hash ~init:Hash_prefix.protocol_state - [|(previous_state_hash :> Field.t); (body :> Field.t)|] + [| (previous_state_hash :> Field.t); (body :> Field.t) |] |> State_hash.of_hash module Body = struct @@ -35,10 +33,11 @@ module Body = struct module Stable = struct module V1 = struct type ('state_hash, 'blockchain_state, 'consensus_state, 'constants) t = - { genesis_state_hash: 'state_hash - ; blockchain_state: 'blockchain_state - ; consensus_state: 'consensus_state - ; constants: 'constants } + { genesis_state_hash : 'state_hash + ; blockchain_state : 'blockchain_state + ; consensus_state : 'consensus_state + ; constants : 'constants + } [@@deriving sexp, equal, compare, yojson, hash, version, hlist] end end] @@ -66,8 +65,7 @@ module Body = struct type value = Value.t [@@deriving sexp, yojson] - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = ( State_hash.var @@ -81,7 +79,8 @@ module Body = struct [ State_hash.typ ; Blockchain_state.typ ; Consensus.Data.Consensus_state.typ ~constraint_constants - ; Protocol_constants_checked.typ ] + ; Protocol_constants_checked.typ + ] let typ ~constraint_constants = Typ.of_hlistable @@ -90,10 +89,11 @@ module Body = struct ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist let to_input - { Poly.genesis_state_hash: State_hash.t + { Poly.genesis_state_hash : State_hash.t ; blockchain_state ; 
consensus_state - ; constants } = + ; constants + } = Random_oracle.Input.( append (Blockchain_state.to_input blockchain_state) @@ -102,7 +102,8 @@ module Body = struct |> append (Protocol_constants_checked.to_input constants)) let var_to_input - {Poly.genesis_state_hash; blockchain_state; consensus_state; constants} = + { Poly.genesis_state_hash; blockchain_state; consensus_state; constants } + = let%bind blockchain_state = Blockchain_state.var_to_input blockchain_state in @@ -120,25 +121,26 @@ module Body = struct make_checked (fun () -> Random_oracle.Checked.( hash ~init:Hash_prefix.protocol_state_body (pack_input input) - |> State_body_hash.var_of_hash_packed) ) + |> State_body_hash.var_of_hash_packed)) - let consensus_state {Poly.consensus_state; _} = consensus_state + let consensus_state { Poly.consensus_state; _ } = consensus_state let view_checked (t : var) : Snapp_predicate.Protocol_state.View.Checked.t = let module C = Consensus.Proof_of_stake.Exported.Consensus_state in let cs : Consensus.Data.Consensus_state.var = t.consensus_state in - { snarked_ledger_hash= t.blockchain_state.snarked_ledger_hash - ; snarked_next_available_token= + { snarked_ledger_hash = t.blockchain_state.snarked_ledger_hash + ; snarked_next_available_token = t.blockchain_state.snarked_next_available_token - ; timestamp= t.blockchain_state.timestamp - ; blockchain_length= C.blockchain_length_var cs - ; min_window_density= C.min_window_density_var cs - ; last_vrf_output= () - ; total_currency= C.total_currency_var cs - ; curr_global_slot= C.curr_global_slot_var cs - ; global_slot_since_genesis= C.global_slot_since_genesis_var cs - ; staking_epoch_data= C.staking_epoch_data_var cs - ; next_epoch_data= C.next_epoch_data_var cs } + ; timestamp = t.blockchain_state.timestamp + ; blockchain_length = C.blockchain_length_var cs + ; min_window_density = C.min_window_density_var cs + ; last_vrf_output = () + ; total_currency = C.total_currency_var cs + ; curr_global_slot = 
C.curr_global_slot_var cs + ; global_slot_since_genesis = C.global_slot_since_genesis_var cs + ; staking_epoch_data = C.staking_epoch_data_var cs + ; next_epoch_data = C.next_epoch_data_var cs + } [%%endif] @@ -150,26 +152,26 @@ module Body = struct let view (t : Value.t) : Snapp_predicate.Protocol_state.View.t = let module C = Consensus.Proof_of_stake.Exported.Consensus_state in let cs = t.consensus_state in - { snarked_ledger_hash= t.blockchain_state.snarked_ledger_hash - ; snarked_next_available_token= + { snarked_ledger_hash = t.blockchain_state.snarked_ledger_hash + ; snarked_next_available_token = t.blockchain_state.snarked_next_available_token - ; timestamp= t.blockchain_state.timestamp - ; blockchain_length= C.blockchain_length cs - ; min_window_density= C.min_window_density cs - ; last_vrf_output= () - ; total_currency= C.total_currency cs - ; curr_global_slot= C.curr_global_slot cs - ; global_slot_since_genesis= C.global_slot_since_genesis cs - ; staking_epoch_data= C.staking_epoch_data cs - ; next_epoch_data= C.next_epoch_data cs } + ; timestamp = t.blockchain_state.timestamp + ; blockchain_length = C.blockchain_length cs + ; min_window_density = C.min_window_density cs + ; last_vrf_output = () + ; total_currency = C.total_currency cs + ; curr_global_slot = C.curr_global_slot cs + ; global_slot_since_genesis = C.global_slot_since_genesis cs + ; staking_epoch_data = C.staking_epoch_data cs + ; next_epoch_data = C.next_epoch_data cs + } end module Value = struct [%%versioned module Stable = struct module V1 = struct - type t = - (State_hash.Stable.V1.t, Body.Value.Stable.V1.t) Poly.Stable.V1.t + type t = (State_hash.Stable.V1.t, Body.Value.Stable.V1.t) Poly.Stable.V1.t [@@deriving sexp, hash, compare, equal, yojson] let to_latest = Fn.id @@ -181,8 +183,7 @@ end type value = Value.t [@@deriving sexp, yojson] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = (State_hash.var, Body.var) Poly.t @@ -192,42 +193,43 @@ module Proof = Proof 
module Hash = State_hash let create ~previous_state_hash ~body = - {Poly.Stable.Latest.previous_state_hash; body} + { Poly.Stable.Latest.previous_state_hash; body } let create' ~previous_state_hash ~genesis_state_hash ~blockchain_state ~consensus_state ~constants = { Poly.Stable.Latest.previous_state_hash - ; body= + ; body = { Body.Poly.genesis_state_hash ; blockchain_state ; consensus_state - ; constants } } + ; constants + } + } let create_value = create' -let body {Poly.Stable.Latest.body; _} = body +let body { Poly.Stable.Latest.body; _ } = body -let previous_state_hash {Poly.Stable.Latest.previous_state_hash; _} = +let previous_state_hash { Poly.Stable.Latest.previous_state_hash; _ } = previous_state_hash let blockchain_state - {Poly.Stable.Latest.body= {Body.Poly.blockchain_state; _}; _} = + { Poly.Stable.Latest.body = { Body.Poly.blockchain_state; _ }; _ } = blockchain_state -let consensus_state {Poly.Stable.Latest.body= {Body.Poly.consensus_state; _}; _} - = +let consensus_state + { Poly.Stable.Latest.body = { Body.Poly.consensus_state; _ }; _ } = consensus_state -let constants {Poly.Stable.Latest.body= {Body.Poly.constants; _}; _} = +let constants { Poly.Stable.Latest.body = { Body.Poly.constants; _ }; _ } = constants -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let create_var = create' let data_spec ~constraint_constants = - Data_spec.[State_hash.typ; Body.typ ~constraint_constants] + Data_spec.[ State_hash.typ; Body.typ ~constraint_constants ] let typ ~constraint_constants = Typ.of_hlistable @@ -235,14 +237,15 @@ let typ ~constraint_constants = ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist -let hash_checked ({previous_state_hash; body} : var) = +let hash_checked ({ previous_state_hash; body } : var) = let%bind body = Body.hash_checked body in let%map hash = make_checked (fun () -> Random_oracle.Checked.hash ~init:Hash_prefix.protocol_state [| Hash.var_to_hash_packed 
previous_state_hash - ; State_body_hash.var_to_hash_packed body |] - |> State_hash.var_of_hash_packed ) + ; State_body_hash.var_to_hash_packed body + |] + |> State_hash.var_of_hash_packed) in (hash, body) @@ -262,7 +265,7 @@ let hash = hash_abstract ~hash_body:Body.hash let hash_with_body t ~body_hash = hash_abstract ~hash_body:Fn.id - {Poly.previous_state_hash= t.Poly.previous_state_hash; body= body_hash} + { Poly.previous_state_hash = t.Poly.previous_state_hash; body = body_hash } let genesis_state_hash ?(state_hash = None) state = (*If this is the genesis state then simply return its hash @@ -271,8 +274,7 @@ let genesis_state_hash ?(state_hash = None) state = then match state_hash with None -> hash state | Some hash -> hash else state.body.genesis_state_hash -[%%if -call_logger] +[%%if call_logger] let hash s = Mina_debug.Call_logger.record_call "Protocol_state.hash" ; @@ -282,10 +284,10 @@ let hash s = let negative_one ~genesis_ledger ~genesis_epoch_data ~constraint_constants ~consensus_constants = - { Poly.Stable.Latest.previous_state_hash= + { Poly.Stable.Latest.previous_state_hash = State_hash.of_hash Outside_hash_image.t - ; body= - { Body.Poly.blockchain_state= + ; body = + { Body.Poly.blockchain_state = Blockchain_state.negative_one ~constraint_constants ~consensus_constants ~genesis_ledger_hash: @@ -293,10 +295,12 @@ let negative_one ~genesis_ledger ~genesis_epoch_data ~constraint_constants ~snarked_next_available_token: (Mina_base.Ledger.next_available_token (Lazy.force genesis_ledger)) - ; genesis_state_hash= State_hash.of_hash Outside_hash_image.t - ; consensus_state= + ; genesis_state_hash = State_hash.of_hash Outside_hash_image.t + ; consensus_state = Consensus.Data.Consensus_state.negative_one ~genesis_ledger ~genesis_epoch_data ~constants:consensus_constants ~constraint_constants - ; constants= - Consensus.Constants.to_protocol_constants consensus_constants } } + ; constants = + Consensus.Constants.to_protocol_constants consensus_constants + } + } 
diff --git a/src/lib/mina_state/protocol_state.mli b/src/lib/mina_state/protocol_state.mli index a9a93e9a999..2041e016835 100644 --- a/src/lib/mina_state/protocol_state.mli +++ b/src/lib/mina_state/protocol_state.mli @@ -7,7 +7,7 @@ module Poly : sig module Stable : sig module V1 : sig type ('state_hash, 'body) t = - {previous_state_hash: 'state_hash; body: 'body} + { previous_state_hash : 'state_hash; body : 'body } [@@deriving equal, ord, hash, sexp, to_yojson] end end] @@ -71,8 +71,7 @@ module Value : sig [%%versioned: module Stable : sig module V1 : sig - type t = - (State_hash.Stable.V1.t, Body.Value.Stable.V1.t) Poly.Stable.V1.t + type t = (State_hash.Stable.V1.t, Body.Value.Stable.V1.t) Poly.Stable.V1.t [@@deriving sexp, compare, equal, yojson] end end] diff --git a/src/lib/mina_state/snark_transition.ml b/src/lib/mina_state/snark_transition.ml index abb05291a6b..c70bec4dfcf 100644 --- a/src/lib/mina_state/snark_transition.ml +++ b/src/lib/mina_state/snark_transition.ml @@ -9,9 +9,10 @@ module Poly = struct , 'consensus_transition , 'pending_coinbase_update ) t = - { blockchain_state: 'blockchain_state - ; consensus_transition: 'consensus_transition - ; pending_coinbase_update: 'pending_coinbase_update } + { blockchain_state : 'blockchain_state + ; consensus_transition : 'consensus_transition + ; pending_coinbase_update : 'pending_coinbase_update + } [@@deriving to_yojson, sexp, fields, hlist] end end] @@ -51,18 +52,18 @@ type var = let create_value ~blockchain_state ~consensus_transition ~pending_coinbase_update () : Value.t = - {blockchain_state; consensus_transition; pending_coinbase_update} + { blockchain_state; consensus_transition; pending_coinbase_update } -let genesis ~constraint_constants ~consensus_constants ~genesis_ledger : value - = +let genesis ~constraint_constants ~consensus_constants ~genesis_ledger : value = let genesis_ledger = Lazy.force genesis_ledger in - { Poly.blockchain_state= + { Poly.blockchain_state = Blockchain_state.genesis 
~constraint_constants ~consensus_constants ~genesis_ledger_hash:(Ledger.merkle_root genesis_ledger) ~snarked_next_available_token: (Ledger.next_available_token genesis_ledger) - ; consensus_transition= Consensus.Data.Consensus_transition.genesis - ; pending_coinbase_update= Pending_coinbase.Update.genesis } + ; consensus_transition = Consensus.Data.Consensus_transition.genesis + ; pending_coinbase_update = Pending_coinbase.Update.genesis + } let typ = let open Snark_params.Tick.Typ in @@ -70,4 +71,5 @@ let typ = ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist [ Blockchain_state.typ ; Consensus.Data.Consensus_transition.typ - ; Pending_coinbase.Update.typ ] + ; Pending_coinbase.Update.typ + ] diff --git a/src/lib/mina_state/snark_transition.mli b/src/lib/mina_state/snark_transition.mli index c068932949e..b3c51c12ab3 100644 --- a/src/lib/mina_state/snark_transition.mli +++ b/src/lib/mina_state/snark_transition.mli @@ -3,31 +3,28 @@ open Mina_base module Poly : sig type ('blockchain_state, 'consensus_transition, 'pending_coinbase_update) t = - { blockchain_state: 'blockchain_state - ; consensus_transition: 'consensus_transition - ; pending_coinbase_update: 'pending_coinbase_update } + { blockchain_state : 'blockchain_state + ; consensus_transition : 'consensus_transition + ; pending_coinbase_update : 'pending_coinbase_update + } [@@deriving sexp, fields] - module Stable : - sig - module V1 : sig - type ( 'blockchain_state - , 'consensus_transition - , 'pending_coinbase_update ) - t - [@@deriving bin_io, sexp, version] - end - - module Latest : module type of V1 + module Stable : sig + module V1 : sig + type ( 'blockchain_state + , 'consensus_transition + , 'pending_coinbase_update ) + t + [@@deriving bin_io, sexp, version] end - with type ( 'blockchain_state - , 'consensus_transition - , 'pending_coinbase_update ) - V1.t = - ( 'blockchain_state - , 'consensus_transition - , 'pending_coinbase_update ) - t + + module Latest : module type of V1 + end + with type ( 
'blockchain_state + , 'consensus_transition + , 'pending_coinbase_update ) + V1.t = + ('blockchain_state, 'consensus_transition, 'pending_coinbase_update) t end module Value : sig diff --git a/src/lib/mina_transition/external_transition.ml b/src/lib/mina_transition/external_transition.ml index 532d4e41097..cd86bef1435 100644 --- a/src/lib/mina_transition/external_transition.ml +++ b/src/lib/mina_transition/external_transition.ml @@ -31,14 +31,15 @@ module Raw_versioned__ = struct module Stable = struct module V1 = struct type t = - { protocol_state: Protocol_state.Value.Stable.V1.t - ; protocol_state_proof: Proof.Stable.V1.t [@sexp.opaque] - ; staged_ledger_diff: Staged_ledger_diff.Stable.V1.t - ; delta_transition_chain_proof: + { protocol_state : Protocol_state.Value.Stable.V1.t + ; protocol_state_proof : Proof.Stable.V1.t [@sexp.opaque] + ; staged_ledger_diff : Staged_ledger_diff.Stable.V1.t + ; delta_transition_chain_proof : State_hash.Stable.V1.t * State_body_hash.Stable.V1.t list - ; current_protocol_version: Protocol_version.Stable.V1.t - ; proposed_protocol_version_opt: Protocol_version.Stable.V1.t option - ; mutable validation_callback: Validate_content.t } + ; current_protocol_version : Protocol_version.Stable.V1.t + ; proposed_protocol_version_opt : Protocol_version.Stable.V1.t option + ; mutable validation_callback : Validate_content.t + } [@@deriving compare, sexp, fields] let to_latest = Fn.id @@ -54,8 +55,8 @@ module Raw_versioned__ = struct -> 'a let map_creator c ~f ~protocol_state ~protocol_state_proof - ~staged_ledger_diff ~delta_transition_chain_proof - ~validation_callback ?proposed_protocol_version_opt () = + ~staged_ledger_diff ~delta_transition_chain_proof ~validation_callback + ?proposed_protocol_version_opt () = f (c ~protocol_state ~protocol_state_proof ~staged_ledger_diff ~delta_transition_chain_proof ~validation_callback @@ -77,7 +78,8 @@ module Raw_versioned__ = struct ; delta_transition_chain_proof ; current_protocol_version ; 
proposed_protocol_version_opt - ; validation_callback } + ; validation_callback + } end end] end @@ -100,8 +102,7 @@ Raw_versioned__. , set_validation_callback , compare )] -[%%define_locally -Stable.Latest.(create, sexp_of_t, t_of_sexp)] +[%%define_locally Stable.Latest.(create, sexp_of_t, t_of_sexp)] type external_transition = t @@ -121,9 +122,7 @@ module Precomputed_block = struct type t = Proof.t let to_bin_string proof = - let proof_string = - Binable.to_string (module Proof.Stable.Latest) proof - in + let proof_string = Binable.to_string (module Proof.Stable.Latest) proof in (* We use base64 with the uri-safe alphabet to ensure that encoding and decoding is cheap, and that the proof can be easily sent over http etc. without escaping or re-encoding. @@ -155,19 +154,20 @@ module Precomputed_block = struct |> Result.map_error ~f:(fun err -> sprintf "External_transition.Precomputed_block.Proof.of_yojson: %s" - (Error.to_string_hum err) ) + (Error.to_string_hum err)) | json -> Proof.of_yojson json end module T = struct type t = - { scheduled_time: Block_time.t - ; protocol_state: Protocol_state.value - ; protocol_state_proof: Proof.t - ; staged_ledger_diff: Staged_ledger_diff.t - ; delta_transition_chain_proof: - Frozen_ledger_hash.t * Frozen_ledger_hash.t list } + { scheduled_time : Block_time.t + ; protocol_state : Protocol_state.value + ; protocol_state_proof : Proof.t + ; staged_ledger_diff : Staged_ledger_diff.t + ; delta_transition_chain_proof : + Frozen_ledger_hash.t * Frozen_ledger_hash.t list + } [@@deriving sexp, yojson] end @@ -179,13 +179,13 @@ module Precomputed_block = struct module V1 = struct type t = T.t = - { scheduled_time: Block_time.Stable.V1.t - ; protocol_state: Protocol_state.Value.Stable.V1.t - ; protocol_state_proof: Mina_base.Proof.Stable.V1.t - ; staged_ledger_diff: Staged_ledger_diff.Stable.V1.t - ; delta_transition_chain_proof: - Frozen_ledger_hash.Stable.V1.t - * Frozen_ledger_hash.Stable.V1.t list } + { scheduled_time : 
Block_time.Stable.V1.t + ; protocol_state : Protocol_state.Value.Stable.V1.t + ; protocol_state_proof : Mina_base.Proof.Stable.V1.t + ; staged_ledger_diff : Staged_ledger_diff.Stable.V1.t + ; delta_transition_chain_proof : + Frozen_ledger_hash.Stable.V1.t * Frozen_ledger_hash.Stable.V1.t list + } let to_latest = Fn.id end @@ -193,10 +193,11 @@ module Precomputed_block = struct let of_external_transition ~scheduled_time (t : external_transition) = { scheduled_time - ; protocol_state= t.protocol_state - ; protocol_state_proof= t.protocol_state_proof - ; staged_ledger_diff= t.staged_ledger_diff - ; delta_transition_chain_proof= t.delta_transition_chain_proof } + ; protocol_state = t.protocol_state + ; protocol_state_proof = t.protocol_state_proof + ; staged_ledger_diff = t.staged_ledger_diff + ; delta_transition_chain_proof = t.delta_transition_chain_proof + } (* NOTE: This serialization is used externally and MUST NOT change. If the underlying types change, you should write a conversion, or add @@ -247,8 +248,7 @@ end let consensus_state = Fn.compose Protocol_state.consensus_state protocol_state -let blockchain_state = - Fn.compose Protocol_state.blockchain_state protocol_state +let blockchain_state = Fn.compose Protocol_state.blockchain_state protocol_state let state_hash = Fn.compose Protocol_state.hash protocol_state @@ -270,8 +270,7 @@ let coinbase_receiver = Fn.compose Consensus.Data.Consensus_state.coinbase_receiver consensus_state let supercharge_coinbase = - Fn.compose Consensus.Data.Consensus_state.supercharge_coinbase - consensus_state + Fn.compose Consensus.Data.Consensus_state.supercharge_coinbase consensus_state let block_winner = Fn.compose Consensus.Data.Consensus_state.block_stake_winner consensus_state @@ -293,7 +292,8 @@ let to_yojson t = , `String (Option.value_map (proposed_protocol_version_opt t) - ~default:"" ~f:Protocol_version.to_string) ) ] + ~default:"" ~f:Protocol_version.to_string) ) + ] let equal = Comparable.lift 
Consensus.Data.Consensus_state.Value.equal ~f:consensus_state @@ -317,11 +317,12 @@ let transactions ~constraint_constants t = let payments t = List.filter_map (commands t) ~f:(function - | {data= Signed_command ({payload= {body= Payment _; _}; _} as c); status} - -> - Some {With_status.data= c; status} + | { data = Signed_command ({ payload = { body = Payment _; _ }; _ } as c) + ; status + } -> + Some { With_status.data = c; status } | _ -> - None ) + None) let broadcast t = Mina_net2.Validation_callback.fire_exn (validation_callback t) `Accept @@ -336,7 +337,7 @@ let timestamp = (Fn.compose Protocol_state.blockchain_state protocol_state) type protocol_version_status = - {valid_current: bool; valid_next: bool; matches_daemon: bool} + { valid_current : bool; valid_next : bool; matches_daemon : bool } let protocol_version_status t = let valid_current = Protocol_version.is_valid (current_protocol_version t) in @@ -348,7 +349,7 @@ let protocol_version_status t = let matches_daemon = Protocol_version.compatible_with_daemon (current_protocol_version t) in - {valid_current; valid_next; matches_daemon} + { valid_current; valid_next; matches_daemon } module Validation = struct type ( 'time_received @@ -366,36 +367,36 @@ module Validation = struct * 'frontier_dependencies * 'staged_ledger_diff * 'protocol_versions - constraint 'time_received = [`Time_received] * (unit, _) Truth.t - constraint 'genesis_state = [`Genesis_state] * (unit, _) Truth.t - constraint 'proof = [`Proof] * (unit, _) Truth.t + constraint 'time_received = [ `Time_received ] * (unit, _) Truth.t + constraint 'genesis_state = [ `Genesis_state ] * (unit, _) Truth.t + constraint 'proof = [ `Proof ] * (unit, _) Truth.t constraint 'delta_transition_chain = - [`Delta_transition_chain] * (State_hash.t Non_empty_list.t, _) Truth.t + [ `Delta_transition_chain ] * (State_hash.t Non_empty_list.t, _) Truth.t constraint 'frontier_dependencies = - [`Frontier_dependencies] * (unit, _) Truth.t - constraint 
'staged_ledger_diff = [`Staged_ledger_diff] * (unit, _) Truth.t - constraint 'protocol_versions = [`Protocol_versions] * (unit, _) Truth.t + [ `Frontier_dependencies ] * (unit, _) Truth.t + constraint 'staged_ledger_diff = [ `Staged_ledger_diff ] * (unit, _) Truth.t + constraint 'protocol_versions = [ `Protocol_versions ] * (unit, _) Truth.t type fully_invalid = - ( [`Time_received] * unit Truth.false_t - , [`Genesis_state] * unit Truth.false_t - , [`Proof] * unit Truth.false_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.false_t - , [`Frontier_dependencies] * unit Truth.false_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.false_t ) + ( [ `Time_received ] * unit Truth.false_t + , [ `Genesis_state ] * unit Truth.false_t + , [ `Proof ] * unit Truth.false_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.false_t ) t type fully_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.true_t - , [`Staged_ledger_diff] * unit Truth.true_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t + , [ `Protocol_versions ] * unit Truth.true_t ) t type ( 'time_received @@ -426,23 +427,23 @@ module Validation = struct , (`Protocol_versions, Truth.False) ) type initial_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit 
Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.false_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.true_t ) t type almost_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.true_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.true_t ) t let wrap t = (t, fully_invalid) @@ -506,7 +507,7 @@ module Validation = struct module Unsafe = struct let set_valid_time_received : - ( [`Time_received] * unit Truth.false_t + ( [ `Time_received ] * unit Truth.false_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -514,7 +515,7 @@ module Validation = struct , 'staged_ledger_diff , 'protocol_versions ) t - -> ( [`Time_received] * unit Truth.true_t + -> ( [ `Time_received ] * unit Truth.true_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -542,7 +543,7 @@ module Validation = struct let set_valid_proof : ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.false_t + , [ `Proof ] * unit Truth.false_t , 'delta_transition_chain , 
'frontier_dependencies , 'staged_ledger_diff @@ -550,7 +551,7 @@ module Validation = struct t -> ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff @@ -575,7 +576,7 @@ module Validation = struct let set_valid_genesis_state : ( 'time_received - , [`Genesis_state] * unit Truth.false_t + , [ `Genesis_state ] * unit Truth.false_t , 'proof , 'delta_transition_chain , 'frontier_dependencies @@ -583,7 +584,7 @@ module Validation = struct , 'protocol_versions ) t -> ( 'time_received - , [`Genesis_state] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t , 'proof , 'delta_transition_chain , 'frontier_dependencies @@ -611,7 +612,7 @@ module Validation = struct ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.false_t , 'frontier_dependencies , 'staged_ledger_diff @@ -621,7 +622,7 @@ module Validation = struct -> ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t , 'frontier_dependencies , 'staged_ledger_diff @@ -651,7 +652,7 @@ module Validation = struct , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t , 'staged_ledger_diff , 'protocol_versions ) t @@ -659,7 +660,7 @@ module Validation = struct , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t , 'staged_ledger_diff , 'protocol_versions ) t = function @@ -686,7 +687,7 @@ module Validation = struct , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) t -> ( 'time_received @@ 
-694,7 +695,7 @@ module Validation = struct , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) t = function | ( time_received @@ -721,7 +722,7 @@ module Validation = struct , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.false_t ) + , [ `Protocol_versions ] * unit Truth.false_t ) t -> ( 'time_received , 'genesis_state @@ -729,7 +730,7 @@ module Validation = struct , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.true_t ) + , [ `Protocol_versions ] * unit Truth.true_t ) t = function | ( time_received , genesis_state @@ -804,13 +805,13 @@ let validate_proofs tvs ~verifier = let transition = With_hash.data t in Blockchain_snark.Blockchain.create ~state:(protocol_state transition) - ~proof:(protocol_state_proof transition) )) + ~proof:(protocol_state_proof transition))) with | Ok verified -> if verified then Ok (List.map tvs ~f:(fun (t, validation) -> - (t, Validation.Unsafe.set_valid_proof validation) )) + (t, Validation.Unsafe.set_valid_proof validation))) else Error `Invalid_proof | Error e -> Error (`Verifier_error e) @@ -823,14 +824,12 @@ let validate_delta_transition_chain (t, validation) = with | Some hashes -> Ok - ( t - , Validation.Unsafe.set_valid_delta_transition_chain validation hashes - ) + (t, Validation.Unsafe.set_valid_delta_transition_chain validation hashes) | None -> Error `Invalid_delta_transition_chain_proof let validate_protocol_versions (t, validation) = - let {valid_current; valid_next; matches_daemon} = + let { valid_current; valid_next; matches_daemon } = protocol_version_status (With_hash.data t) in if not (valid_current && valid_next) then Error `Invalid_protocol_version @@ -922,7 +921,7 @@ module Initial_validated = struct (external_transition, State_hash.t) With_hash.t * 
Validation.initial_valid type nonrec protocol_version_status = protocol_version_status = - {valid_current: bool; valid_next: bool; matches_daemon: bool} + { valid_current : bool; valid_next : bool; matches_daemon : bool } include With_validation end @@ -932,7 +931,7 @@ module Almost_validated = struct (external_transition, State_hash.t) With_hash.t * Validation.almost_valid type nonrec protocol_version_status = protocol_version_status = - {valid_current: bool; valid_next: bool; matches_daemon: bool} + { valid_current : bool; valid_next : bool; matches_daemon : bool } include With_validation end @@ -943,14 +942,14 @@ module Validated = struct module V1 = struct type t = (external_transition, State_hash.t) With_hash.t - * ( [`Time_received] * (unit, Truth.True.t) Truth.t - , [`Genesis_state] * (unit, Truth.True.t) Truth.t - , [`Proof] * (unit, Truth.True.t) Truth.t - , [`Delta_transition_chain] + * ( [ `Time_received ] * (unit, Truth.True.t) Truth.t + , [ `Genesis_state ] * (unit, Truth.True.t) Truth.t + , [ `Proof ] * (unit, Truth.True.t) Truth.t + , [ `Delta_transition_chain ] * (State_hash.t Non_empty_list.t, Truth.True.t) Truth.t - , [`Frontier_dependencies] * (unit, Truth.True.t) Truth.t - , [`Staged_ledger_diff] * (unit, Truth.True.t) Truth.t - , [`Protocol_versions] * (unit, Truth.True.t) Truth.t ) + , [ `Frontier_dependencies ] * (unit, Truth.True.t) Truth.t + , [ `Staged_ledger_diff ] * (unit, Truth.True.t) Truth.t + , [ `Protocol_versions ] * (unit, Truth.True.t) Truth.t ) Validation.t let to_latest = Fn.id @@ -958,7 +957,7 @@ module Validated = struct module Erased = struct (* if this type receives a new version, that changes the serialization of the type `t', so that type must also get a new version - *) + *) [%%versioned module Stable = struct module V1 = struct @@ -1034,7 +1033,7 @@ module Validated = struct end] type nonrec protocol_version_status = protocol_version_status = - {valid_current: bool; valid_next: bool; matches_daemon: bool} + { 
valid_current : bool; valid_next : bool; matches_daemon : bool } [%%define_locally Stable.Latest. @@ -1083,7 +1082,7 @@ module Validated = struct let (`If_this_is_used_it_should_have_a_comment_justifying_it c) = User_command.to_valid_unsafe x.data in - {x with data= c} ) + { x with data = c }) end let genesis ~precomputed_values = @@ -1098,17 +1097,17 @@ let genesis ~precomputed_values = create ~protocol_state (* We pass a dummy proof here, with the understanding that it will - never be validated except as part of the snark for the first - block produced (where we will explicitly generate the genesis - proof). - *) + never be validated except as part of the snark for the first + block produced (where we will explicitly generate the genesis + proof). + *) ~protocol_state_proof:Proof.blockchain_dummy ~staged_ledger_diff:empty_diff ~validation_callback: (Mina_net2.Validation_callback.create_without_expiration ()) ~delta_transition_chain_proof: (Protocol_state.previous_state_hash protocol_state, []) - () )) + ())) in transition @@ -1167,7 +1166,8 @@ struct [ ( "selection_context" , `String "External_transition.Transition_frontier_validation.validate_frontier_dependencies" - ) ]) + ) + ]) ~existing:(With_hash.map ~f:consensus_state root_transition) ~candidate:(With_hash.map ~f:consensus_state t) ) ~error:`Not_selected_over_frontier_root @@ -1186,13 +1186,13 @@ module Staged_ledger_validation = struct Fn.compose statement_target statement let validate_staged_ledger_diff : - ?skip_staged_ledger_verification:[`All | `Proofs] + ?skip_staged_ledger_verification:[ `All | `Proofs ] -> ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) Validation.with_transition -> logger:Logger.t @@ -1200,17 +1200,17 @@ module Staged_ledger_validation = struct -> verifier:Verifier.t -> parent_staged_ledger:Staged_ledger.t -> 
parent_protocol_state:Protocol_state.value - -> ( [`Just_emitted_a_proof of bool] + -> ( [ `Just_emitted_a_proof of bool ] * [ `External_transition_with_validation of ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) Validation.with_transition ] - * [`Staged_ledger of Staged_ledger.t] + * [ `Staged_ledger of Staged_ledger.t ] , [ `Invalid_staged_ledger_diff of [ `Incorrect_target_staged_ledger_hash | `Incorrect_target_snarked_ledger_hash ] @@ -1250,7 +1250,7 @@ module Staged_ledger_validation = struct , body_hash )) ~coinbase_receiver ~supercharge_coinbase |> Deferred.Result.map_error ~f:(fun e -> - `Staged_ledger_application_failed e ) + `Staged_ledger_application_failed e) in [%log debug] ~metadata: @@ -1282,7 +1282,8 @@ module Staged_ledger_validation = struct (not (Frozen_ledger_hash.equal target_ledger_hash (Blockchain_state.snarked_ledger_hash blockchain_state))) - `Incorrect_target_snarked_ledger_hash ] + `Incorrect_target_snarked_ledger_hash + ] in Deferred.return ( match maybe_errors with diff --git a/src/lib/mina_transition/external_transition_intf.ml b/src/lib/mina_transition/external_transition_intf.ml index eb70d7aace2..e12a2a7524a 100644 --- a/src/lib/mina_transition/external_transition_intf.ml +++ b/src/lib/mina_transition/external_transition_intf.ml @@ -8,7 +8,7 @@ module type External_transition_common_intf = sig type t type protocol_version_status = - {valid_current: bool; valid_next: bool; matches_daemon: bool} + { valid_current : bool; valid_next : bool; matches_daemon : bool } val protocol_version_status : t -> protocol_version_status @@ -95,12 +95,13 @@ module type S = sig end type t = - { scheduled_time: Block_time.Time.t - ; protocol_state: Protocol_state.value - ; protocol_state_proof: Proof.t - ; staged_ledger_diff: Staged_ledger_diff.t - ; delta_transition_chain_proof: - 
Frozen_ledger_hash.t * Frozen_ledger_hash.t list } + { scheduled_time : Block_time.Time.t + ; protocol_state : Protocol_state.value + ; protocol_state_proof : Proof.t + ; staged_ledger_diff : Staged_ledger_diff.t + ; delta_transition_chain_proof : + Frozen_ledger_hash.t * Frozen_ledger_hash.t list + } [@@deriving sexp, yojson] [%%versioned: @@ -109,13 +110,14 @@ module type S = sig module V1 : sig type nonrec t = t = - { scheduled_time: Block_time.Stable.V1.t - ; protocol_state: Protocol_state.Value.Stable.V1.t - ; protocol_state_proof: Mina_base.Proof.Stable.V1.t - ; staged_ledger_diff: Staged_ledger_diff.Stable.V1.t - ; delta_transition_chain_proof: + { scheduled_time : Block_time.Stable.V1.t + ; protocol_state : Protocol_state.Value.Stable.V1.t + ; protocol_state_proof : Mina_base.Proof.Stable.V1.t + ; staged_ledger_diff : Staged_ledger_diff.Stable.V1.t + ; delta_transition_chain_proof : Frozen_ledger_hash.Stable.V1.t - * Frozen_ledger_hash.Stable.V1.t list } + * Frozen_ledger_hash.Stable.V1.t list + } val to_latest : t -> t end @@ -141,58 +143,59 @@ module type S = sig * 'frontier_dependencies * 'staged_ledger_diff * 'protocol_versions - constraint 'time_received = [`Time_received] * (unit, _) Truth.t - constraint 'genesis_state = [`Genesis_state] * (unit, _) Truth.t - constraint 'proof = [`Proof] * (unit, _) Truth.t + constraint 'time_received = [ `Time_received ] * (unit, _) Truth.t + constraint 'genesis_state = [ `Genesis_state ] * (unit, _) Truth.t + constraint 'proof = [ `Proof ] * (unit, _) Truth.t constraint 'delta_transition_chain = - [`Delta_transition_chain] * (State_hash.t Non_empty_list.t, _) Truth.t + [ `Delta_transition_chain ] * (State_hash.t Non_empty_list.t, _) Truth.t constraint 'frontier_dependencies = - [`Frontier_dependencies] * (unit, _) Truth.t + [ `Frontier_dependencies ] * (unit, _) Truth.t constraint 'staged_ledger_diff = - [`Staged_ledger_diff] * (unit, _) Truth.t - constraint 'protocol_versions = [`Protocol_versions] * (unit, _) 
Truth.t + [ `Staged_ledger_diff ] * (unit, _) Truth.t + constraint 'protocol_versions = [ `Protocol_versions ] * (unit, _) Truth.t type fully_invalid = - ( [`Time_received] * unit Truth.false_t - , [`Genesis_state] * unit Truth.false_t - , [`Proof] * unit Truth.false_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.false_t - , [`Frontier_dependencies] * unit Truth.false_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.false_t ) + ( [ `Time_received ] * unit Truth.false_t + , [ `Genesis_state ] * unit Truth.false_t + , [ `Proof ] * unit Truth.false_t + , [ `Delta_transition_chain ] + * State_hash.t Non_empty_list.t Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.false_t ) t type fully_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.true_t - , [`Staged_ledger_diff] * unit Truth.true_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t + , [ `Protocol_versions ] * unit Truth.true_t ) t type initial_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.false_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ 
`Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.true_t ) t type almost_valid = - ( [`Time_received] * unit Truth.true_t - , [`Genesis_state] * unit Truth.true_t - , [`Proof] * unit Truth.true_t - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t - , [`Frontier_dependencies] * unit Truth.true_t - , [`Staged_ledger_diff] * unit Truth.false_t - , [`Protocol_versions] * unit Truth.true_t ) + ( [ `Time_received ] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.false_t + , [ `Protocol_versions ] * unit Truth.true_t ) t type ( 'time_received @@ -223,7 +226,7 @@ module type S = sig ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t , 'frontier_dependencies , 'staged_ledger_diff @@ -236,7 +239,7 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t , 'staged_ledger_diff , 'protocol_versions ) with_transition @@ -244,7 +247,7 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t , 'staged_ledger_diff , 'protocol_versions ) with_transition @@ -255,7 +258,7 @@ module type S = sig , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) with_transition -> ( 'time_received @@ -263,7 +266,7 @@ 
module type S = sig , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) with_transition @@ -292,8 +295,7 @@ module type S = sig module Initial_validated : sig type t = - (external_transition, State_hash.t) With_hash.t - * Validation.initial_valid + (external_transition, State_hash.t) With_hash.t * Validation.initial_valid [@@deriving compare] include External_transition_common_intf with type t := t @@ -314,13 +316,11 @@ module type S = sig val erase : t - -> ( Stable.Latest.t - , State_hash.Stable.Latest.t ) - With_hash.Stable.Latest.t + -> (Stable.Latest.t, State_hash.Stable.Latest.t) With_hash.Stable.Latest.t * State_hash.Stable.Latest.t Non_empty_list.Stable.Latest.t val create_unsafe : - external_transition -> [`I_swear_this_is_safe_see_my_comment of t] + external_transition -> [ `I_swear_this_is_safe_see_my_comment of t ] include External_transition_base_intf with type t := t @@ -358,8 +358,8 @@ module type S = sig val timestamp : t -> Block_time.t val skip_time_received_validation : - [`This_transition_was_not_received_via_gossip] - -> ( [`Time_received] * unit Truth.false_t + [ `This_transition_was_not_received_via_gossip ] + -> ( [ `Time_received ] * unit Truth.false_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -367,7 +367,7 @@ module type S = sig , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition - -> ( [`Time_received] * unit Truth.true_t + -> ( [ `Time_received ] * unit Truth.true_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -378,7 +378,7 @@ module type S = sig val validate_time_received : precomputed_values:Precomputed_values.t - -> ( [`Time_received] * unit Truth.false_t + -> ( [ `Time_received ] * unit Truth.false_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -387,7 +387,7 @@ module type S = sig , 'protocol_versions ) Validation.with_transition -> 
time_received:Block_time.t - -> ( ( [`Time_received] * unit Truth.true_t + -> ( ( [ `Time_received ] * unit Truth.true_t , 'genesis_state , 'proof , 'delta_transition_chain @@ -395,14 +395,14 @@ module type S = sig , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition - , [> `Invalid_time_received of [`Too_early | `Too_late of int64]] ) + , [> `Invalid_time_received of [ `Too_early | `Too_late of int64 ] ] ) Result.t val skip_proof_validation : - [`This_transition_was_generated_internally] + [ `This_transition_was_generated_internally ] -> ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.false_t + , [ `Proof ] * unit Truth.false_t , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff @@ -410,7 +410,7 @@ module type S = sig Validation.with_transition -> ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff @@ -418,11 +418,11 @@ module type S = sig Validation.with_transition val skip_delta_transition_chain_validation : - [`This_transition_was_not_received_via_gossip] + [ `This_transition_was_not_received_via_gossip ] -> ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.false_t , 'frontier_dependencies , 'staged_ledger_diff @@ -431,16 +431,17 @@ module type S = sig -> ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] * State_hash.t Non_empty_list.t Truth.true_t + , [ `Delta_transition_chain ] + * State_hash.t Non_empty_list.t Truth.true_t , 'frontier_dependencies , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition val skip_genesis_protocol_state_validation : - [`This_transition_was_generated_internally] + [ `This_transition_was_generated_internally ] -> ( 'time_received - , [`Genesis_state] * unit Truth.false_t + , [ `Genesis_state ] * unit Truth.false_t , 'proof , 
'delta_transition_chain , 'frontier_dependencies @@ -448,7 +449,7 @@ module type S = sig , 'protocol_versions ) Validation.with_transition -> ( 'time_received - , [`Genesis_state] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t , 'proof , 'delta_transition_chain , 'frontier_dependencies @@ -459,7 +460,7 @@ module type S = sig val validate_genesis_protocol_state : genesis_state_hash:State_hash.t -> ( 'time_received - , [`Genesis_state] * unit Truth.false_t + , [ `Genesis_state ] * unit Truth.false_t , 'proof , 'delta_transition_chain , 'frontier_dependencies @@ -467,20 +468,20 @@ module type S = sig , 'protocol_versions ) Validation.with_transition -> ( ( 'time_received - , [`Genesis_state] * unit Truth.true_t + , [ `Genesis_state ] * unit Truth.true_t , 'proof , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition - , [> `Invalid_genesis_protocol_state] ) + , [> `Invalid_genesis_protocol_state ] ) Result.t val validate_proofs : ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.false_t + , [ `Proof ] * unit Truth.false_t , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff @@ -490,21 +491,21 @@ module type S = sig -> verifier:Verifier.t -> ( ( 'time_received , 'genesis_state - , [`Proof] * unit Truth.true_t + , [ `Proof ] * unit Truth.true_t , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition list - , [> `Invalid_proof | `Verifier_error of Error.t] ) + , [> `Invalid_proof | `Verifier_error of Error.t ] ) Deferred.Result.t val validate_delta_transition_chain : ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ `Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.false_t , 'frontier_dependencies , 'staged_ledger_diff @@ -513,13 +514,13 @@ module type S = sig -> ( ( 'time_received , 'genesis_state , 'proof - , [`Delta_transition_chain] + , [ 
`Delta_transition_chain ] * State_hash.t Non_empty_list.t Truth.true_t , 'frontier_dependencies , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition - , [> `Invalid_delta_transition_chain_proof] ) + , [> `Invalid_delta_transition_chain_proof ] ) Result.t val validate_protocol_versions : @@ -529,7 +530,7 @@ module type S = sig , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.false_t ) + , [ `Protocol_versions ] * unit Truth.false_t ) Validation.with_transition -> ( ( 'time_received , 'genesis_state @@ -537,9 +538,9 @@ module type S = sig , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.true_t ) + , [ `Protocol_versions ] * unit Truth.true_t ) Validation.with_transition - , [> `Invalid_protocol_version | `Mismatched_protocol_version] ) + , [> `Invalid_protocol_version | `Mismatched_protocol_version ] ) Result.t (* This functor is necessary to break the dependency cycle between the Transition_fronter and the External_transition *) @@ -561,7 +562,7 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition @@ -572,7 +573,7 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition @@ -589,7 +590,7 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , [`Frontier_dependencies] * unit Truth.false_t + , [ `Frontier_dependencies ] * unit Truth.false_t , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition @@ -597,19 +598,19 @@ module type S = sig , 'genesis_state , 'proof , 'delta_transition_chain - , 
[`Frontier_dependencies] * unit Truth.true_t + , [ `Frontier_dependencies ] * unit Truth.true_t , 'staged_ledger_diff , 'protocol_versions ) Validation.with_transition val validate_staged_ledger_hash : - [`Staged_ledger_already_materialized of Staged_ledger_hash.t] + [ `Staged_ledger_already_materialized of Staged_ledger_hash.t ] -> ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) Validation.with_transition -> ( ( 'time_received @@ -617,20 +618,20 @@ module type S = sig , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) Validation.with_transition - , [> `Staged_ledger_hash_mismatch] ) + , [> `Staged_ledger_hash_mismatch ] ) Result.t val skip_staged_ledger_diff_validation : - [`This_transition_has_a_trusted_staged_ledger] + [ `This_transition_has_a_trusted_staged_ledger ] -> ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) Validation.with_transition -> ( 'time_received @@ -638,19 +639,19 @@ module type S = sig , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) Validation.with_transition val skip_protocol_versions_validation : - [`This_transition_has_valid_protocol_versions] + [ `This_transition_has_valid_protocol_versions ] -> ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.false_t ) + , [ `Protocol_versions ] * unit Truth.false_t ) Validation.with_transition -> ( 'time_received , 
'genesis_state @@ -658,18 +659,18 @@ module type S = sig , 'delta_transition_chain , 'frontier_dependencies , 'staged_ledger_diff - , [`Protocol_versions] * unit Truth.true_t ) + , [ `Protocol_versions ] * unit Truth.true_t ) Validation.with_transition module Staged_ledger_validation : sig val validate_staged_ledger_diff : - ?skip_staged_ledger_verification:[`All | `Proofs] + ?skip_staged_ledger_verification:[ `All | `Proofs ] -> ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.false_t + , [ `Staged_ledger_diff ] * unit Truth.false_t , 'protocol_versions ) Validation.with_transition -> logger:Logger.t @@ -677,17 +678,17 @@ module type S = sig -> verifier:Verifier.t -> parent_staged_ledger:Staged_ledger.t -> parent_protocol_state:Protocol_state.value - -> ( [`Just_emitted_a_proof of bool] + -> ( [ `Just_emitted_a_proof of bool ] * [ `External_transition_with_validation of ( 'time_received , 'genesis_state , 'proof , 'delta_transition_chain , 'frontier_dependencies - , [`Staged_ledger_diff] * unit Truth.true_t + , [ `Staged_ledger_diff ] * unit Truth.true_t , 'protocol_versions ) Validation.with_transition ] - * [`Staged_ledger of Staged_ledger.t] + * [ `Staged_ledger of Staged_ledger.t ] , [ `Invalid_staged_ledger_diff of [ `Incorrect_target_staged_ledger_hash | `Incorrect_target_snarked_ledger_hash ] diff --git a/src/lib/mina_transition/internal_transition.ml b/src/lib/mina_transition/internal_transition.ml index 1df6e3ada24..d8f9c5d9c81 100644 --- a/src/lib/mina_transition/internal_transition.ml +++ b/src/lib/mina_transition/internal_transition.ml @@ -4,15 +4,14 @@ open Mina_state module type S = sig type t [@@deriving sexp, to_yojson] - module Stable : - sig - module V1 : sig - type t [@@deriving sexp, to_yojson, bin_io] - end - - module Latest = V1 + module Stable : sig + module V1 : sig + type t [@@deriving sexp, to_yojson, bin_io] end - with type V1.t = t + + module Latest = V1 
+ end + with type V1.t = t val create : snark_transition:Snark_transition.Value.t @@ -36,10 +35,11 @@ module Stable = struct module V1 = struct type t = - { snark_transition: Snark_transition.Value.Stable.V1.t - ; ledger_proof: Ledger_proof.Stable.V1.t option - ; prover_state: Consensus.Data.Prover_state.Stable.V1.t - ; staged_ledger_diff: Staged_ledger_diff.Stable.V1.t } + { snark_transition : Snark_transition.Value.Stable.V1.t + ; ledger_proof : Ledger_proof.Stable.V1.t option + ; prover_state : Consensus.Data.Prover_state.Stable.V1.t + ; staged_ledger_diff : Staged_ledger_diff.Stable.V1.t + } let to_latest = Fn.id end @@ -47,14 +47,16 @@ end] (* bin_io, version omitted *) type t = Stable.Latest.t = - { snark_transition: Snark_transition.Value.t - ; ledger_proof: Ledger_proof.t option - ; prover_state: Consensus.Data.Prover_state.t - ; staged_ledger_diff: Staged_ledger_diff.t } + { snark_transition : Snark_transition.Value.t + ; ledger_proof : Ledger_proof.t option + ; prover_state : Consensus.Data.Prover_state.t + ; staged_ledger_diff : Staged_ledger_diff.t + } [@@deriving sexp, fields, to_yojson] let create ~snark_transition ~ledger_proof ~prover_state ~staged_ledger_diff = { Stable.Latest.snark_transition ; ledger_proof ; staged_ledger_diff - ; prover_state } + ; prover_state + } diff --git a/src/lib/mina_user_error/mina_user_error.ml b/src/lib/mina_user_error/mina_user_error.ml index d2218baa07c..0c620c2b360 100644 --- a/src/lib/mina_user_error/mina_user_error.ml +++ b/src/lib/mina_user_error/mina_user_error.ml @@ -1,14 +1,14 @@ -exception Mina_user_error of {message: string; where: string option} +exception Mina_user_error of { message : string; where : string option } let raisef ?where = - Format.ksprintf (fun message -> raise (Mina_user_error {message; where})) + Format.ksprintf (fun message -> raise (Mina_user_error { message; where })) -let raise ?where message = raise (Mina_user_error {message; where}) +let raise ?where message = raise (Mina_user_error 
{ message; where }) let () = Stdlib.Printexc.register_printer (fun exn -> match exn with - | Mina_user_error {message; where} -> + | Mina_user_error { message; where } -> let error = match where with | None -> @@ -26,4 +26,4 @@ FATAL ERROR %!|err} error message) | _ -> - None ) + None) diff --git a/src/lib/network_peer/envelope.ml b/src/lib/network_peer/envelope.ml index 813103ada10..6a974ac0f0f 100644 --- a/src/lib/network_peer/envelope.ml +++ b/src/lib/network_peer/envelope.ml @@ -12,13 +12,13 @@ module Sender = struct | Local -> `String "Local" | Remote p -> - `Assoc [("Remote", Peer.to_yojson p)] + `Assoc [ ("Remote", Peer.to_yojson p) ] let of_yojson (json : Yojson.Safe.t) : (t, string) Result.t = match json with | `String "Local" -> Ok Local - | `Assoc [("Remote", peer_json)] -> + | `Assoc [ ("Remote", peer_json) ] -> let open Result.Let_syntax in let%map peer = Peer.of_yojson peer_json in Remote peer @@ -68,10 +68,11 @@ module Incoming = struct Error "time_of_yojson: Expected string" type 'a t = - { data: 'a - ; sender: Sender.t - ; received_at: Time.t - [@to_yojson time_to_yojson] [@of_yojson time_of_yojson] } + { data : 'a + ; sender : Sender.t + ; received_at : Time.t + [@to_yojson time_to_yojson] [@of_yojson time_of_yojson] + } [@@deriving equal, sexp, yojson, compare] let sender t = t.sender @@ -82,18 +83,18 @@ module Incoming = struct let wrap ~data ~sender = let received_at = Time.now () in - {data; sender; received_at} + { data; sender; received_at } let wrap_peer ~data ~sender = let received_at = Time.now () in - {data; sender= Sender.of_peer sender; received_at} + { data; sender = Sender.of_peer sender; received_at } - let map ~f t = {t with data= f t.data} + let map ~f t = { t with data = f t.data } let local data = let received_at = Time.now () in let sender = Sender.Local in - {data; sender; received_at} + { data; sender; received_at } let remote_sender_exn t = match t.sender with @@ -107,5 +108,5 @@ module Incoming = struct let%bind data = 
gen_a in let%map sender = Sender.gen in let received_at = Time.now () in - {data; sender; received_at} + { data; sender; received_at } end diff --git a/src/lib/network_peer/envelope.mli b/src/lib/network_peer/envelope.mli index e54cb0df493..1a2dc389424 100644 --- a/src/lib/network_peer/envelope.mli +++ b/src/lib/network_peer/envelope.mli @@ -7,7 +7,7 @@ module Sender : sig end module Incoming : sig - type 'a t = {data: 'a; sender: Sender.t; received_at: Time.t} + type 'a t = { data : 'a; sender : Sender.t; received_at : Time.t } [@@deriving equal, sexp, yojson, compare] val sender : 'a t -> Sender.t diff --git a/src/lib/network_peer/network_peer.ml b/src/lib/network_peer/network_peer.ml index c92ba02e91e..0b9a2b33b13 100644 --- a/src/lib/network_peer/network_peer.ml +++ b/src/lib/network_peer/network_peer.ml @@ -2,9 +2,9 @@ module Peer = Peer module Envelope = Envelope type query_peer = - { query: + { query : 'r 'q. Peer.t -> ( Async_rpc_kernel.Versioned_rpc.Connection_with_menu.t -> 'q - -> 'r Async.Deferred.Or_error.t) -> 'q - -> 'r Async.Deferred.Or_error.t } + -> 'r Async.Deferred.Or_error.t) -> 'q -> 'r Async.Deferred.Or_error.t + } diff --git a/src/lib/network_peer/peer.ml b/src/lib/network_peer/peer.ml index 2bc44a3e920..212843327e2 100644 --- a/src/lib/network_peer/peer.ml +++ b/src/lib/network_peer/peer.ml @@ -26,9 +26,10 @@ module Stable = struct module V1 = struct type t = - { host: Core.Unix.Inet_addr.Stable.V1.t (* IPv4 or IPv6 address *) - ; libp2p_port: int (* TCP *) - ; peer_id: Id.Stable.V1.t } + { host : Core.Unix.Inet_addr.Stable.V1.t (* IPv4 or IPv6 address *) + ; libp2p_port : int (* TCP *) + ; peer_id : Id.Stable.V1.t + } [@@deriving compare, sexp] let to_latest = Fn.id @@ -41,11 +42,12 @@ module Stable = struct let hash : t -> int = Ppx_hash_lib.Std.Hash.of_fold hash_fold_t - let to_yojson {host; peer_id; libp2p_port} = + let to_yojson { host; peer_id; libp2p_port } = `Assoc [ ("host", `String (Unix.Inet_addr.to_string host)) ; ("peer_id", 
`String peer_id) - ; ("libp2p_port", `Int libp2p_port) ] + ; ("libp2p_port", `Int libp2p_port) + ] let of_yojson = let lift_string = function `String s -> Some s | _ -> None in @@ -61,41 +63,39 @@ module Stable = struct List.Assoc.find ls "peer_id" ~equal:String.equal >>= lift_string in let%map libp2p_port = - List.Assoc.find ls "libp2p_port" ~equal:String.equal - >>= lift_int + List.Assoc.find ls "libp2p_port" ~equal:String.equal >>= lift_int in let host = Unix.Inet_addr.of_string host_str in - {host; peer_id; libp2p_port}) + { host; peer_id; libp2p_port }) | _ -> Error "expected object" end end] type t = Stable.Latest.t = - {host: Unix.Inet_addr.Blocking_sexp.t; libp2p_port: int; peer_id: string} + { host : Unix.Inet_addr.Blocking_sexp.t; libp2p_port : int; peer_id : string } [@@deriving compare, sexp] -[%%define_locally -Stable.Latest.(of_yojson, to_yojson)] +[%%define_locally Stable.Latest.(of_yojson, to_yojson)] include Hashable.Make (Stable.Latest) include Comparable.Make_binable (Stable.Latest) -let create host ~libp2p_port ~peer_id = {host; libp2p_port; peer_id} +let create host ~libp2p_port ~peer_id = { host; libp2p_port; peer_id } let to_discovery_host_and_port t = Host_and_port.create ~host:(Unix.Inet_addr.to_string t.host) ~port:t.libp2p_port -let to_string {host; libp2p_port; peer_id} = +let to_string { host; libp2p_port; peer_id } = sprintf !"[host : %s, libp2p_port : %s, peer_id : %s]" (Unix.Inet_addr.to_string host) (Int.to_string libp2p_port) peer_id -let to_multiaddr_string {host; libp2p_port; peer_id} = +let to_multiaddr_string { host; libp2p_port; peer_id } = sprintf "/ip4/%s/tcp/%d/p2p/%s" (Unix.Inet_addr.to_string host) libp2p_port peer_id @@ -115,28 +115,31 @@ module Display = struct [@@@no_toplevel_latest_type] module V1 = struct - type t = {host: string; libp2p_port: int; peer_id: string} + type t = { host : string; libp2p_port : int; peer_id : string } [@@deriving yojson, version, sexp, fields] let to_latest = Fn.id end end] - type t = 
Stable.Latest.t = {host: string; libp2p_port: int; peer_id: string} + type t = Stable.Latest.t = + { host : string; libp2p_port : int; peer_id : string } [@@deriving yojson, sexp] module Fields = Stable.Latest.Fields end -let ip {host; _} = host +let ip { host; _ } = host -let to_display {host; libp2p_port; peer_id} = +let to_display { host; libp2p_port; peer_id } = Display. - { host= Unix.Inet_addr.to_string host + { host = Unix.Inet_addr.to_string host ; libp2p_port - ; peer_id= Id.to_string peer_id } + ; peer_id = Id.to_string peer_id + } -let of_display {Display.host; libp2p_port; peer_id} = - { host= Unix.Inet_addr.of_string host +let of_display { Display.host; libp2p_port; peer_id } = + { host = Unix.Inet_addr.of_string host ; libp2p_port - ; peer_id= Id.unsafe_of_string peer_id } + ; peer_id = Id.unsafe_of_string peer_id + } diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index 0679db89166..b1459ea68d7 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -5,50 +5,53 @@ open Network_peer module Id = Unique_id.Int () type ('init, 'result) elt = - { id: Id.t - ; data: 'init - ; weight: int - ; res: (('result, unit) Result.t Or_error.t Ivar.t [@sexp.opaque]) } + { id : Id.t + ; data : 'init + ; weight : int + ; res : (('result, unit) Result.t Or_error.t Ivar.t[@sexp.opaque]) + } [@@deriving sexp] type ('proof, 'result) state = | Waiting - | Verifying of {out_for_verification: ('proof, 'result) elt list} + | Verifying of { out_for_verification : ('proof, 'result) elt list } [@@deriving sexp] module Q = Doubly_linked type ('init, 'partially_validated, 'result) t = - { mutable state: ('init, 'result) state - ; how_to_add: [`Insert | `Enqueue_back] - ; queue: ('init, 'result) elt Q.t - ; compare_init: ('init -> 'init -> int) option - ; logger: (Logger.t [@sexp.opaque]) - ; weight: 'init -> int - ; max_weight_per_call: int option - ; verifier: - (* The batched verifier may make partial progress on its 
input so that we can - save time when it is re-verified in a smaller batch in the case that a batch - fails to verify. *) - ( [`Init of 'init | `Partially_validated of 'partially_validated] list - -> [ `Valid of 'result - | `Invalid - | `Potentially_invalid of 'partially_validated ] - list - Deferred.Or_error.t) - [@sexp.opaque] } + { mutable state : ('init, 'result) state + ; how_to_add : [ `Insert | `Enqueue_back ] + ; queue : ('init, 'result) elt Q.t + ; compare_init : ('init -> 'init -> int) option + ; logger : (Logger.t[@sexp.opaque]) + ; weight : 'init -> int + ; max_weight_per_call : int option + ; verifier : + (* The batched verifier may make partial progress on its input so that we can + save time when it is re-verified in a smaller batch in the case that a batch + fails to verify. *) + [ `Init of 'init | `Partially_validated of 'partially_validated ] list + -> [ `Valid of 'result + | `Invalid + | `Potentially_invalid of 'partially_validated ] + list + Deferred.Or_error.t + [@sexp.opaque] + } [@@deriving sexp] let create ?(how_to_add = `Enqueue_back) ?logger ?compare_init ?(weight = fun _ -> 1) ?max_weight_per_call verifier = - { state= Waiting - ; queue= Q.create () + { state = Waiting + ; queue = Q.create () ; how_to_add ; compare_init ; verifier ; weight ; max_weight_per_call - ; logger= Option.value logger ~default:(Logger.create ()) } + ; logger = Option.value logger ~default:(Logger.create ()) + } let call_verifier t (ps : 'proof list) = t.verifier ps @@ -56,14 +59,15 @@ let call_verifier t (ps : 'proof list) = t.verifier ps In the average case this should show better performance. We could implement the trusted/untrusted batches from the snark pool batching RFC #4882 to further mitigate possible DoS/DDoS here*) -let rec determine_outcome : type p r partial. +let rec determine_outcome : + type p r partial. 
(p, r) elt list - -> [`Valid of r | `Invalid | `Potentially_invalid of partial] list + -> [ `Valid of r | `Invalid | `Potentially_invalid of partial ] list -> (p, partial, r) t -> unit Deferred.Or_error.t = fun ps res v -> (* First separate out all the known results. That information will definitely be included - in the outcome. *) + in the outcome. *) let potentially_invalid = List.filter_map (List.zip_exn ps res) ~f:(fun (elt, r) -> match r with @@ -78,14 +82,14 @@ let rec determine_outcome : type p r partial. Ivar.fill elt.res (Ok (Error ())) ; None | `Potentially_invalid new_hint -> - Some (elt, new_hint) ) + Some (elt, new_hint)) in let open Deferred.Or_error.Let_syntax in match potentially_invalid with | [] -> (* All results are known *) return () - | [({res; _}, _)] -> + | [ ({ res; _ }, _) ] -> if Ivar.is_full res then [%log' error (Logger.create ())] "Ivar.fill bug is here!" ; Ivar.fill res (Ok (Error ())) ; @@ -97,7 +101,7 @@ let rec determine_outcome : type p r partial. let%bind res_xs = call_verifier v (List.map xs ~f:(fun (_e, new_hint) -> - `Partially_validated new_hint )) + `Partially_validated new_hint)) in determine_outcome (List.map xs ~f:fst) res_xs v in @@ -128,7 +132,7 @@ let rec start_verifier : type proof partial r. (proof, partial, r) t -> unit = t.state <- Waiting else ( [%log' debug t.logger] "Verifying proofs in batch of size $num_proofs" - ~metadata:[("num_proofs", `Int (Q.length t.queue))] ; + ~metadata:[ ("num_proofs", `Int (Q.length t.queue)) ] ; let out_for_verification = let proofs = match t.max_weight_per_call with @@ -140,7 +144,7 @@ let rec start_verifier : type proof partial r. (proof, partial, r) t -> unit = match Q.first t.queue with | None -> acc - | Some ({weight; _} as proof) -> + | Some ({ weight; _ } as proof) -> if weight <= capacity then ( ignore (Q.remove_first t.queue : (proof, r) elt option) ; take (capacity - weight) (proof :: acc) ) @@ -156,32 +160,37 @@ let rec start_verifier : type proof partial r. 
(proof, partial, r) t -> unit = ; ( "ids" , `List (List.map - ~f:(fun {id; _} -> `Int (Id.to_int_exn id)) - out_for_verification) ) ] ; + ~f:(fun { id; _ } -> `Int (Id.to_int_exn id)) + out_for_verification) ) + ] ; let res = match%bind call_verifier t - (List.map out_for_verification ~f:(fun {data= p; _} -> `Init p)) + (List.map out_for_verification ~f:(fun { data = p; _ } -> `Init p)) with | Error e -> Deferred.return (Error e) | Ok res -> determine_outcome out_for_verification res t in - t.state <- Verifying {out_for_verification} ; + t.state <- Verifying { out_for_verification } ; upon res (fun r -> ( match r with | Ok () -> () | Error e -> List.iter out_for_verification ~f:(fun x -> - Ivar.fill_if_empty x.res (Error e) ) ) ; - start_verifier t ) ) + Ivar.fill_if_empty x.res (Error e)) ) ; + start_verifier t) ) let verify (type p r partial) (t : (p, partial, r) t) (proof : p) : (r, unit) Result.t Deferred.Or_error.t = let elt = - {id= Id.create (); data= proof; weight= t.weight proof; res= Ivar.create ()} + { id = Id.create () + ; data = proof + ; weight = t.weight proof + ; res = Ivar.create () + } in ignore ( match (t.how_to_add, t.compare_init) with @@ -202,8 +211,7 @@ let verify (type p r partial) (t : (p, partial, r) t) (proof : p) : type ('a, 'b, 'c) batcher = ('a, 'b, 'c) t [@@deriving sexp] -let compare_envelope (e1 : _ Envelope.Incoming.t) (e2 : _ Envelope.Incoming.t) - = +let compare_envelope (e1 : _ Envelope.Incoming.t) (e2 : _ Envelope.Incoming.t) = Envelope.Sender.compare e1.sender e2.sender module Transaction_pool = struct @@ -231,24 +239,24 @@ module Transaction_pool = struct type t = (diff, partial, User_command.Valid.t list) batcher [@@deriving sexp] - type input = [`Init of diff | `Partially_validated of partial] + type input = [ `Init of diff | `Partially_validated of partial ] let init_result (ds : input list) = (* We store a result for every diff in the input. 
*) Array.of_list_map ds ~f:(function | `Init d -> (* Initially, the status of all the transactions in a never-before-seen - diff are unknown. *) + diff are unknown. *) `In_progress (Array.of_list_map d.data ~f:(fun _ -> `Unknown)) | `Partially_validated d -> (* We've seen this diff before, so we have some information about its - transactions. *) + transactions. *) `In_progress (Array.of_list_map d ~f:(function | `Valid c -> `Valid c | `Valid_assuming x -> - `Valid_assuming x )) ) + `Valid_assuming x))) let list_of_array_map a ~f = List.init (Array.length a) ~f:(fun i -> f a.(i)) @@ -275,7 +283,7 @@ module Transaction_pool = struct None | `Valid_assuming (v, _) -> (* TODO: This rechecks the signatures on snapp transactions... oh well for now *) - Some ((i, j), v) ) ) + Some ((i, j), v))) in let%map res = (* Verify the unknowns *) @@ -287,42 +295,42 @@ module Transaction_pool = struct match r with | `Invalid -> (* A diff is invalid is any of the transactions it contains are invalid. - Invalidate the whole diff that this transaction comes from. *) + Invalidate the whole diff that this transaction comes from. *) result.(i) <- `Invalid | `Valid_assuming xs -> ( - match result.(i) with - | `Invalid -> - (* If this diff has already been declared invalid, knowing that one of its - transactions is partially valid is not useful. *) - () - | `In_progress a -> - (* The diff may still be valid. *) - a.(j) <- `Valid_assuming (v, xs) ) + match result.(i) with + | `Invalid -> + (* If this diff has already been declared invalid, knowing that one of its + transactions is partially valid is not useful. *) + () + | `In_progress a -> + (* The diff may still be valid. *) + a.(j) <- `Valid_assuming (v, xs) ) | `Valid c -> ( - (* Similar to the above. *) - match result.(i) with - | `Invalid -> - () - | `In_progress a -> - a.(j) <- `Valid c ) ) ; + (* Similar to the above. 
*) + match result.(i) with + | `Invalid -> + () + | `In_progress a -> + a.(j) <- `Valid c )) ; list_of_array_map result ~f:(function | `Invalid -> `Invalid | `In_progress a -> ( - (* If the diff is all valid, we're done. If not, we return a partial - result. *) - match all_valid a with - | Some res -> - `Valid res - | None -> - `Potentially_invalid - (list_of_array_map a ~f:(function - | `Unknown -> - assert false - | `Valid c -> - `Valid c - | `Valid_assuming (v, xs) -> - `Valid_assuming (v, xs) )) ) ) ) + (* If the diff is all valid, we're done. If not, we return a partial + result. *) + match all_valid a with + | Some res -> + `Valid res + | None -> + `Potentially_invalid + (list_of_array_map a ~f:(function + | `Unknown -> + assert false + | `Valid c -> + `Valid c + | `Valid_assuming (v, xs) -> + `Valid_assuming (v, xs))) ))) let verify (t : t) = verify t end @@ -345,7 +353,7 @@ module Snark_pool = struct create (* TODO: Make this a proper config detail once we have data on what a good default would be. 
- *) + *) ~max_weight_per_call: (Option.value_map ~default:1000 ~f:Int.of_string (Sys.getenv_opt "MAX_VERIFIER_BATCH_SIZE")) @@ -355,8 +363,7 @@ module Snark_pool = struct List.concat_map ps0 ~f:(function | `Partially_validated env | `Init env -> let ps, message = env.data in - One_or_two.map ps ~f:(fun p -> (p, message)) - |> One_or_two.to_list ) + One_or_two.map ps ~f:(fun p -> (p, message)) |> One_or_two.to_list) in let open Deferred.Or_error.Let_syntax in match%map Verifier.verify_transaction_snarks verifier ps with @@ -364,7 +371,7 @@ module Snark_pool = struct List.map ps0 ~f:(fun _ -> `Valid ()) | false -> List.map ps0 ~f:(function `Partially_validated env | `Init env -> - `Potentially_invalid env ) ) + `Potentially_invalid env)) module Work_key = struct module T = struct @@ -376,7 +383,7 @@ module Snark_pool = struct let of_proof_envelope t = Envelope.Incoming.map t ~f:(fun (ps, message) -> - (One_or_two.map ~f:Ledger_proof.statement ps, message) ) + (One_or_two.map ~f:Ledger_proof.statement ps, message)) include T include Comparable.Make (T) @@ -386,7 +393,7 @@ module Snark_pool = struct let open Deferred.Or_error.Let_syntax in let%map invalid = Deferred.Or_error.List.filter_map ps ~f:(fun p -> - match%map verify t p with true -> None | false -> Some p ) + match%map verify t p with true -> None | false -> Some p) in `Invalid (Work_key.Set.of_list (List.map invalid ~f:Work_key.of_proof_envelope)) @@ -407,7 +414,7 @@ module Snark_pool = struct Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) let gen_proofs = let open Quickcheck.Generator.Let_syntax in @@ -415,7 +422,7 @@ module Snark_pool = struct let%bind statements = One_or_two.gen Transaction_snark.Statement.gen in - let%map {fee; prover} = Fee_with_prover.gen in + let%map { fee; prover } = Fee_with_prover.gen in let 
message = Mina_base.Sok_message.create ~fee ~prover in ( One_or_two.map statements ~f:Ledger_proof.For_tests.mk_dummy_proof , message ) @@ -428,7 +435,7 @@ module Snark_pool = struct let%bind statements = One_or_two.gen Transaction_snark.Statement.gen in - let%bind {fee; prover} = Fee_with_prover.gen in + let%bind { fee; prover } = Fee_with_prover.gen in let%map invalid_prover = Quickcheck.Generator.filter Signature_lib.Public_key.Compressed.gen ~f:(Signature_lib.Public_key.Compressed.( <> ) prover) @@ -439,7 +446,7 @@ module Snark_pool = struct let message = Mina_base.Sok_message.create ~fee ~prover in ( One_or_two.map statements ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:Proof.transaction_dummy) , message ) in Envelope.Incoming.gen data_gen @@ -449,10 +456,10 @@ module Snark_pool = struct Deferred.List.iter proof_lists ~f:(fun (invalid_proofs, proof_list) -> let%map r = verify' batcher proof_list in let (`Invalid ps) = Or_error.ok_exn r in - assert (Work_key.Set.equal ps invalid_proofs) ) + assert (Work_key.Set.equal ps invalid_proofs)) - let gen ~(valid_count : [`Any | `Count of int]) - ~(invalid_count : [`Any | `Count of int]) = + let gen ~(valid_count : [ `Any | `Count of int ]) + ~(invalid_count : [ `Any | `Count of int ]) = let open Quickcheck.Generator.Let_syntax in let gen_with_count count gen = match count with @@ -468,27 +475,27 @@ module Snark_pool = struct in List.map lst ~f:(fun (valid, invalid) -> ( Work_key.(Set.of_list (List.map ~f:of_proof_envelope invalid)) - , List.permute valid @ invalid ) ) + , List.permute valid @ invalid )) let%test_unit "all valid proofs" = Quickcheck.test ~trials:10 (gen ~valid_count:`Any ~invalid_count:(`Count 0)) ~f:(fun proof_lists -> Async.Thread_safe.block_on_async_exn (fun () -> - run_test proof_lists ) ) + run_test proof_lists)) let%test_unit "some invalid proofs" = Quickcheck.test ~trials:10 (gen ~valid_count:`Any ~invalid_count:`Any) ~f:(fun 
proof_lists -> Async.Thread_safe.block_on_async_exn (fun () -> - run_test proof_lists ) ) + run_test proof_lists)) let%test_unit "all invalid proofs" = Quickcheck.test ~trials:10 (gen ~valid_count:(`Count 0) ~invalid_count:`Any) ~f:(fun proof_lists -> Async.Thread_safe.block_on_async_exn (fun () -> - run_test proof_lists ) ) + run_test proof_lists)) end ) end diff --git a/src/lib/network_pool/batcher.mli b/src/lib/network_pool/batcher.mli index 41de07c17cc..04e3176d0bb 100644 --- a/src/lib/network_pool/batcher.mli +++ b/src/lib/network_pool/batcher.mli @@ -17,12 +17,12 @@ end type ('initial, 'partially_validated, 'result) t val create : - ?how_to_add:[`Insert | `Enqueue_back] + ?how_to_add:[ `Insert | `Enqueue_back ] -> ?logger:Logger.t -> ?compare_init:('init -> 'init -> int) -> ?weight:('init -> int) -> ?max_weight_per_call:int - -> ( [`Init of 'init | `Partially_validated of 'partially_validated] list + -> ( [ `Init of 'init | `Partially_validated of 'partially_validated ] list -> [ `Valid of 'result | `Invalid | `Potentially_invalid of 'partially_validated ] diff --git a/src/lib/network_pool/f_sequence.ml b/src/lib/network_pool/f_sequence.ml index dbca4a3244e..7f892ed2c2f 100644 --- a/src/lib/network_pool/f_sequence.ml +++ b/src/lib/network_pool/f_sequence.ml @@ -42,7 +42,7 @@ module Digit = struct 1 | Three (x1, x2, x3), Three (y1, y2, y3) -> fallthrough (cmp_e x1 y1) ~f:(fun () -> - fallthrough (cmp_e x2 y2) ~f:(fun () -> cmp_e x3 y3) ) + fallthrough (cmp_e x2 y2) ~f:(fun () -> cmp_e x3 y3)) | Three _, _ -> -1 | _, Three _ -> @@ -50,7 +50,7 @@ module Digit = struct | Four (x1, x2, x3, x4), Four (y1, y2, y3, y4) -> fallthrough (cmp_e x1 y1) ~f:(fun () -> fallthrough (cmp_e x2 y2) ~f:(fun () -> - fallthrough (cmp_e x3 y3) ~f:(fun () -> cmp_e x4 y4) ) ) + fallthrough (cmp_e x3 y3) ~f:(fun () -> cmp_e x4 y4))) | Four _, _ -> . 
| _, Four _ -> @@ -61,7 +61,8 @@ module Digit = struct makes the typechecker understand existentials under or-patterns isn't in our compiler version. (ocaml/ocaml#2110) *) - let addable_elim : type a r. + let addable_elim : + type a r. ((addable, r, 'e) t -> 'o) (** Function handling addable case *) -> ((not_addable, removable, 'e) t -> 'o) (** Function handling non-addable case *) @@ -70,7 +71,8 @@ module Digit = struct fun f g t -> match t with One _ -> f t | Two _ -> f t | Three _ -> f t | Four _ -> g t - let removable_elim : type a r. + let removable_elim : + type a r. ((a, removable, 'e) t -> 'o) (** Function handling removable case*) -> ((addable, not_removable, 'e) t -> 'o) (** Function handling non-removable case *) @@ -173,7 +175,8 @@ module Digit = struct [ Mk_any_ar (One a) ; Mk_any_ar (Two (a, b)) ; Mk_any_ar (Three (a, b, c)) - ; Mk_any_ar (Four (a, b, c, d)) ] + ; Mk_any_ar (Four (a, b, c, d)) + ] (** Given a measurement function, compute the total measure of a digit. See below for an explanation of what measure is. @@ -182,7 +185,8 @@ module Digit = struct fun measure' -> foldl (fun m e -> m + measure' e) 0 (** Split a digit by measure. Again see below. *) - let split : type a r. + let split : + type a r. ('e -> int) -> int -> int @@ -192,7 +196,8 @@ module Digit = struct (* Addable inputs go to addable outputs, but non-addable inputs may go to either. We use a separate function for addables to represent this and minimizing the amount of Obj.magicking we need to do. *) - let rec split_addable : type r. + let rec split_addable : + type r. 
int -> (addable, r, 'e) t -> (addable, 'e) t_any_r option * 'e * (addable, 'e) t_any_r option = @@ -217,21 +222,19 @@ module Digit = struct in (Some (Mk_any_r cons_res'), m, rhs) | None, m, rhs -> - (Some (Mk_any_r (One head)), m, rhs) ) + (Some (Mk_any_r (One head)), m, rhs)) (fun (One a) -> if acc + measure' a >= target then (None, a, None) - else failwith "Digit.split index out of bounds" ) + else failwith "Digit.split index out of bounds") t in addable_elim (fun t' -> let lhs, m, rhs = split_addable acc t' in - (Option.map ~f:broaden_any_r lhs, m, Option.map ~f:broaden_any_r rhs) - ) + (Option.map ~f:broaden_any_r lhs, m, Option.map ~f:broaden_any_r rhs)) (fun t' -> let head, Mk_any_r tail = uncons t' in - if acc + measure' head >= target then - (None, head, Some (Mk_any_ar tail)) + if acc + measure' head >= target then (None, head, Some (Mk_any_ar tail)) else let lhs, m, rhs = split_addable (acc + measure' head) tail in match lhs with @@ -240,7 +243,7 @@ module Digit = struct | Some (Mk_any_r lhs') -> ( Some (broaden_any_a (cons head lhs')) , m - , Option.map ~f:broaden_any_r rhs ) ) + , Option.map ~f:broaden_any_r rhs )) t let opt_to_list : 'a t_any_ar option -> 'a list = function @@ -258,7 +261,7 @@ module Digit = struct ~f:(fun (Mk_any_ar dig, target) -> let lhs_opt, m, rhs_opt = split Fn.id target 0 dig in let lhs', rhs' = (opt_to_list lhs_opt, opt_to_list rhs_opt) in - [%test_eq: int list] (lhs' @ [m] @ rhs') (to_list dig) ) + [%test_eq: int list] (lhs' @ [ m ] @ rhs') (to_list dig)) let%test_unit "Digit.split matches list implementation" = Quickcheck.test @@ -266,7 +269,7 @@ module Digit = struct Tuple2.sexp_of_t (List.sexp_of_t Int.sexp_of_t) Int.sexp_of_t - (to_list dig, idx) ) + (to_list dig, idx)) (let open Quickcheck.Generator.Let_syntax in let%bind (Mk_any_ar dig) = gen_any_ar in let%bind idx = Int.gen_incl 1 (List.length @@ to_list dig) in @@ -286,7 +289,7 @@ module Digit = struct [%test_eq: int] m_list m_fseq ; [%test_eq: int list] rhs_list 
rhs_fseq' ; [%test_eq: int] (List.length lhs_fseq') (idx - 1) ; - [%test_eq: int] (List.length rhs_fseq') (List.length as_list - idx) ) + [%test_eq: int] (List.length rhs_fseq') (List.length as_list - idx)) (* See comment below about measures for why index 0 is an edge case. *) let%test_unit "Digit.split with index 0 is trivial" = @@ -301,7 +304,7 @@ module Digit = struct | None -> [%test_eq: int list] [] (List.tl_exn as_list) | Some (Mk_any_ar rhs') -> - [%test_eq: int list] (to_list rhs') (List.tl_exn as_list) ) + [%test_eq: int list] (to_list rhs') (List.tl_exn as_list)) let%test _ = match split Fn.id 1 0 (One 1) with None, 1, None -> true | _ -> false @@ -341,8 +344,7 @@ module Node = struct let measure : 'e t -> int = fun t -> match t with Two (m, _, _) -> m | Three (m, _, _, _) -> m - let to_digit : 'e t -> (Digit.addable, Digit.removable, 'e) Digit.t = - function + let to_digit : 'e t -> (Digit.addable, Digit.removable, 'e) Digit.t = function | Two (_m, a, b) -> Digit.Two (a, b) | Three (_m, a, b, c) -> @@ -475,12 +477,12 @@ let rec cons' : 'e. ('e -> int) -> 'e -> 'e t -> 'e t = Digit.addable_elim (fun prefix' -> let (Mk_any_a prefix'') = Digit.cons v prefix' in - deep measure' prefix'' (Lazy.force middle) suffix ) + deep measure' prefix'' (Lazy.force middle) suffix) (fun (Four (a, b, c, d)) -> deep measure' (Digit.Two (v, a)) (cons' Node.measure (Node.mk_3 measure' b c d) @@ Lazy.force middle) - suffix ) + suffix) prefix let cons : 'e -> 'e t -> 'e t = fun x xs -> cons' (Fn.const 1) x xs @@ -497,11 +499,11 @@ let rec snoc' : 'e. 
('e -> int) -> 'e t -> 'e -> 'e t = Digit.addable_elim (fun digit -> let (Mk_any_a digit') = Digit.snoc digit v in - deep measure' prefix (Lazy.force middle) digit' ) + deep measure' prefix (Lazy.force middle) digit') (fun (Four (a, b, c, d)) -> deep measure' prefix (snoc' Node.measure (Lazy.force middle) @@ Node.mk_3 measure' a b c) - (Digit.Two (d, v)) ) + (Digit.Two (d, v))) suffix let snoc : 'e t -> 'e -> 'e t = fun xs x -> snoc' (Fn.const 1) xs x @@ -523,13 +525,13 @@ let rec uncons' : 'e. ('e -> int) -> 'e t -> ('e * 'e t) option = Digit.removable_elim (fun prefix' -> let head, Mk_any_r prefix_rest = Digit.uncons prefix' in - Some (head, deep measure' prefix_rest (force middle) suffix) ) + Some (head, deep measure' prefix_rest (force middle) suffix)) (fun (One e) -> match uncons' Node.measure (force middle) with | None -> Some (e, tree_of_digit measure' suffix) | Some (node, rest) -> - Some (e, deep measure' (Node.to_digit node) rest suffix) ) + Some (e, deep measure' (Node.to_digit node) rest suffix)) prefix (** Uncons for the top level trees. *) @@ -547,13 +549,13 @@ let rec unsnoc' : 'e. ('e -> int) -> 'e t -> ('e t * 'e) option = Digit.removable_elim (fun suffix' -> let Mk_any_r liat, deah = Digit.unsnoc suffix' in - Some (deep measure' prefix (force middle) liat, deah) ) + Some (deep measure' prefix (force middle) liat, deah)) (fun (One e) -> match unsnoc' Node.measure (force middle) with | None -> Some (tree_of_digit measure' prefix, e) | Some (rest, node) -> - Some (deep measure' prefix rest (Node.to_digit node), e) ) + Some (deep measure' prefix rest (Node.to_digit node), e)) suffix (** Mirror of uncons. 
*) @@ -561,8 +563,7 @@ let unsnoc : 'e t -> ('e t * 'e) option = fun t -> unsnoc' (Fn.const 1) t let head_exn : 'e t -> 'e = fun t -> Option.value_exn (uncons t) |> Tuple2.get1 -let last_exn : 'e t -> 'e = - fun t -> unsnoc t |> Option.value_exn |> Tuple2.get2 +let last_exn : 'e t -> 'e = fun t -> unsnoc t |> Option.value_exn |> Tuple2.get2 let rec foldl : ('a -> 'e -> 'a) -> 'a -> 'e t -> 'a = fun f acc t -> @@ -641,11 +642,11 @@ let rec split : 'e. ('e -> int) -> 'e t -> int -> int -> 'e t * 'e * 'e t = , (* right part of digit split + subtree + suffix *) match dr with | None -> ( - match uncons' Node.measure @@ force middle with - | None -> - tree_of_digit measure' suffix - | Some (head, tail) -> - deep measure' (Node.to_digit head) tail suffix ) + match uncons' Node.measure @@ force middle with + | None -> + tree_of_digit measure' suffix + | Some (head, tail) -> + deep measure' (Node.to_digit head) tail suffix ) | Some (Mk_any_ar dig) -> deep measure' dig (force middle) suffix ) else @@ -664,11 +665,11 @@ let rec split : 'e. ('e -> int) -> 'e t -> int -> int -> 'e t * 'e * 'e t = them midpoint of the subtree *) ( match m_lhs with | None -> ( - match unsnoc' Node.measure lhs with - | None -> - tree_of_digit measure' prefix - | Some (liat, deah) -> - deep measure' prefix liat (Node.to_digit deah) ) + match unsnoc' Node.measure lhs with + | None -> + tree_of_digit measure' prefix + | Some (liat, deah) -> + deep measure' prefix liat (Node.to_digit deah) ) | Some (Mk_any_ar dig) -> deep measure' prefix lhs dig ) , (* midpoint of the split of the subtree *) @@ -677,11 +678,11 @@ let rec split : 'e. 
('e -> int) -> 'e t -> int -> int -> 'e t * 'e * 'e t = split of the subtree + suffix *) match m_rhs with | None -> ( - match uncons' Node.measure rhs with - | None -> - tree_of_digit measure' suffix - | Some (head, tail) -> - deep measure' (Node.to_digit head) tail suffix ) + match uncons' Node.measure rhs with + | None -> + tree_of_digit measure' suffix + | Some (head, tail) -> + deep measure' (Node.to_digit head) tail suffix ) | Some (Mk_any_ar dig) -> deep measure' dig rhs suffix ) else @@ -692,11 +693,11 @@ let rec split : 'e. ('e -> int) -> 'e t -> int -> int -> 'e t * 'e * 'e t = ( (* prefix + subtree + left part of digit split *) ( match dl with | None -> ( - match unsnoc' Node.measure (force middle) with - | None -> - tree_of_digit measure' prefix - | Some (liat, deah) -> - deep measure' prefix liat (Node.to_digit deah) ) + match unsnoc' Node.measure (force middle) with + | None -> + tree_of_digit measure' prefix + | Some (liat, deah) -> + deep measure' prefix liat (Node.to_digit deah) ) | Some (Mk_any_ar dig) -> deep measure' prefix (force middle) dig ) , (* midpoint of digit split *) @@ -761,14 +762,14 @@ let%test_unit "list isomorphism - cons" = let xs_fseq = List.fold_right xs ~f:cons ~init:empty in assert_measure (Fn.const 1) xs_fseq ; [%test_eq: int list] xs (to_list xs_fseq) ; - [%test_eq: int] (List.length xs) (length xs_fseq) ) + [%test_eq: int] (List.length xs) (length xs_fseq)) let%test_unit "list isomorphism - snoc" = Quickcheck.test (big_list Int.quickcheck_generator) ~f:(fun xs -> let xs_fseq = List.fold_left xs ~init:empty ~f:snoc in assert_measure (Fn.const 1) xs_fseq ; [%test_eq: int list] xs (to_list xs_fseq) ; - [%test_eq: int] (List.length xs) (length xs_fseq) ) + [%test_eq: int] (List.length xs) (length xs_fseq)) let%test_unit "alternating cons/snoc" = Quickcheck.test @@ -784,9 +785,9 @@ let%test_unit "alternating cons/snoc" = | `A x :: rest -> go (x :: list) (cons x fseq) rest | `B x :: rest -> - go (list @ [x]) (snoc fseq x) rest + 
go (list @ [ x ]) (snoc fseq x) rest in - go [] empty cmds ) + go [] empty cmds) let%test_unit "split properties" = let gen = @@ -804,7 +805,7 @@ let%test_unit "split properties" = ( Sequence.range ~start:`inclusive ~stop:`inclusive 1 5 |> Sequence.filter_map ~f:(fun offset -> let res = idx - offset in - if res >= 0 then Some (xs, res) else None ) ) ) + if res >= 0 then Some (xs, res) else None) )) in Quickcheck.test gen ~shrink_attempts:`Exhaustive ~sexp_of:[%sexp_of: int list * int] ~shrinker ~f:(fun (xs, idx) -> @@ -823,7 +824,7 @@ let%test_unit "split properties" = [%test_eq: int list] split_r_list split_r_fseq' ; [%test_eq: int] (List.length split_l_fseq') (length split_l_fseq) ; [%test_eq: int] (List.length split_r_fseq') (length split_r_fseq) ; - [%test_eq: int] (length split_l_fseq + length split_r_fseq) len ) + [%test_eq: int] (length split_l_fseq + length split_r_fseq) len) (* Exercise all the functions that generate sequences, in random combinations. *) let%test_module "random sequence generation, with splits" = @@ -871,7 +872,8 @@ let%test_module "random sequence generation, with splits" = [ List.take acts (List.length acts / 2) ; List.take acts (List.length acts - 1) ; List.map acts ~f:(function `Snoc x -> `Cons x | x -> x) - ; List.map acts ~f:(function `Cons x -> `Snoc x | x -> x) ] ) + ; List.map acts ~f:(function `Cons x -> `Snoc x | x -> x) + ]) in Quickcheck.test gen ~trials:100_000 ~shrinker ~sexp_of:(List.sexp_of_t sexp_of_action) ~f:(fun acts -> @@ -892,5 +894,5 @@ let%test_module "random sequence generation, with splits" = | `Split_take_right idx :: acts_rest -> go (assert_m @@ Tuple2.get2 @@ split_at fseq idx) acts_rest in - go empty acts ) + go empty acts) end ) diff --git a/src/lib/network_pool/indexed_pool.ml b/src/lib/network_pool/indexed_pool.ml index 2ab4d6b0a47..62fd3255b10 100644 --- a/src/lib/network_pool/indexed_pool.ml +++ b/src/lib/network_pool/indexed_pool.ml @@ -27,11 +27,11 @@ let replace_fee : Currency.Fee.t = 
Currency.Fee.of_int 5_000_000_000 of the queues in all_by_sender *) type t = - { applicable_by_fee: + { applicable_by_fee : Transaction_hash.User_command_with_valid_signature.Set.t Currency.Fee.Map.t (** Transactions valid against the current ledger, indexed by fee. *) - ; all_by_sender: + ; all_by_sender : ( Transaction_hash.User_command_with_valid_signature.t F_sequence.t * Currency.Amount.t ) Account_id.Map.t @@ -39,21 +39,21 @@ type t = execute them -- plus any currency spent from this account by transactions from other accounts -- indexed by sender account. Ordered by nonce inside the accounts. *) - ; all_by_fee: + ; all_by_fee : Transaction_hash.User_command_with_valid_signature.Set.t Currency.Fee.Map.t (** All transactions in the pool indexed by fee. *) - ; all_by_hash: + ; all_by_hash : Transaction_hash.User_command_with_valid_signature.t Transaction_hash.Map.t - ; transactions_with_expiration: - Transaction_hash.User_command_with_valid_signature.Set.t - Global_slot.Map.t + ; transactions_with_expiration : + Transaction_hash.User_command_with_valid_signature.Set.t Global_slot.Map.t (*Only transactions that have an expiry*) - ; size: int - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; consensus_constants: Consensus.Constants.t - ; time_controller: Block_time.Controller.t } + ; size : int + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; consensus_constants : Consensus.Constants.t + ; time_controller : Block_time.Controller.t + } [@@deriving sexp_of, equal, compare] module Command_error = struct @@ -62,16 +62,17 @@ module Command_error = struct [ `Expected of Account.Nonce.t | `Between of Account.Nonce.t * Account.Nonce.t ] * Account.Nonce.t - | Insufficient_funds of [`Balance of Currency.Amount.t] * Currency.Amount.t + | Insufficient_funds of + [ `Balance of Currency.Amount.t ] * Currency.Amount.t | (* NOTE: don't punish for this, attackers can induce nodes to banlist each other that way! 
*) Insufficient_replace_fee of - [`Replace_fee of Currency.Fee.t] * Currency.Fee.t + [ `Replace_fee of Currency.Fee.t ] * Currency.Fee.t | Overflow | Bad_token | Expired of - [`Valid_until of Mina_numbers.Global_slot.t] - * [`Current_global_slot of Mina_numbers.Global_slot.t] + [ `Valid_until of Mina_numbers.Global_slot.t ] + * [ `Current_global_slot of Mina_numbers.Global_slot.t ] | Unwanted_fee_token of Token_id.t | Invalid_transaction [@@deriving sexp_of, to_yojson] @@ -90,24 +91,24 @@ let currency_consumed_unchecked : let amt = match cmd with | Signed_command c -> ( - match c.payload.body with - | Payment ({amount; _} as payload) -> - if - Token_id.equal c.payload.common.fee_token - (Payment_payload.token payload) - then - (* The fee-payer is also the sender account, include the amount. *) - amount - else (* The payment won't affect the balance of this account. *) + match c.payload.body with + | Payment ({ amount; _ } as payload) -> + if + Token_id.equal c.payload.common.fee_token + (Payment_payload.token payload) + then + (* The fee-payer is also the sender account, include the amount. *) + amount + else (* The payment won't affect the balance of this account. 
*) + zero + | Stake_delegation _ -> zero - | Stake_delegation _ -> - zero - | Create_new_token _ -> - Currency.Amount.of_fee constraint_constants.account_creation_fee - | Create_token_account _ -> - Currency.Amount.of_fee constraint_constants.account_creation_fee - | Mint_tokens _ -> - zero ) + | Create_new_token _ -> + Currency.Amount.of_fee constraint_constants.account_creation_fee + | Create_token_account _ -> + Currency.Amount.of_fee constraint_constants.account_creation_fee + | Mint_tokens _ -> + zero ) | Snapp_command c -> ( let open Snapp_command.Party in let f (x1 : ((Body.t, _) Predicated.Poly.t, _) Authorized.Poly.t) @@ -115,7 +116,7 @@ let currency_consumed_unchecked : token_id (fee_payment : Mina_base.Other_fee_payer.t option) = let fee_payer = match fee_payment with - | Some {payload= {pk; token_id; _}; _} -> + | Some { payload = { pk; token_id; _ }; _ } -> Some (Account_id.create pk token_id) | None -> None @@ -133,11 +134,11 @@ let currency_consumed_unchecked : (* Fee payer is distinct from this account. 
*) None | _ -> ( - match p.delta.sgn with - | Pos -> - None - | Neg -> - Some p.delta.magnitude ) ) + match p.delta.sgn with + | Pos -> + None + | Neg -> + Some p.delta.magnitude )) |> Option.value ~default:Currency.Amount.zero in match c with @@ -177,7 +178,8 @@ module For_tests = struct ; all_by_hash ; size ; constraint_constants - ; _ } -> + ; _ + } -> let assert_all_by_fee tx = if Set.mem @@ -211,7 +213,7 @@ module For_tests = struct [%test_eq: Transaction_hash.User_command_with_valid_signature.t] tx tx' ; assert_all_by_fee tx ; - assert_all_by_hash tx ) ) ; + assert_all_by_hash tx)) ; Map.iteri all_by_sender ~f:(fun ~key:fee_payer ~data:(tx_seq, currency_reserved) -> assert (F_sequence.length tx_seq > 0) ; @@ -250,13 +252,13 @@ module For_tests = struct Currency.Amount.( Option.value_exn (currency_consumed ~constraint_constants tx) - + currency_acc) ) ) + + currency_acc) )) ( User_command.nonce_exn applicable_unchecked , Option.value_exn (currency_consumed ~constraint_constants applicable) ) inapplicables in - [%test_eq: Currency.Amount.t] currency_reserved currency_reserved' ) ; + [%test_eq: Currency.Amount.t] currency_reserved currency_reserved') ; let check_sender_applicable fee tx = let unchecked = Transaction_hash.User_command_with_valid_signature.command tx @@ -276,14 +278,12 @@ module For_tests = struct |> User_command.fee_exn )) applicable ) ; let first_nonce = - applicable - |> Transaction_hash.User_command_with_valid_signature.command + applicable |> Transaction_hash.User_command_with_valid_signature.command |> User_command.nonce_exn |> Account_nonce.to_int in let _split_l, split_r = F_sequence.split_at sender_txs - ( Account_nonce.to_int (User_command.nonce_exn unchecked) - - first_nonce ) + (Account_nonce.to_int (User_command.nonce_exn unchecked) - first_nonce) in let tx' = F_sequence.head_exn split_r in [%test_eq: Transaction_hash.User_command_with_valid_signature.t] tx tx' @@ -291,31 +291,32 @@ module For_tests = struct Map.iteri all_by_fee 
~f:(fun ~key:fee ~data:tx_set -> Set.iter tx_set ~f:(fun tx -> check_sender_applicable fee tx ; - assert_all_by_hash tx ) ) ; + assert_all_by_hash tx)) ; Map.iter all_by_hash ~f:(fun tx -> check_sender_applicable (User_command.fee_exn (Transaction_hash.User_command_with_valid_signature.command tx)) tx ; - assert_all_by_fee tx ) ; + assert_all_by_fee tx) ; [%test_eq: int] (Map.length all_by_hash) size end let empty ~constraint_constants ~consensus_constants ~time_controller : t = - { applicable_by_fee= Currency.Fee.Map.empty - ; all_by_sender= Account_id.Map.empty - ; all_by_fee= Currency.Fee.Map.empty - ; all_by_hash= Transaction_hash.Map.empty - ; transactions_with_expiration= Global_slot.Map.empty - ; size= 0 + { applicable_by_fee = Currency.Fee.Map.empty + ; all_by_sender = Account_id.Map.empty + ; all_by_fee = Currency.Fee.Map.empty + ; all_by_hash = Transaction_hash.Map.empty + ; transactions_with_expiration = Global_slot.Map.empty + ; size = 0 ; constraint_constants ; consensus_constants - ; time_controller } + ; time_controller + } let size : t -> int = fun t -> t.size let min_fee : t -> Currency.Fee.t option = - fun {all_by_fee; _} -> Option.map ~f:Tuple2.get1 @@ Map.min_elt all_by_fee + fun { all_by_fee; _ } -> Option.map ~f:Tuple2.get1 @@ Map.min_elt all_by_fee let member : t -> Transaction_hash.User_command.t -> bool = fun t cmd -> @@ -326,11 +327,11 @@ let all_from_account : t -> Account_id.t -> Transaction_hash.User_command_with_valid_signature.t list = - fun {all_by_sender; _} account_id -> + fun { all_by_sender; _ } account_id -> Option.value_map ~default:[] (Map.find all_by_sender account_id) ~f:(fun (user_commands, _) -> F_sequence.to_list user_commands) -let get_all {all_by_hash; _} : +let get_all { all_by_hash; _ } : Transaction_hash.User_command_with_valid_signature.t list = Map.data all_by_hash @@ -338,7 +339,7 @@ let find_by_hash : t -> Transaction_hash.t -> Transaction_hash.User_command_with_valid_signature.t option = - fun {all_by_hash; _} 
hash -> Map.find all_by_hash hash + fun { all_by_hash; _ } hash -> Map.find all_by_hash hash let current_global_slot t = let current_time = Block_time.now t.time_controller in @@ -348,7 +349,7 @@ let current_global_slot t = |> to_global_slot) in match t.constraint_constants.fork with - | Some {previous_global_slot; _} -> + | Some { previous_global_slot; _ } -> Mina_numbers.Global_slot.(add previous_global_slot current_slot) | None -> current_slot @@ -393,7 +394,7 @@ let remove_applicable_exn : Transaction_hash.User_command_with_valid_signature.command cmd |> User_command.fee_exn in - {t with applicable_by_fee= Map_set.remove_exn t.applicable_by_fee fee cmd} + { t with applicable_by_fee = Map_set.remove_exn t.applicable_by_fee fee cmd } (* Remove a command from the all_by_fee and all_by_hash fields, and decrement size. This may break an invariant. *) @@ -405,13 +406,14 @@ let remove_all_by_fee_and_hash_and_expiration_exn : |> User_command.fee_exn in { t with - all_by_fee= Map_set.remove_exn t.all_by_fee fee cmd - ; all_by_hash= + all_by_fee = Map_set.remove_exn t.all_by_fee fee cmd + ; all_by_hash = Map.remove t.all_by_hash (Transaction_hash.User_command_with_valid_signature.hash cmd) - ; transactions_with_expiration= + ; transactions_with_expiration = remove_from_expiration_exn t.transactions_with_expiration cmd - ; size= t.size - 1 } + ; size = t.size - 1 + } (* Remove a given command from the pool, as well as any commands that depend on it. 
Called from revalidate and remove_lowest_fee, and when replacing @@ -420,7 +422,7 @@ let remove_with_dependents_exn : t -> Transaction_hash.User_command_with_valid_signature.t -> Transaction_hash.User_command_with_valid_signature.t Sequence.t * t = - fun ({constraint_constants; _} as t) cmd -> + fun ({ constraint_constants; _ } as t) cmd -> let unchecked = Transaction_hash.User_command_with_valid_signature.command cmd in @@ -444,7 +446,7 @@ let remove_with_dependents_exn : (* safe because we check for overflow when we add commands. *) (let open Option.Let_syntax in let%bind consumed = currency_consumed ~constraint_constants cmd' in - Currency.Amount.(consumed + acc)) ) + Currency.Amount.(consumed + acc))) Currency.Amount.zero drop_queue in let reserved_currency' = @@ -459,22 +461,22 @@ let remove_with_dependents_exn : in ( F_sequence.to_seq drop_queue , { t' with - all_by_sender= + all_by_sender = ( if not (F_sequence.is_empty keep_queue) then Map.set t'.all_by_sender ~key:sender ~data:(keep_queue, reserved_currency') else ( assert (Currency.Amount.(equal reserved_currency' zero)) ; Map.remove t'.all_by_sender sender ) ) - ; applicable_by_fee= + ; applicable_by_fee = ( if - Transaction_hash.User_command_with_valid_signature.equal first_cmd - cmd + Transaction_hash.User_command_with_valid_signature.equal first_cmd cmd then Map_set.remove_exn t'.applicable_by_fee (User_command.fee_exn unchecked) cmd - else t'.applicable_by_fee ) } ) + else t'.applicable_by_fee ) + } ) (** Drop commands from the end of the queue until the total currency consumed is <= the current balance. 
*) @@ -514,11 +516,12 @@ let revalidate : t -> (Account_id.t -> Account_nonce.t * Currency.Amount.t) -> t * Transaction_hash.User_command_with_valid_signature.t Sequence.t = - fun ({constraint_constants; _} as t) f -> + fun ({ constraint_constants; _ } as t) f -> Map.fold t.all_by_sender ~init:(t, Sequence.empty) - ~f:(fun ~key:sender - ~data:(queue, currency_reserved) - ((t', dropped_acc) as acc) + ~f:(fun + ~key:sender + ~data:(queue, currency_reserved) + ((t', dropped_acc) as acc) -> let current_nonce, current_balance = f sender in let first_cmd = F_sequence.head_exn queue in @@ -540,7 +543,7 @@ let revalidate : Currency.Amount.( c - Option.value_exn - (currency_consumed ~constraint_constants cmd)) ) + (currency_consumed ~constraint_constants cmd))) currency_reserved drop_queue in let keep_queue', currency_reserved'', dropped_for_balance = @@ -564,10 +567,11 @@ let revalidate : ~f:remove_all_by_fee_and_hash_and_expiration_exn in ( { t'' with - all_by_sender= + all_by_sender = Map.set t''.all_by_sender ~key:sender - ~data:(keep_queue', currency_reserved'') } - , Sequence.append dropped_acc to_drop ) ) + ~data:(keep_queue', currency_reserved'') + } + , Sequence.append dropped_acc to_drop )) let remove_expired t : Transaction_hash.User_command_with_valid_signature.t Sequence.t * t = @@ -582,7 +586,7 @@ let remove_expired t : if member t (Transaction_hash.User_command.of_checked cmd) then let removed, t' = remove_with_dependents_exn t cmd in (Sequence.append dropped_acc removed, t') - else acc' ) ) + else acc')) let handle_committed_txn : t @@ -595,7 +599,7 @@ let handle_committed_txn : * Transaction_hash.User_command_with_valid_signature.t Sequence.t ] ) Result.t = - fun ({constraint_constants; _} as t) committed ~fee_payer_balance + fun ({ constraint_constants; _ } as t) committed ~fee_payer_balance ~fee_payer_nonce -> let committed' = Transaction_hash.User_command_with_valid_signature.command committed @@ -652,20 +656,21 @@ let handle_committed_txn : let 
set_all_by_sender account_id commands currency_reserved t = match F_sequence.uncons commands with | None -> - {t with all_by_sender= Map.remove t.all_by_sender account_id} + { t with all_by_sender = Map.remove t.all_by_sender account_id } | Some (head_cmd, _) -> { t with - all_by_sender= + all_by_sender = Map.set t.all_by_sender ~key:account_id ~data:(commands, currency_reserved) - ; applicable_by_fee= + ; applicable_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.applicable_by_fee ( head_cmd |> Transaction_hash.User_command_with_valid_signature .command |> User_command.fee_exn ) - head_cmd } + head_cmd + } in let t3 = set_all_by_sender fee_payer new_queued_cmds currency_reserved'' t2 @@ -723,7 +728,7 @@ let rec add_from_gossip_exn : * Transaction_hash.User_command_with_valid_signature.t Sequence.t , Command_error.t ) Result.t = - fun ({constraint_constants; consensus_constants; time_controller; _} as t) + fun ({ constraint_constants; consensus_constants; time_controller; _ } as t) ~verify cmd0 current_nonce balance -> let open Command_error in let open Result.Let_syntax in @@ -781,28 +786,28 @@ let rec add_from_gossip_exn : in let%map cmd = verified () in ( cmd - , { applicable_by_fee= + , { applicable_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.applicable_by_fee fee cmd - ; all_by_sender= + ; all_by_sender = Map.set t.all_by_sender ~key:fee_payer ~data:(F_sequence.singleton cmd, consumed) - ; all_by_fee= + ; all_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.all_by_fee fee cmd - ; all_by_hash= + ; all_by_hash = Map.set t.all_by_hash - ~key: - (Transaction_hash.User_command_with_valid_signature.hash cmd) + ~key:(Transaction_hash.User_command_with_valid_signature.hash cmd) ~data:cmd - ; transactions_with_expiration= + ; transactions_with_expiration = add_to_expiration t.transactions_with_expiration cmd - ; size= t.size + 1 + ; size = t.size + 1 ; 
constraint_constants ; consensus_constants - ; time_controller } + ; time_controller + } , Sequence.empty ) | Some (queued_cmds, reserved_currency) -> (* commands queued for this sender *) @@ -828,21 +833,22 @@ let rec add_from_gossip_exn : let%map cmd = verified () in ( cmd , { t with - all_by_sender= + all_by_sender = Map.set t.all_by_sender ~key:fee_payer ~data:(F_sequence.snoc queued_cmds cmd, reserved_currency') - ; all_by_fee= + ; all_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.all_by_fee fee cmd - ; all_by_hash= + ; all_by_hash = Map.set t.all_by_hash ~key: (Transaction_hash.User_command_with_valid_signature.hash cmd) ~data:cmd - ; transactions_with_expiration= + ; transactions_with_expiration = add_to_expiration t.transactions_with_expiration cmd - ; size= t.size + 1 } + ; size = t.size + 1 + } , Sequence.empty ) else (* we're replacing a command *) @@ -923,8 +929,7 @@ let rec add_from_gossip_exn : Ok (t', increment, Sequence.empty) | Some (cmd, dropped), Some _ -> ( let cmd_unchecked = - Transaction_hash.User_command_with_valid_signature.command - cmd + Transaction_hash.User_command_with_valid_signature.command cmd in let replace_fee = User_command.fee_exn cmd_unchecked in match Currency.Fee.(increment - replace_fee) with @@ -964,7 +969,8 @@ let add_from_backtrack : t -> Transaction_hash.User_command_with_valid_signature.t -> (t, Command_error.t) Result.t = - fun ({constraint_constants; consensus_constants; time_controller; _} as t) cmd -> + fun ({ constraint_constants; consensus_constants; time_controller; _ } as t) + cmd -> let open Result.Let_syntax in let unchecked = Transaction_hash.User_command_with_valid_signature.command cmd @@ -977,30 +983,31 @@ let add_from_backtrack : in match Map.find t.all_by_sender fee_payer with | None -> - { all_by_sender= + { all_by_sender = (* If the command comes from backtracking, then we know it doesn't cause overflow, so it's OK to throw here. 
*) Map.add_exn t.all_by_sender ~key:fee_payer ~data:(F_sequence.singleton cmd, consumed) - ; all_by_fee= + ; all_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.all_by_fee fee cmd - ; all_by_hash= + ; all_by_hash = Map.set t.all_by_hash ~key:(Transaction_hash.User_command_with_valid_signature.hash cmd) ~data:cmd - ; applicable_by_fee= + ; applicable_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t.applicable_by_fee fee cmd - ; transactions_with_expiration= + ; transactions_with_expiration = add_to_expiration t.transactions_with_expiration cmd - ; size= t.size + 1 + ; size = t.size + 1 ; constraint_constants ; consensus_constants - ; time_controller } + ; time_controller + } | Some (queue, currency_reserved) -> let first_queued = F_sequence.head_exn queue in if @@ -1015,34 +1022,35 @@ let add_from_backtrack : @@ sprintf !"indexed pool nonces inconsistent when adding from backtrack. \ Trying to add \ - %{sexp:Transaction_hash.User_command_with_valid_signature.t} \ - to %{sexp: t}" + %{sexp:Transaction_hash.User_command_with_valid_signature.t} to \ + %{sexp: t}" cmd t ; let t' = remove_applicable_exn t first_queued in - { applicable_by_fee= + { applicable_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t'.applicable_by_fee fee cmd - ; all_by_fee= + ; all_by_fee = Map_set.insert (module Transaction_hash.User_command_with_valid_signature) t'.all_by_fee fee cmd - ; all_by_hash= + ; all_by_hash = Map.set t.all_by_hash ~key:(Transaction_hash.User_command_with_valid_signature.hash cmd) ~data:cmd - ; all_by_sender= + ; all_by_sender = Map.set t'.all_by_sender ~key:fee_payer ~data: ( F_sequence.cons cmd queue , Option.value_exn Currency.Amount.(currency_reserved + consumed) ) - ; transactions_with_expiration= + ; transactions_with_expiration = add_to_expiration t.transactions_with_expiration cmd - ; size= t.size + 1 + ; size = t.size + 1 ; constraint_constants ; 
consensus_constants - ; time_controller } + ; time_controller + } let%test_module _ = ( module struct @@ -1106,7 +1114,7 @@ let%test_module _ = Sequence.t] dropped' (Sequence.singleton cmd) ; [%test_eq: t] ~equal pool pool'' | _ -> - failwith "should've succeeded" ) + failwith "should've succeeded") let%test_unit "sequential adds (all valid)" = let gen : @@ -1126,7 +1134,7 @@ let%test_module _ = Quickcheck.Shrinker.t = Quickcheck.Shrinker.create (fun (init_state, cmds) -> Sequence.singleton - (init_state, List.take cmds (List.length cmds - 1)) ) + (init_state, List.take cmds (List.length cmds - 1))) in Quickcheck.test gen ~sexp_of: @@ -1142,7 +1150,7 @@ let%test_module _ = ~f:(fun (kp, balance, nonce, _) -> let compressed = Public_key.compress kp.public_key in Hashtbl.add_exn balances ~key:compressed ~data:balance ; - Hashtbl.add_exn nonces ~key:compressed ~data:nonce ) ; + Hashtbl.add_exn nonces ~key:compressed ~data:nonce) ; let pool = ref empty in let rec go cmds_acc = match cmds_acc with @@ -1150,8 +1158,7 @@ let%test_module _ = () | cmd :: rest -> ( let unchecked = - Transaction_hash.User_command_with_valid_signature.command - cmd + Transaction_hash.User_command_with_valid_signature.command cmd in let account_id = User_command.fee_payer unchecked in let pk = Account_id.public_key account_id in @@ -1208,20 +1215,20 @@ let%test_module _ = , `Current_global_slot current_global_slot )) -> failwithf !"Expired user command. 
Current global slot is \ - %{sexp:Mina_numbers.Global_slot.t} but user command \ - is only valid until %{sexp:Mina_numbers.Global_slot.t}" + %{sexp:Mina_numbers.Global_slot.t} but user command is \ + only valid until %{sexp:Mina_numbers.Global_slot.t}" current_global_slot valid_until () ) in - go cmds ) + go cmds) let%test_unit "replacement" = let modify_payment (c : User_command.t) ~sender ~common:fc ~body:fb = let modified_payload : Signed_command.Payload.t = match c with - | Signed_command {payload= {body= Payment payment_payload; common}; _} - -> - { common= fc common - ; body= Signed_command.Payload.Body.Payment (fb payment_payload) + | Signed_command + { payload = { body = Payment payment_payload; common }; _ } -> + { common = fc common + ; body = Signed_command.Payload.Body.Payment (fb payment_payload) } | _ -> failwith "generated user command that wasn't a payment" @@ -1268,8 +1275,8 @@ let%test_module _ = in let cmd' = modify_payment cmd ~sender - ~common:(fun c -> {c with fee= Currency.Amount.to_fee fee}) - ~body:(fun b -> {b with amount}) + ~common:(fun c -> { c with fee = Currency.Amount.to_fee fee }) + ~body:(fun b -> { b with amount }) in let consumed = Option.value_exn (currency_consumed ~constraint_constants cmd') @@ -1303,9 +1310,9 @@ let%test_module _ = modify_payment replace_cmd_skeleton ~sender ~body:Fn.id ~common:(fun c -> { c with - fee= + fee = Currency.Fee.of_int ((10 + (5 * (size + 1))) * 1_000_000_000) - } ) + }) in (init_nonce, init_balance, setup_cmds, replace_cmd) in @@ -1334,7 +1341,7 @@ let%test_module _ = !"adding command %{sexp: \ Transaction_hash.User_command_with_valid_signature.t} \ failed" - cmd ) + cmd) in let replaced_idx = Account_nonce.to_int @@ -1355,7 +1362,7 @@ let%test_module _ = Option.( currency_consumed ~constraint_constants cmd >>= fun consumed -> - Currency.Amount.(consumed + consumed_so_far)) ) + Currency.Amount.(consumed + consumed_so_far))) in assert ( Currency.Amount.(currency_consumed_pre_replace <= 
init_balance) ) ; @@ -1376,8 +1383,8 @@ let%test_module _ = Currency.Amount.(a + replacer_currency_consumed)) in let add_res = - add_from_gossip_exn t (`Checked replace_cmd) init_nonce - init_balance ~verify:don't_verify + add_from_gossip_exn t (`Checked replace_cmd) init_nonce init_balance + ~verify:don't_verify in if Currency.Amount.(currency_consumed_post_replace <= init_balance) then @@ -1392,5 +1399,5 @@ let%test_module _ = | Error (Insufficient_funds _) -> () | _ -> - failwith "should've returned insufficient_funds" ) + failwith "should've returned insufficient_funds") end ) diff --git a/src/lib/network_pool/indexed_pool.mli b/src/lib/network_pool/indexed_pool.mli index 91f50fbf0a8..fa2fb734007 100644 --- a/src/lib/network_pool/indexed_pool.mli +++ b/src/lib/network_pool/indexed_pool.mli @@ -14,16 +14,17 @@ module Command_error : sig [ `Expected of Account.Nonce.t | `Between of Account.Nonce.t * Account.Nonce.t ] * Account.Nonce.t - | Insufficient_funds of [`Balance of Currency.Amount.t] * Currency.Amount.t + | Insufficient_funds of + [ `Balance of Currency.Amount.t ] * Currency.Amount.t | (* NOTE: don't punish for this, attackers can induce nodes to banlist each other that way! 
*) Insufficient_replace_fee of - [`Replace_fee of Currency.Fee.t] * Currency.Fee.t + [ `Replace_fee of Currency.Fee.t ] * Currency.Fee.t | Overflow | Bad_token | Expired of - [`Valid_until of Mina_numbers.Global_slot.t] - * [`Current_global_slot of Mina_numbers.Global_slot.t] + [ `Valid_until of Mina_numbers.Global_slot.t ] + * [ `Current_global_slot of Mina_numbers.Global_slot.t ] | Unwanted_fee_token of Token_id.t | Invalid_transaction [@@deriving sexp_of, to_yojson] @@ -78,8 +79,7 @@ val handle_committed_txn : -> ( t * Transaction_hash.User_command_with_valid_signature.t Sequence.t , [ `Queued_txns_by_sender of string - * Transaction_hash.User_command_with_valid_signature.t Sequence.t ] - ) + * Transaction_hash.User_command_with_valid_signature.t Sequence.t ] ) Result.t (** Add a command to the pool. Pass the current nonce for the account and @@ -116,9 +116,7 @@ val member : t -> Transaction_hash.User_command.t -> bool (** Get all the user commands sent by a user with a particular account *) val all_from_account : - t - -> Account_id.t - -> Transaction_hash.User_command_with_valid_signature.t list + t -> Account_id.t -> Transaction_hash.User_command_with_valid_signature.t list (** Get all user commands in the pool. 
*) val get_all : t -> Transaction_hash.User_command_with_valid_signature.t list diff --git a/src/lib/network_pool/intf.ml b/src/lib/network_pool/intf.ml index 6a142d78240..d54b5f89932 100644 --- a/src/lib/network_pool/intf.ml +++ b/src/lib/network_pool/intf.ml @@ -29,14 +29,15 @@ module type Resource_pool_base_intf = sig constraint_constants:Genesis_constants.Constraint_constants.t -> consensus_constants:Consensus.Constants.t -> time_controller:Block_time.Controller.t - -> frontier_broadcast_pipe:transition_frontier Option.t - Broadcast_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier Option.t Broadcast_pipe.Reader.t -> config:Config.t -> logger:Logger.t - -> tf_diff_writer:( transition_frontier_diff - , Strict_pipe.synchronous - , unit Deferred.t ) - Strict_pipe.Writer.t + -> tf_diff_writer: + ( transition_frontier_diff + , Strict_pipe.synchronous + , unit Deferred.t ) + Strict_pipe.Writer.t -> t end @@ -86,7 +87,7 @@ module type Resource_pool_diff_intf = sig pool -> verified Envelope.Incoming.t -> ( t * rejected - , [`Locally_generated of t * rejected | `Other of Error.t] ) + , [ `Locally_generated of t * rejected | `Other of Error.t ] ) Result.t Deferred.t @@ -109,7 +110,7 @@ module type Resource_pool_intf = sig remove it from the set of potentially-rebroadcastable item. 
*) val get_rebroadcastable : - t -> has_timed_out:(Time.t -> [`Timed_out | `Ok]) -> Diff.t list + t -> has_timed_out:(Time.t -> [ `Timed_out | `Ok ]) -> Diff.t list end (** A [Network_pool_base_intf] is the core implementation of a @@ -148,15 +149,16 @@ module type Network_pool_base_intf = sig -> constraint_constants:Genesis_constants.Constraint_constants.t -> consensus_constants:Consensus.Constants.t -> time_controller:Block_time.Controller.t - -> incoming_diffs:( resource_pool_diff Envelope.Incoming.t - * Mina_net2.Validation_callback.t ) - Strict_pipe.Reader.t - -> local_diffs:( resource_pool_diff - * ((resource_pool_diff * rejected_diff) Or_error.t -> unit) - ) - Strict_pipe.Reader.t - -> frontier_broadcast_pipe:transition_frontier Option.t - Broadcast_pipe.Reader.t + -> incoming_diffs: + ( resource_pool_diff Envelope.Incoming.t + * Mina_net2.Validation_callback.t ) + Strict_pipe.Reader.t + -> local_diffs: + ( resource_pool_diff + * ((resource_pool_diff * rejected_diff) Or_error.t -> unit) ) + Strict_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier Option.t Broadcast_pipe.Reader.t -> logger:Logger.t -> t @@ -164,13 +166,14 @@ module type Network_pool_base_intf = sig resource_pool -> logger:Logger.t -> constraint_constants:Genesis_constants.Constraint_constants.t - -> incoming_diffs:( resource_pool_diff Envelope.Incoming.t - * Mina_net2.Validation_callback.t ) - Strict_pipe.Reader.t - -> local_diffs:( resource_pool_diff - * ((resource_pool_diff * rejected_diff) Or_error.t -> unit) - ) - Strict_pipe.Reader.t + -> incoming_diffs: + ( resource_pool_diff Envelope.Incoming.t + * Mina_net2.Validation_callback.t ) + Strict_pipe.Reader.t + -> local_diffs: + ( resource_pool_diff + * ((resource_pool_diff * rejected_diff) Or_error.t -> unit) ) + Strict_pipe.Reader.t -> tf_diffs:transition_frontier_diff Strict_pipe.Reader.t -> t @@ -204,7 +207,7 @@ module type Snark_resource_pool_intf = sig -> work:Transaction_snark_work.Statement.t -> proof:Ledger_proof.t 
One_or_two.t -> fee:Fee_with_prover.t - -> [`Added | `Statement_not_referenced] + -> [ `Added | `Statement_not_referenced ] val request_proof : t @@ -213,8 +216,9 @@ module type Snark_resource_pool_intf = sig val verify_and_act : t - -> work:Transaction_snark_work.Statement.t - * Ledger_proof.t One_or_two.t Priced_proof.t + -> work: + Transaction_snark_work.Statement.t + * Ledger_proof.t One_or_two.t Priced_proof.t -> sender:Envelope.Sender.t -> bool Deferred.t @@ -240,16 +244,17 @@ module type Snark_pool_diff_intf = sig type verified = t [@@deriving compare, sexp] type compact = - { work: Transaction_snark_work.Statement.t - ; fee: Currency.Fee.t - ; prover: Signature_lib.Public_key.Compressed.t } + { work : Transaction_snark_work.Statement.t + ; fee : Currency.Fee.t + ; prover : Signature_lib.Public_key.Compressed.t + } [@@deriving yojson, hash] include Resource_pool_diff_intf - with type t := t - and type verified := t - and type pool := resource_pool + with type t := t + and type verified := t + and type pool := resource_pool val to_compact : t -> compact option @@ -293,9 +298,9 @@ module type Transaction_pool_diff_intf = sig include Resource_pool_diff_intf - with type t := t - and type pool := resource_pool - and type rejected = Rejected.t + with type t := t + and type pool := resource_pool + and type rejected = Rejected.t end module type Transaction_resource_pool_intf = sig @@ -309,8 +314,7 @@ module type Transaction_resource_pool_intf = sig -> verifier:Verifier.t -> Config.t - val member : - t -> Transaction_hash.User_command_with_valid_signature.t -> bool + val member : t -> Transaction_hash.User_command_with_valid_signature.t -> bool val transactions : logger:Logger.t diff --git a/src/lib/network_pool/map_set.ml b/src/lib/network_pool/map_set.ml index 9ddd0567b0e..6c3ac64ec46 100644 --- a/src/lib/network_pool/map_set.ml +++ b/src/lib/network_pool/map_set.ml @@ -26,4 +26,4 @@ let insert : | None -> Some (Set.singleton comparator v) | Some set -> - Some 
(Set.add set v) ) + Some (Set.add set v)) diff --git a/src/lib/network_pool/mocks.ml b/src/lib/network_pool/mocks.ml index 85c458eb7fc..b899de331e4 100644 --- a/src/lib/network_pool/mocks.ml +++ b/src/lib/network_pool/mocks.ml @@ -41,11 +41,12 @@ module Transition_frontier = struct end type t = - { refcount_table: table - ; best_tip_table: Transaction_snark_work.Statement.Hash_set.t - ; mutable ledger: Base_ledger.t - ; diff_writer: (diff Broadcast_pipe.Writer.t [@sexp.opaque]) - ; diff_reader: (diff Broadcast_pipe.Reader.t [@sexp.opaque]) } + { refcount_table : table + ; best_tip_table : Transaction_snark_work.Statement.Hash_set.t + ; mutable ledger : Base_ledger.t + ; diff_writer : (diff Broadcast_pipe.Writer.t[@sexp.opaque]) + ; diff_reader : (diff Broadcast_pipe.Reader.t[@sexp.opaque]) + } [@@deriving sexp] let add_statements table stmts = @@ -54,7 +55,7 @@ module Transition_frontier = struct | None -> Some 1 | Some count -> - Some (count + 1) ) ) + Some (count + 1))) (*Create tf with some statements referenced to be able to add snark work for those statements to the pool*) let create _stmts : t = @@ -63,15 +64,17 @@ module Transition_frontier = struct (*add_statements table stmts ;*) let diff_reader, diff_writer = Broadcast_pipe.create - { Extensions.Snark_pool_refcount.removed= 0 + { Extensions.Snark_pool_refcount.removed = 0 ; refcount_table - ; best_tip_table } + ; best_tip_table + } in { refcount_table ; best_tip_table - ; ledger= Account_id.Map.empty + ; ledger = Account_id.Map.empty ; diff_writer - ; diff_reader } + ; diff_reader + } let best_tip t = t.ledger @@ -93,9 +96,10 @@ module Transition_frontier = struct List.iter ~f:(Hash_set.add t.best_tip_table) stmts ; let%bind () = Broadcast_pipe.Writer.write t.diff_writer - { Transition_frontier.Extensions.Snark_pool_refcount.removed= 0 - ; refcount_table= t.refcount_table - ; best_tip_table= t.best_tip_table } + { Transition_frontier.Extensions.Snark_pool_refcount.removed = 0 + ; refcount_table = 
t.refcount_table + ; best_tip_table = t.best_tip_table + } in Async.Scheduler.yield_until_no_jobs_remain () @@ -103,9 +107,10 @@ module Transition_frontier = struct List.iter ~f:(Hash_set.remove t.best_tip_table) stmts ; let%bind () = Broadcast_pipe.Writer.write t.diff_writer - { Transition_frontier.Extensions.Snark_pool_refcount.removed= 0 - ; refcount_table= t.refcount_table - ; best_tip_table= t.best_tip_table } + { Transition_frontier.Extensions.Snark_pool_refcount.removed = 0 + ; refcount_table = t.refcount_table + ; best_tip_table = t.best_tip_table + } in Async.Scheduler.yield_until_no_jobs_remain () end diff --git a/src/lib/network_pool/network_pool_base.ml b/src/lib/network_pool/network_pool_base.ml index 497e316d48b..8d8ce3e4c91 100644 --- a/src/lib/network_pool/network_pool_base.ml +++ b/src/lib/network_pool/network_pool_base.ml @@ -7,15 +7,15 @@ module Make (Transition_frontier : sig type t end) (Resource_pool : Intf.Resource_pool_intf - with type transition_frontier := Transition_frontier.t) : + with type transition_frontier := Transition_frontier.t) : Intf.Network_pool_base_intf - with type resource_pool := Resource_pool.t - and type resource_pool_diff := Resource_pool.Diff.t - and type resource_pool_diff_verified := Resource_pool.Diff.verified - and type transition_frontier := Transition_frontier.t - and type transition_frontier_diff := Resource_pool.transition_frontier_diff - and type config := Resource_pool.Config.t - and type rejected_diff := Resource_pool.Diff.rejected = struct + with type resource_pool := Resource_pool.t + and type resource_pool_diff := Resource_pool.Diff.t + and type resource_pool_diff_verified := Resource_pool.Diff.verified + and type transition_frontier := Transition_frontier.t + and type transition_frontier_diff := Resource_pool.transition_frontier_diff + and type config := Resource_pool.Config.t + and type rejected_diff := Resource_pool.Diff.rejected = struct module Broadcast_callback = struct type t = | Local of @@ -36,14 
+36,14 @@ end) | Local f -> f (Error err) | External cb -> - fire_exn cb `Reject ) + fire_exn cb `Reject) let drop accepted rejected = Fn.compose Deferred.return (function | Local f -> f (Ok (accepted, rejected)) | External cb -> - fire_exn cb `Ignore ) + fire_exn cb `Ignore) let forward broadcast_pipe accepted rejected = function | Local f -> @@ -63,15 +63,16 @@ end) end type t = - { resource_pool: Resource_pool.t - ; logger: Logger.t - ; write_broadcasts: Resource_pool.Diff.t Linear_pipe.Writer.t - ; read_broadcasts: Resource_pool.Diff.t Linear_pipe.Reader.t - ; constraint_constants: Genesis_constants.Constraint_constants.t } + { resource_pool : Resource_pool.t + ; logger : Logger.t + ; write_broadcasts : Resource_pool.Diff.t Linear_pipe.Writer.t + ; read_broadcasts : Resource_pool.Diff.t Linear_pipe.Reader.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + } - let resource_pool {resource_pool; _} = resource_pool + let resource_pool { resource_pool; _ } = resource_pool - let broadcasts {read_broadcasts; _} = read_broadcasts + let broadcasts { read_broadcasts; _ } = read_broadcasts let create_rate_limiter () = Rate_limiter.create @@ -88,7 +89,8 @@ end) ~metadata: [ ( "diff" , Resource_pool.Diff.verified_to_yojson - @@ Envelope.Incoming.data diff ) ] ; + @@ Envelope.Incoming.data diff ) + ] ; drop diff' rejected cb ) else ( [%log' debug t.logger] "Rebroadcasting diff %s" @@ -103,21 +105,20 @@ end) | Error (`Other e) -> [%log' debug t.logger] "Refusing to rebroadcast. Pool diff apply feedback: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] ; Broadcast_callback.error e cb let log_rate_limiter_occasionally t rl = let time = Time_ns.Span.of_min 1. 
in every time (fun () -> [%log' debug t.logger] - ~metadata:[("rate_limiter", Rate_limiter.summary rl)] - !"%s $rate_limiter" Resource_pool.label ) + ~metadata:[ ("rate_limiter", Rate_limiter.summary rl) ] + !"%s $rate_limiter" Resource_pool.label) - let filter_verified (type a) ~log_rate_limiter - (pipe : a Strict_pipe.Reader.t) (t : t) + let filter_verified (type a) ~log_rate_limiter (pipe : a Strict_pipe.Reader.t) + (t : t) ~(f : - a -> Resource_pool.Diff.t Envelope.Incoming.t * Broadcast_callback.t) - : + a -> Resource_pool.Diff.t Envelope.Incoming.t * Broadcast_callback.t) : (Resource_pool.Diff.verified Envelope.Incoming.t * Broadcast_callback.t) Strict_pipe.Reader.t = let r, w = @@ -131,10 +132,10 @@ end) [%log' warn t.logger] "Dropping verified diff $diff due to pipe overflow" ~metadata: - [("diff", Resource_pool.Diff.verified_to_yojson diff)] ; + [ ("diff", Resource_pool.Diff.verified_to_yojson diff) ] ; Broadcast_callback.drop Resource_pool.Diff.empty (Resource_pool.Diff.reject_overloaded_diff diff) - cb )) )) + cb)) )) in let rl = create_rate_limiter () in if log_rate_limiter then log_rate_limiter_occasionally t rl ; @@ -148,7 +149,8 @@ end) [%log' debug t.logger] "Verifying $diff from $sender" ~metadata: [ ("diff", summary) - ; ("sender", Envelope.Sender.to_yojson diff.sender) ] ; + ; ("sender", Envelope.Sender.to_yojson diff.sender) + ] ; don't_wait_for ( match Rate_limiter.add rl diff.sender ~now:(Time.now ()) @@ -158,7 +160,8 @@ end) [%log' debug t.logger] ~metadata: [ ("sender", Envelope.Sender.to_yojson diff.sender) - ; ("diff", summary) ] + ; ("diff", summary) + ] "exceeded capacity from $sender" ; Broadcast_callback.error (Error.of_string "exceeded capacity") @@ -171,7 +174,8 @@ end) $error" ~metadata: [ ("diff", summary) - ; ("error", Error_json.error_to_yojson err) ] ; + ; ("error", Error_json.error_to_yojson err) + ] ; (*reject incoming messages*) Broadcast_callback.error err cb | Ok verified_diff -> ( @@ -182,12 +186,13 @@ end) @@ 
Envelope.Incoming.data verified_diff ) ; ( "sender" , Envelope.Sender.to_yojson - @@ Envelope.Incoming.sender verified_diff ) ] ; + @@ Envelope.Incoming.sender verified_diff ) + ] ; match Strict_pipe.Writer.write w (verified_diff, cb) with | Some r -> r | None -> - Deferred.unit ) ) ) ) ) + Deferred.unit ) ) ) )) |> don't_wait_for ; r @@ -199,21 +204,23 @@ end) ; logger ; read_broadcasts ; write_broadcasts - ; constraint_constants } + ; constraint_constants + } in (*proiority: Transition frontier diffs > local diffs > incomming diffs*) Strict_pipe.Reader.Merge.iter [ Strict_pipe.Reader.map tf_diffs ~f:(fun diff -> - `Transition_frontier_extension diff ) + `Transition_frontier_extension diff) ; Strict_pipe.Reader.map (filter_verified ~log_rate_limiter:false local_diffs network_pool ~f:(fun (diff, cb) -> - (Envelope.Incoming.local diff, Broadcast_callback.Local cb) )) + (Envelope.Incoming.local diff, Broadcast_callback.Local cb))) ~f:(fun d -> `Local d) ; Strict_pipe.Reader.map (filter_verified ~log_rate_limiter:true incoming_diffs network_pool ~f:(fun (diff, cb) -> (diff, Broadcast_callback.External cb))) - ~f:(fun d -> `Incoming d) ] + ~f:(fun d -> `Incoming d) + ] ~f:(fun diff_source -> match diff_source with | `Incoming (verified_diff, cb) -> @@ -221,7 +228,7 @@ end) | `Local (verified_diff, cb) -> apply_and_broadcast network_pool verified_diff cb | `Transition_frontier_extension diff -> - Resource_pool.handle_transition_frontier_diff diff resource_pool ) + Resource_pool.handle_transition_frontier_diff diff resource_pool) |> Deferred.don't_wait_for ; network_pool @@ -260,7 +267,8 @@ end) , `List (List.map ~f:(fun d -> `String (Resource_pool.Diff.summary d)) - rebroadcastable) ) ] ; + rebroadcastable) ) + ] ; let%bind () = Deferred.List.iter rebroadcastable ~f:(Linear_pipe.write t.write_broadcasts) @@ -270,9 +278,8 @@ end) in go () - let create ~config ~constraint_constants ~consensus_constants - ~time_controller ~incoming_diffs ~local_diffs 
~frontier_broadcast_pipe - ~logger = + let create ~config ~constraint_constants ~consensus_constants ~time_controller + ~incoming_diffs ~local_diffs ~frontier_broadcast_pipe ~logger = (*Diffs from tansition frontier extensions*) let tf_diff_reader, tf_diff_writer = Strict_pipe.( diff --git a/src/lib/network_pool/priced_proof.ml b/src/lib/network_pool/priced_proof.ml index 73cf0555edc..989eb0392b1 100644 --- a/src/lib/network_pool/priced_proof.ml +++ b/src/lib/network_pool/priced_proof.ml @@ -6,10 +6,11 @@ module Stable = struct [@@@no_toplevel_latest_type] module V1 = struct - type 'proof t = {proof: 'proof; fee: Fee_with_prover.Stable.V1.t} + type 'proof t = { proof : 'proof; fee : Fee_with_prover.Stable.V1.t } [@@deriving compare, fields, sexp, yojson, hash] end end] -type 'proof t = 'proof Stable.Latest.t = {proof: 'proof; fee: Fee_with_prover.t} +type 'proof t = 'proof Stable.Latest.t = + { proof : 'proof; fee : Fee_with_prover.t } [@@deriving compare, fields, sexp, yojson, hash] diff --git a/src/lib/network_pool/rate_limiter.ml b/src/lib/network_pool/rate_limiter.ml index d05e43b339f..9d16ba8e5e7 100644 --- a/src/lib/network_pool/rate_limiter.ml +++ b/src/lib/network_pool/rate_limiter.ml @@ -41,7 +41,7 @@ module Record = struct (* For a given peer, all of the actions within [interval] that peer has performed, along with the remaining capacity for actions. 
*) type t = - {mutable remaining_capacity: Score.t; elts: (Score.t * Time.t) Queue.t} + { mutable remaining_capacity : Score.t; elts : (Score.t * Time.t) Queue.t } [@@deriving sexp] let clear_old_entries r ~now = @@ -74,16 +74,18 @@ end module Lru_table (Q : Hash_queue.S) = struct let max_size = 2048 - type t = {table: Record.t Q.t; initial_capacity: Score.t} + type t = { table : Record.t Q.t; initial_capacity : Score.t } [@@deriving sexp_of] - let add ({table; initial_capacity} : t) (k : Q.key) ~now ~score = + let add ({ table; initial_capacity } : t) (k : Q.key) ~now ~score = match Q.lookup_and_move_to_back table k with | None -> if Int.(Q.length table >= max_size) then ignore (Q.dequeue_front table : Record.t option) ; Q.enqueue_back_exn table k - {Record.remaining_capacity= initial_capacity; elts= Queue.create ()} ; + { Record.remaining_capacity = initial_capacity + ; elts = Queue.create () + } ; `Ok | Some r -> Record.add r ~now ~score @@ -96,14 +98,18 @@ module Lru_table (Q : Hash_queue.S) = struct Record.clear_old_entries r ~now ; Score.(is_non_negative (r.remaining_capacity - score)) - let create ~initial_capacity = {initial_capacity; table= Q.create ()} + let create ~initial_capacity = { initial_capacity; table = Q.create () } - let next_expires ({table; _} : t) (k : Q.key) = + let next_expires ({ table; _ } : t) (k : Q.key) = match Q.lookup table k with | None -> Time.now () - | Some {elts; _} -> ( - match Queue.peek elts with Some (_, time) -> time | None -> Time.now () ) + | Some { elts; _ } -> ( + match Queue.peek elts with + | Some (_, time) -> + time + | None -> + Time.now () ) end module Ip = struct @@ -116,18 +122,18 @@ module Peer_id = struct module Lru = Lru_table (Hash_queue) end -type t = {by_ip: Ip.Lru.t; by_peer_id: Peer_id.Lru.t} [@@deriving sexp_of] +type t = { by_ip : Ip.Lru.t; by_peer_id : Peer_id.Lru.t } [@@deriving sexp_of] let create ~capacity:(capacity, `Per t) = let initial_capacity = let max_per_second = Float.of_int capacity /. 
Time.Span.to_sec t in - Float.round_up (max_per_second *. Time.Span.to_sec interval) - |> Float.to_int + Float.round_up (max_per_second *. Time.Span.to_sec interval) |> Float.to_int in - { by_ip= Ip.Lru.create ~initial_capacity - ; by_peer_id= Peer_id.Lru.create ~initial_capacity } + { by_ip = Ip.Lru.create ~initial_capacity + ; by_peer_id = Peer_id.Lru.create ~initial_capacity + } -let add {by_ip; by_peer_id} (sender : Envelope.Sender.t) ~now ~score = +let add { by_ip; by_peer_id } (sender : Envelope.Sender.t) ~now ~score = match sender with | Local -> `Within_capacity @@ -138,38 +144,39 @@ let add {by_ip; by_peer_id} (sender : Envelope.Sender.t) ~now ~score = Ip.Lru.has_capacity by_ip ip ~now ~score && Peer_id.Lru.has_capacity by_peer_id id ~now ~score then ( - ignore (Ip.Lru.add by_ip ip ~now ~score : [`No_space | `Ok]) ; - ignore (Peer_id.Lru.add by_peer_id id ~now ~score : [`No_space | `Ok]) ; + ignore (Ip.Lru.add by_ip ip ~now ~score : [ `No_space | `Ok ]) ; + ignore (Peer_id.Lru.add by_peer_id id ~now ~score : [ `No_space | `Ok ]) ; `Within_capacity ) else `Capacity_exceeded -let next_expires {by_peer_id; _} (sender : Envelope.Sender.t) = +let next_expires { by_peer_id; _ } (sender : Envelope.Sender.t) = match sender with | Local -> Time.now () - | Remote {peer_id; _} -> + | Remote { peer_id; _ } -> Peer_id.Lru.next_expires by_peer_id peer_id module Summary = struct - type r = {capacity_used: Score.t} [@@deriving to_yojson] + type r = { capacity_used : Score.t } [@@deriving to_yojson] - type t = {by_ip: (string * r) list; by_peer_id: (string * r) list} + type t = { by_ip : (string * r) list; by_peer_id : (string * r) list } [@@deriving to_yojson] end -let summary ({by_ip; by_peer_id} : t) = +let summary ({ by_ip; by_peer_id } : t) = let open Summary in to_yojson - { by_ip= + { by_ip = Ip.Hash_queue.foldi by_ip.table ~init:[] ~f:(fun acc ~key ~data -> ( Unix.Inet_addr.to_string key - , {capacity_used= by_ip.initial_capacity - data.remaining_capacity} - ) - :: 
acc ) - ; by_peer_id= + , { capacity_used = by_ip.initial_capacity - data.remaining_capacity + } ) + :: acc) + ; by_peer_id = Peer_id.Hash_queue.foldi by_peer_id.table ~init:[] ~f:(fun acc ~key ~data -> ( Peer.Id.to_string key - , {capacity_used= by_ip.initial_capacity - data.remaining_capacity} - ) - :: acc ) } + , { capacity_used = by_ip.initial_capacity - data.remaining_capacity + } ) + :: acc) + } diff --git a/src/lib/network_pool/rate_limiter.mli b/src/lib/network_pool/rate_limiter.mli index d0cedb9cf03..a782a499eae 100644 --- a/src/lib/network_pool/rate_limiter.mli +++ b/src/lib/network_pool/rate_limiter.mli @@ -3,14 +3,14 @@ open Network_peer type t -val create : capacity:int * [`Per of Time.Span.t] -> t +val create : capacity:int * [ `Per of Time.Span.t ] -> t val add : t -> Envelope.Sender.t -> now:Time.t -> score:int - -> [`Within_capacity | `Capacity_exceeded] + -> [ `Within_capacity | `Capacity_exceeded ] val next_expires : t -> Envelope.Sender.t -> Time.t diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index c2ac2a32da9..f1e7a4f8354 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -23,12 +23,13 @@ module Snark_tables = struct end type t = - { all: + { all : Ledger_proof.t One_or_two.t Priced_proof.t Transaction_snark_work.Statement.Table.t - ; rebroadcastable: + ; rebroadcastable : (Ledger_proof.t One_or_two.t Priced_proof.t * Core.Time.t) - Transaction_snark_work.Statement.Table.t } + Transaction_snark_work.Statement.Table.t + } [@@deriving sexp] let compare t1 t2 = @@ -39,22 +40,24 @@ module Snark_tables = struct list * ( Transaction_snark_work.Statement.t * (Ledger_proof.t One_or_two.t Priced_proof.t * Core.Time.t) ) - list] (p t1) (p t2) + list] + (p t1) (p t2) let of_serializable (t : Serializable.t) : t = - { all= Hashtbl.map t ~f:fst - ; rebroadcastable= + { all = Hashtbl.map t ~f:fst + ; rebroadcastable = Hashtbl.filter_map t ~f:(fun (x, r) -> match r with | 
`Rebroadcastable time -> Some (x, time) | `Not_rebroadcastable -> - None ) } + None) + } let to_serializable (t : t) : Serializable.t = let res = Hashtbl.map t.all ~f:(fun x -> (x, `Not_rebroadcastable)) in Hashtbl.iteri t.rebroadcastable ~f:(fun ~key ~data:(x, r) -> - Hashtbl.set res ~key ~data:(x, `Rebroadcastable r) ) ; + Hashtbl.set res ~key ~data:(x, `Rebroadcastable r)) ; res end @@ -64,7 +67,7 @@ module type S = sig module Resource_pool : sig include Intf.Snark_resource_pool_intf - with type transition_frontier := transition_frontier + with type transition_frontier := transition_frontier val remove_solved_work : t -> Transaction_snark_work.Statement.t -> unit @@ -74,20 +77,20 @@ module type S = sig module For_tests : sig val get_rebroadcastable : Resource_pool.t - -> has_timed_out:(Time.t -> [`Timed_out | `Ok]) + -> has_timed_out:(Time.t -> [ `Timed_out | `Ok ]) -> Resource_pool.Diff.t list end include Intf.Network_pool_base_intf - with type resource_pool := Resource_pool.t - and type resource_pool_diff := Resource_pool.Diff.t - and type resource_pool_diff_verified := Resource_pool.Diff.t - and type transition_frontier := transition_frontier - and type config := Resource_pool.Config.t - and type transition_frontier_diff := - Resource_pool.transition_frontier_diff - and type rejected_diff := Resource_pool.Diff.rejected + with type resource_pool := Resource_pool.t + and type resource_pool_diff := Resource_pool.Diff.t + and type resource_pool_diff_verified := Resource_pool.Diff.t + and type transition_frontier := transition_frontier + and type config := Resource_pool.Config.t + and type transition_frontier_diff := + Resource_pool.transition_frontier_diff + and type rejected_diff := Resource_pool.Diff.rejected val get_completed_work : t @@ -100,16 +103,17 @@ module type S = sig -> constraint_constants:Genesis_constants.Constraint_constants.t -> consensus_constants:Consensus.Constants.t -> time_controller:Block_time.Controller.t - -> incoming_diffs:( 
Resource_pool.Diff.t Envelope.Incoming.t - * Mina_net2.Validation_callback.t ) - Strict_pipe.Reader.t - -> local_diffs:( Resource_pool.Diff.t - * ( (Resource_pool.Diff.t * Resource_pool.Diff.rejected) - Or_error.t - -> unit) ) - Strict_pipe.Reader.t - -> frontier_broadcast_pipe:transition_frontier option - Broadcast_pipe.Reader.t + -> incoming_diffs: + ( Resource_pool.Diff.t Envelope.Incoming.t + * Mina_net2.Validation_callback.t ) + Strict_pipe.Reader.t + -> local_diffs: + ( Resource_pool.Diff.t + * ( (Resource_pool.Diff.t * Resource_pool.Diff.rejected) Or_error.t + -> unit) ) + Strict_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier option Broadcast_pipe.Reader.t -> t Deferred.t end @@ -138,12 +142,12 @@ end module Make (Base_ledger : Intf.Base_ledger_intf) (Staged_ledger : sig - type t + type t - val ledger : t -> Base_ledger.t + val ledger : t -> Base_ledger.t end) (Transition_frontier : Transition_frontier_intf - with type staged_ledger := Staged_ledger.t) = + with type staged_ledger := Staged_ledger.t) = struct module Resource_pool = struct module T = struct @@ -151,9 +155,10 @@ struct module Config = struct type t = - { trust_system: (Trust_system.t[@sexp.opaque]) - ; verifier: (Verifier.t[@sexp.opaque]) - ; disk_location: string } + { trust_system : (Trust_system.t[@sexp.opaque]) + ; verifier : (Verifier.t[@sexp.opaque]) + ; disk_location : string + } [@@deriving sexp, make] end @@ -162,31 +167,32 @@ struct | `New_best_tip of Base_ledger.t ] type t = - { snark_tables: Snark_tables.t - ; best_tip_ledger: (unit -> Base_ledger.t option[@sexp.opaque]) - ; mutable ref_table: int Statement_table.t option + { snark_tables : Snark_tables.t + ; best_tip_ledger : (unit -> Base_ledger.t option[@sexp.opaque]) + ; mutable ref_table : int Statement_table.t option (** Tracks the number of blocks that have each work statement in their scan state. Work is included iff it is a member of some block scan state. 
Used to filter the pool, ensuring that only work referenced within the frontier is kept. *) - ; mutable best_tip_table: + ; mutable best_tip_table : Transaction_snark_work.Statement.Hash_set.t option (** The set of all snark work statements present in the scan state for the last 10 blocks in the best chain. Used to filter broadcasts of locally produced work, so that irrelevant work is not broadcast. *) - ; config: Config.t - ; logger: (Logger.t[@sexp.opaque]) - ; mutable removed_counter: int + ; config : Config.t + ; logger : (Logger.t[@sexp.opaque]) + ; mutable removed_counter : int (** A counter for transition frontier breadcrumbs removed. When this reaches a certain value, unreferenced snark work is removed from ref_table *) - ; account_creation_fee: Currency.Fee.t - ; batcher: Batcher.Snark_pool.t } + ; account_creation_fee : Currency.Fee.t + ; batcher : Batcher.Snark_pool.t + } [@@deriving sexp] type serializable = Snark_tables.Serializable.Stable.Latest.t @@ -201,26 +207,27 @@ struct ~f:(fun tf -> Transition_frontier.best_tip tf |> Transition_frontier.Breadcrumb.staged_ledger - |> Staged_ledger.ledger ) + |> Staged_ledger.ledger) let of_serializable tables ~constraint_constants ~frontier_broadcast_pipe ~config ~logger : t = - { snark_tables= Snark_tables.of_serializable tables - ; best_tip_ledger= get_best_tip_ledger ~frontier_broadcast_pipe - ; batcher= Batcher.Snark_pool.create config.verifier - ; account_creation_fee= + { snark_tables = Snark_tables.of_serializable tables + ; best_tip_ledger = get_best_tip_ledger ~frontier_broadcast_pipe + ; batcher = Batcher.Snark_pool.create config.verifier + ; account_creation_fee = constraint_constants .Genesis_constants.Constraint_constants.account_creation_fee - ; ref_table= None - ; best_tip_table= None + ; ref_table = None + ; best_tip_table = None ; config ; logger - ; removed_counter= removed_breadcrumb_wait } + ; removed_counter = removed_breadcrumb_wait + } let snark_pool_json t : Yojson.Safe.t = `List 
(Statement_table.fold ~init:[] t.snark_tables.all - ~f:(fun ~key ~data:{proof= _; fee= {fee; prover}} acc -> + ~f:(fun ~key ~data:{ proof = _; fee = { fee; prover } } acc -> let work_ids = Transaction_snark_work.Statement.compact_json key in @@ -229,15 +236,20 @@ struct ; ("fee", Currency.Fee.Stable.V1.to_yojson fee) ; ( "prover" , Signature_lib.Public_key.Compressed.Stable.V1.to_yojson - prover ) ] - :: acc )) + prover ) + ] + :: acc)) let all_completed_work (t : t) : Transaction_snark_work.Info.t list = Statement_table.fold ~init:[] t.snark_tables.all - ~f:(fun ~key ~data:{proof= _; fee= {fee; prover}} acc -> + ~f:(fun ~key ~data:{ proof = _; fee = { fee; prover } } acc -> let work_ids = Transaction_snark_work.Statement.work_ids key in - {Transaction_snark_work.Info.statements= key; work_ids; fee; prover} - :: acc ) + { Transaction_snark_work.Info.statements = key + ; work_ids + ; fee + ; prover + } + :: acc) (** false when there is no active transition_frontier or when the refcount for the given work is 0 *) @@ -246,11 +258,11 @@ struct | None -> false | Some ref_table -> ( - match Statement_table.find ref_table work with - | None -> - false - | Some _ -> - true ) + match Statement_table.find ref_table work with + | None -> + false + | Some _ -> + true ) let fee_is_sufficient t ~fee ~prover ~best_tip_ledger = let open Mina_base in @@ -270,18 +282,19 @@ struct match u with | `New_best_tip l -> Statement_table.filteri_inplace t.snark_tables.all - ~f:(fun ~key ~data:{fee= {fee; prover}; _} -> + ~f:(fun ~key ~data:{ fee = { fee; prover }; _ } -> let keep = fee_is_sufficient t ~fee ~prover ~best_tip_ledger:(Some l) in if not keep then Hashtbl.remove t.snark_tables.rebroadcastable key ; - keep ) ; + keep) ; return () | `New_refcount_table { Extensions.Snark_pool_refcount.removed ; refcount_table - ; best_tip_table } -> + ; best_tip_table + } -> t.ref_table <- Some refcount_table ; t.best_tip_table <- Some best_tip_table ; t.removed_counter <- t.removed_counter + 
removed ; @@ -293,7 +306,7 @@ struct let keep = work_is_referenced t k in if not keep then Hashtbl.remove t.snark_tables.rebroadcastable k ; - keep ) ; + keep) ; return (*when snark works removed from the pool*) Mina_metrics.( @@ -313,10 +326,9 @@ struct the transition_frontier *) t.removed_counter <- removed_breadcrumb_wait ; Broadcast_pipe.Reader.iter - (Transition_frontier.snark_pool_refcount_pipe tf) - ~f:(fun x -> + (Transition_frontier.snark_pool_refcount_pipe tf) ~f:(fun x -> Strict_pipe.Writer.write tf_diff_writer - (`New_refcount_table x) ) + (`New_refcount_table x)) |> Deferred.don't_wait_for ; Broadcast_pipe.Reader.iter (Transition_frontier.best_tip_diff_pipe tf) ~f:(fun _ -> @@ -324,33 +336,34 @@ struct (`New_best_tip ( Transition_frontier.best_tip tf |> Transition_frontier.Breadcrumb.staged_ledger - |> Staged_ledger.ledger )) ) + |> Staged_ledger.ledger ))) |> Deferred.don't_wait_for ; return () | None -> t.ref_table <- None ; t.best_tip_table <- None ; - return () ) + return ()) in Deferred.don't_wait_for tf_deferred - let create ~constraint_constants ~consensus_constants:_ - ~time_controller:_ ~frontier_broadcast_pipe ~config ~logger - ~tf_diff_writer = + let create ~constraint_constants ~consensus_constants:_ ~time_controller:_ + ~frontier_broadcast_pipe ~config ~logger ~tf_diff_writer = let t = - { snark_tables= - { all= Statement_table.create () - ; rebroadcastable= Statement_table.create () } - ; best_tip_ledger= get_best_tip_ledger ~frontier_broadcast_pipe - ; batcher= Batcher.Snark_pool.create config.verifier + { snark_tables = + { all = Statement_table.create () + ; rebroadcastable = Statement_table.create () + } + ; best_tip_ledger = get_best_tip_ledger ~frontier_broadcast_pipe + ; batcher = Batcher.Snark_pool.create config.verifier ; logger ; config - ; ref_table= None - ; best_tip_table= None - ; account_creation_fee= + ; ref_table = None + ; best_tip_table = None + ; account_creation_fee = constraint_constants 
.Genesis_constants.Constraint_constants.account_creation_fee - ; removed_counter= removed_breadcrumb_wait } + ; removed_counter = removed_breadcrumb_wait + } in listen_to_frontier_broadcast_pipe frontier_broadcast_pipe t ~tf_diff_writer ; @@ -365,10 +378,10 @@ struct if work_is_referenced t work then ( (*Note: fee against existing proofs and the new proofs are checked in Diff.unsafe_apply which calls this function*) - Hashtbl.set t.snark_tables.all ~key:work ~data:{proof; fee} ; + Hashtbl.set t.snark_tables.all ~key:work ~data:{ proof; fee } ; if is_local then Hashtbl.set t.snark_tables.rebroadcastable ~key:work - ~data:({proof; fee}, Time.now ()) + ~data:({ proof; fee }, Time.now ()) else (* Stop rebroadcasting locally generated snarks if they are overwritten. No-op if there is no rebroadcastable SNARK with that @@ -393,13 +406,16 @@ struct ~metadata: [ ( "stmt" , One_or_two.to_yojson Transaction_snark.Statement.to_yojson - work ) ] ; + work ) + ] ; `Statement_not_referenced let verify_and_act t ~work ~sender = let best_tip_ledger = t.best_tip_ledger () in let statements, priced_proof = work in - let {Priced_proof.proof= proofs; fee= {prover; fee}} = priced_proof in + let { Priced_proof.proof = proofs; fee = { prover; fee } } = + priced_proof + in let trust_record = Trust_system.record_envelope_sender t.config.trust_system t.logger sender @@ -408,12 +424,14 @@ struct let metadata = [ ("prover", Signature_lib.Public_key.Compressed.to_yojson prover) ; ("fee", Currency.Fee.to_yojson fee) - ; ("sender", Envelope.Sender.to_yojson sender) ] + ; ("sender", Envelope.Sender.to_yojson sender) + ] in let log_and_punish ?(punish = true) statement e = let metadata = [ ("error", Error_json.error_to_yojson e) - ; ("work_id", `Int (Transaction_snark.Statement.hash statement)) ] + ; ("work_id", `Int (Transaction_snark.Statement.hash statement)) + ] @ metadata in [%log' error t.logger] ~metadata @@ -444,7 +462,7 @@ struct %{sexp: Transaction_snark.Statement.t}" proof_statement s ; 
let%map () = log_and_punish s e in - Error e ) + Error e) in let work = One_or_two.map proofs ~f:snd in if not prover_account_ok then ( @@ -482,8 +500,9 @@ struct "Proof had an invalid key: $public_key" ~metadata: [ ( "public_key" - , Signature_lib.Public_key.Compressed.to_yojson - prover ) ] ; + , Signature_lib.Public_key.Compressed.to_yojson prover + ) + ] ; Deferred.return false | Some _ -> ( match%bind @@ -510,7 +529,8 @@ struct ( [ ("error", Error_json.error_to_yojson e) ; ( "work_ids" , Transaction_snark_work.Statement.compact_json statements - ) ] + ) + ] @ metadata ) "One_or_two length mismatch: $error" ; Deferred.return false @@ -534,7 +554,7 @@ struct |> List.filter_map ~f:(fun (stmt, (snark, _time)) -> if in_best_tip_table stmt then Some (Diff.Add_solved_work (stmt, snark)) - else None ) + else None) let remove_solved_work t work = Statement_table.remove t.snark_tables.all work ; @@ -552,9 +572,9 @@ struct let get_completed_work t statement = Option.map (Resource_pool.request_proof (resource_pool t) statement) - ~f:(fun Priced_proof.{proof; fee= {fee; prover}} -> + ~f:(fun Priced_proof.{ proof; fee = { fee; prover } } -> Transaction_snark_work.Checked.create_unsafe - {Transaction_snark_work.fee; proofs= proof; prover} ) + { Transaction_snark_work.fee; proofs = proof; prover }) (* This causes a snark pool to never be GC'd. This is fine as it should live as long as the daemon lives. *) let store_periodically (t : Resource_pool.t) = @@ -570,7 +590,7 @@ struct Snark_work.Snark_pool_serialization_ms_histogram.observe Snark_work.snark_pool_serialization_ms elapsed) ; [%log' debug t.logger] "SNARK pool serialization took $time ms" - ~metadata:[("time", `Float elapsed)] ) + ~metadata:[ ("time", `Float elapsed) ]) let loaded = ref false @@ -663,7 +683,8 @@ let%test_module "random set test" = work easier for testing, we set the account creation fee to 0. 
*) let constraint_constants = { precomputed_values.constraint_constants with - account_creation_fee= Currency.Fee.zero } + account_creation_fee = Currency.Fee.zero + } let consensus_constants = precomputed_values.consensus_constants @@ -677,11 +698,10 @@ let%test_module "random set test" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) module Mock_snark_pool = - Make (Mocks.Base_ledger) (Mocks.Staged_ledger) - (Mocks.Transition_frontier) + Make (Mocks.Base_ledger) (Mocks.Staged_ledger) (Mocks.Transition_frontier) open Ledger_proof.For_tests let apply_diff resource_pool work @@ -689,7 +709,7 @@ let%test_module "random set test" = ?(sender = Envelope.Sender.Local) fee = let diff = Mock_snark_pool.Resource_pool.Diff.Add_solved_work - (work, {Priced_proof.Stable.Latest.proof= proof work; fee}) + (work, { Priced_proof.Stable.Latest.proof = proof work; fee }) in let enveloped_diff = Envelope.Incoming.wrap ~data:diff ~sender in match%bind @@ -743,7 +763,7 @@ let%test_module "random set test" = let%map () = Deferred.List.iter sample_solved_work ~f:(fun (work, fee) -> let%map res = apply_diff resource_pool work fee in - assert (Result.is_ok res) ) + assert (Result.is_ok res)) in (resource_pool, tf) in @@ -752,7 +772,7 @@ let%test_module "random set test" = let%test_unit "serialization" = let t, _tf = Async.Thread_safe.block_on_async_exn (fun () -> - Quickcheck.random_value (gen ~length:100 ()) ) + Quickcheck.random_value (gen ~length:100 ())) in let s0 = Mock_snark_pool.For_tests.snark_tables t in let s1 = @@ -781,16 +801,15 @@ let%test_module "random set test" = , One_or_two.map work ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest:invalid_sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:Proof.transaction_dummy) , fee , some_other_pk ) - :: acc ) + :: acc) in 
Quickcheck.Generator.filter gen ~f:(fun ls -> List.for_all ls ~f:(fun (_, _, fee, mal_pk) -> not - @@ Signature_lib.Public_key.Compressed.equal mal_pk fee.prover - ) ) + @@ Signature_lib.Public_key.Compressed.equal mal_pk fee.prover)) in Quickcheck.test ~trials:5 ~sexp_of: @@ -821,16 +840,16 @@ let%test_module "random set test" = let diff = Mock_snark_pool.Resource_pool.Diff.Add_solved_work ( statements - , {Priced_proof.Stable.Latest.proof= proofs; fee} ) + , { Priced_proof.Stable.Latest.proof = proofs; fee } ) |> Envelope.Incoming.local in let%map res = Mock_snark_pool.Resource_pool.Diff.verify t diff in - assert (Result.is_error res) ) + assert (Result.is_error res)) in [%test_eq: Transaction_snark_work.Info.t list] completed_works - (Mock_snark_pool.Resource_pool.all_completed_work t) ) ) + (Mock_snark_pool.Resource_pool.all_completed_work t))) let%test_unit "When two priced proofs of the same work are inserted into \ the snark pool, the fee of the work is at most the minimum \ @@ -851,20 +870,20 @@ let%test_module "random set test" = let%bind t, tf = t in (*Statements should be referenced before work for those can be included*) let%bind () = - Mocks.Transition_frontier.refer_statements tf [work] + Mocks.Transition_frontier.refer_statements tf [ work ] in let%bind _ = apply_diff t work fee_1 in let%map _ = apply_diff t work fee_2 in let fee_upper_bound = Currency.Fee.min fee_1.fee fee_2.fee in - let {Priced_proof.fee= {fee; _}; _} = + let { Priced_proof.fee = { fee; _ }; _ } = Option.value_exn (Mock_snark_pool.Resource_pool.request_proof t work) in - assert (Currency.Fee.(fee <= fee_upper_bound)) ) ) + assert (Currency.Fee.(fee <= fee_upper_bound)))) let%test_unit "A priced proof of a work will replace an existing priced \ - proof of the same work only if it's fee is smaller than \ - the existing priced proof" = + proof of the same work only if it's fee is smaller than the \ + existing priced proof" = Quickcheck.test ~trials:5 ~sexp_of: [%sexp_of: @@ -881,7 
+900,7 @@ let%test_module "random set test" = let%bind t, tf = t in (*Statements should be referenced before work for those can be included*) let%bind () = - Mocks.Transition_frontier.refer_statements tf [work] + Mocks.Transition_frontier.refer_statements tf [ work ] in Mock_snark_pool.Resource_pool.remove_solved_work t work ; let expensive_fee = Fee_with_prover.max fee_1 fee_2 @@ -894,7 +913,7 @@ let%test_module "random set test" = (Option.value_exn (Mock_snark_pool.Resource_pool.request_proof t work)) .fee - .fee ) ) ) + .fee ))) let fake_work = `One @@ -920,15 +939,17 @@ let%test_module "random set test" = ~frontier_broadcast_pipe:frontier_broadcast_pipe_r in let priced_proof = - { Priced_proof.proof= + { Priced_proof.proof = `One (mk_dummy_proof (Quickcheck.random_value ~seed:(`Deterministic "test proof") Transaction_snark.Statement.gen)) - ; fee= - { fee= Currency.Fee.of_int 0 - ; prover= Signature_lib.Public_key.Compressed.empty } } + ; fee = + { fee = Currency.Fee.of_int 0 + ; prover = Signature_lib.Public_key.Compressed.empty + } + } in let command = Mock_snark_pool.Resource_pool.Diff.Add_solved_work @@ -941,16 +962,16 @@ let%test_module "random set test" = ( match Mock_snark_pool.Resource_pool.request_proof pool fake_work with - | Some {proof; fee= _} -> + | Some { proof; fee = _ } -> assert ( [%equal: Ledger_proof.t One_or_two.t] proof priced_proof.proof ) | None -> failwith "There should have been a proof here" ) ; - Deferred.unit ) ; + Deferred.unit) ; Mock_snark_pool.apply_and_broadcast network_pool (Envelope.Incoming.local command) - (Mock_snark_pool.Broadcast_callback.Local (Fn.const ())) ) + (Mock_snark_pool.Broadcast_callback.Local (Fn.const ()))) let%test_unit "when creating a network, the incoming diffs and locally \ generated diffs in reader pipes will automatically get \ @@ -969,11 +990,12 @@ let%test_module "random set test" = Mock_snark_pool.Resource_pool.Diff.Add_solved_work ( work , Priced_proof. 
- { proof= One_or_two.map ~f:mk_dummy_proof work - ; fee= - { fee= Currency.Fee.of_int 0 - ; prover= Signature_lib.Public_key.Compressed.empty } } - ) + { proof = One_or_two.map ~f:mk_dummy_proof work + ; fee = + { fee = Currency.Fee.of_int 0 + ; prover = Signature_lib.Public_key.Compressed.empty + } + } ) in let verify_unsolved_work () = let pool_reader, pool_writer = @@ -987,19 +1009,18 @@ let%test_module "random set test" = |> List.map ~f:(fun work -> ( Envelope.Incoming.local work , Mina_net2.Validation_callback.create_without_expiration () - ) ) + )) |> List.iter ~f:(fun diff -> Strict_pipe.Writer.write pool_writer diff - |> Deferred.don't_wait_for ) ; + |> Deferred.don't_wait_for) ; (* locally generated diffs *) List.map (List.drop works per_reader) ~f:create_work |> List.iter ~f:(fun diff -> Strict_pipe.Writer.write local_writer (diff, Fn.const ()) - |> Deferred.don't_wait_for ) ; + |> Deferred.don't_wait_for) ; let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in let frontier_broadcast_pipe_r, _ = - Broadcast_pipe.create - (Some (Mocks.Transition_frontier.create [])) + Broadcast_pipe.create (Some (Mocks.Transition_frontier.create [])) in let network_pool = Mock_snark_pool.create ~logger ~config ~constraint_constants @@ -1021,12 +1042,11 @@ let%test_module "random set test" = assert ( List.mem works work ~equal: - [%equal: Transaction_snark.Statement.t One_or_two.t] - ) ; - Deferred.unit ) ; + [%equal: Transaction_snark.Statement.t One_or_two.t] ) ; + Deferred.unit) ; Deferred.unit in - verify_unsolved_work () ) + verify_unsolved_work ()) let%test_unit "rebroadcast behavior" = let pool_reader, _pool_writer = @@ -1039,17 +1059,17 @@ let%test_module "random set test" = let frontier_broadcast_pipe_r, _w = Broadcast_pipe.create (Some tf) in let stmt1, stmt2, stmt3, stmt4 = let gen_not_any l = - Quickcheck.Generator.filter - Mocks.Transaction_snark_work.Statement.gen ~f:(fun x -> + Quickcheck.Generator.filter Mocks.Transaction_snark_work.Statement.gen + 
~f:(fun x -> List.for_all l ~f:(fun y -> - Mocks.Transaction_snark_work.Statement.compare x y <> 0 ) ) + Mocks.Transaction_snark_work.Statement.compare x y <> 0)) in let open Quickcheck.Generator.Let_syntax in Quickcheck.random_value ~seed:(`Deterministic "") (let%bind a = gen_not_any [] in - let%bind b = gen_not_any [a] in - let%bind c = gen_not_any [a; b] in - let%map d = gen_not_any [a; b; c] in + let%bind b = gen_not_any [ a ] in + let%bind c = gen_not_any [ a; b ] in + let%map d = gen_not_any [ a; b; c ] in (a, b, c, d)) in let fee1, fee2, fee3, fee4 = @@ -1061,8 +1081,7 @@ let%test_module "random set test" = Envelope.Sender.Remote (Peer.create (Unix.Inet_addr.of_string "1.2.3.4") - ~peer_id: - (Peer.Id.unsafe_of_string "contents should be irrelevant") + ~peer_id:(Peer.Id.unsafe_of_string "contents should be irrelevant") ~libp2p_port:8302) in let compare_work (x : Mock_snark_pool.Resource_pool.Diff.t) @@ -1089,7 +1108,7 @@ let%test_module "random set test" = let resource_pool = Mock_snark_pool.resource_pool network_pool in let%bind () = Mocks.Transition_frontier.refer_statements tf - [stmt1; stmt2; stmt3; stmt4] + [ stmt1; stmt2; stmt3; stmt4 ] in let%bind res1 = apply_diff ~sender:fake_sender resource_pool stmt1 fee1 @@ -1122,7 +1141,8 @@ let%test_module "random set test" = ~has_timed_out:(Fn.const `Ok) in check_work ~got:rebroadcastable2 - ~expected:[Add_solved_work (stmt2, {proof= proof2; fee= fee2})] ; + ~expected: + [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) ] ; let%bind res3 = apply_diff resource_pool stmt3 fee3 in let proof3 = One_or_two.map ~f:mk_dummy_proof stmt3 in ignore @@ -1135,8 +1155,9 @@ let%test_module "random set test" = in check_work ~got:rebroadcastable3 ~expected: - [ Add_solved_work (stmt2, {proof= proof2; fee= fee2}) - ; Add_solved_work (stmt3, {proof= proof3; fee= fee3}) ] ; + [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) + ; Add_solved_work (stmt3, { proof = proof3; fee = fee3 }) + ] ; (* Keep rebroadcasting 
even after the timeout, as long as the work hasn't appeared in a block yet. *) @@ -1146,8 +1167,9 @@ let%test_module "random set test" = in check_work ~got:rebroadcastable4 ~expected: - [ Add_solved_work (stmt2, {proof= proof2; fee= fee2}) - ; Add_solved_work (stmt3, {proof= proof3; fee= fee3}) ] ; + [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) + ; Add_solved_work (stmt3, { proof = proof3; fee = fee3 }) + ] ; let%bind res6 = apply_diff resource_pool stmt4 fee4 in let proof4 = One_or_two.map ~f:mk_dummy_proof stmt4 in ignore @@ -1156,7 +1178,7 @@ let%test_module "random set test" = * Mock_snark_pool.Resource_pool.Diff.rejected ) ; (* Mark best tip as not including stmt3. *) let%bind () = - Mocks.Transition_frontier.remove_from_best_tip tf [stmt3] + Mocks.Transition_frontier.remove_from_best_tip tf [ stmt3 ] in let rebroadcastable5 = Mock_snark_pool.For_tests.get_rebroadcastable resource_pool @@ -1164,7 +1186,8 @@ let%test_module "random set test" = in check_work ~got:rebroadcastable5 ~expected: - [ Add_solved_work (stmt2, {proof= proof2; fee= fee2}) - ; Add_solved_work (stmt4, {proof= proof4; fee= fee4}) ] ; - Deferred.unit ) + [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) + ; Add_solved_work (stmt4, { proof = proof4; fee = fee4 }) + ] ; + Deferred.unit) end ) diff --git a/src/lib/network_pool/snark_pool.mli b/src/lib/network_pool/snark_pool.mli index 9fd48025517..f9929adddc5 100644 --- a/src/lib/network_pool/snark_pool.mli +++ b/src/lib/network_pool/snark_pool.mli @@ -9,7 +9,7 @@ module type S = sig module Resource_pool : sig include Intf.Snark_resource_pool_intf - with type transition_frontier := transition_frontier + with type transition_frontier := transition_frontier val remove_solved_work : t -> Transaction_snark_work.Statement.t -> unit @@ -19,20 +19,20 @@ module type S = sig module For_tests : sig val get_rebroadcastable : Resource_pool.t - -> has_timed_out:(Core.Time.t -> [`Timed_out | `Ok]) + -> has_timed_out:(Core.Time.t -> [ 
`Timed_out | `Ok ]) -> Resource_pool.Diff.t list end include Intf.Network_pool_base_intf - with type resource_pool := Resource_pool.t - and type resource_pool_diff := Resource_pool.Diff.t - and type resource_pool_diff_verified := Resource_pool.Diff.verified - and type transition_frontier := transition_frontier - and type config := Resource_pool.Config.t - and type transition_frontier_diff := - Resource_pool.transition_frontier_diff - and type rejected_diff := Resource_pool.Diff.rejected + with type resource_pool := Resource_pool.t + and type resource_pool_diff := Resource_pool.Diff.t + and type resource_pool_diff_verified := Resource_pool.Diff.verified + and type transition_frontier := transition_frontier + and type config := Resource_pool.Config.t + and type transition_frontier_diff := + Resource_pool.transition_frontier_diff + and type rejected_diff := Resource_pool.Diff.rejected val get_completed_work : t @@ -45,16 +45,17 @@ module type S = sig -> constraint_constants:Genesis_constants.Constraint_constants.t -> consensus_constants:Consensus.Constants.t -> time_controller:Block_time.Controller.t - -> incoming_diffs:( Resource_pool.Diff.t Envelope.Incoming.t - * Mina_net2.Validation_callback.t ) - Strict_pipe.Reader.t - -> local_diffs:( Resource_pool.Diff.t - * ( (Resource_pool.Diff.t * Resource_pool.Diff.rejected) - Or_error.t - -> unit) ) - Strict_pipe.Reader.t - -> frontier_broadcast_pipe:transition_frontier option - Broadcast_pipe.Reader.t + -> incoming_diffs: + ( Resource_pool.Diff.t Envelope.Incoming.t + * Mina_net2.Validation_callback.t ) + Strict_pipe.Reader.t + -> local_diffs: + ( Resource_pool.Diff.t + * ( (Resource_pool.Diff.t * Resource_pool.Diff.rejected) Or_error.t + -> unit) ) + Strict_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier option Broadcast_pipe.Reader.t -> t Deferred.t end @@ -83,12 +84,12 @@ end module Make (Base_ledger : Intf.Base_ledger_intf) (Staged_ledger : sig - type t + type t - val ledger : t -> Base_ledger.t + 
val ledger : t -> Base_ledger.t end) (Transition_frontier : Transition_frontier_intf - with type staged_ledger := Staged_ledger.t) : + with type staged_ledger := Staged_ledger.t) : S with type transition_frontier := Transition_frontier.t include S with type transition_frontier := Transition_frontier.t diff --git a/src/lib/network_pool/snark_pool_diff.ml b/src/lib/network_pool/snark_pool_diff.ml index 33b88e11f52..f1b2030ea7e 100644 --- a/src/lib/network_pool/snark_pool_diff.ml +++ b/src/lib/network_pool/snark_pool_diff.ml @@ -23,7 +23,7 @@ end module Make (Transition_frontier : T) (Pool : Intf.Snark_resource_pool_intf - with type transition_frontier := Transition_frontier.t) : + with type transition_frontier := Transition_frontier.t) : Intf.Snark_pool_diff_intf with type resource_pool := Pool.t = struct type t = | Add_solved_work of Work.t * Ledger_proof.t One_or_two.t Priced_proof.t @@ -37,14 +37,15 @@ module Make let reject_overloaded_diff _ = () type compact = - { work: Work.t - ; fee: Currency.Fee.t - ; prover: Signature_lib.Public_key.Compressed.t } + { work : Work.t + ; fee : Currency.Fee.t + ; prover : Signature_lib.Public_key.Compressed.t + } [@@deriving yojson, hash] let to_compact = function - | Add_solved_work (work, {proof= _; fee= {fee; prover}}) -> - Some {work; fee; prover} + | Add_solved_work (work, { proof = _; fee = { fee; prover } }) -> + Some { work; fee; prover } | Empty -> None @@ -66,7 +67,7 @@ module Make let max_per_15_seconds = 20 let summary = function - | Add_solved_work (work, {proof= _; fee}) -> + | Add_solved_work (work, { proof = _; fee }) -> Printf.sprintf !"Snark_pool_diff for work %s added with fee-prover %s" (Yojson.Safe.to_string @@ Work.compact_json work) @@ -85,7 +86,9 @@ module Make Add_solved_work ( One_or_two.map res.spec.instances ~f:Snark_work_lib.Work.Single.Spec.statement - , {proof= res.proofs; fee= {fee= res.spec.fee; prover= res.prover}} ) + , { proof = res.proofs + ; fee = { fee = res.spec.fee; prover = res.prover } 
+ } ) let has_lower_fee pool work ~fee ~sender = let reject_and_log_if_local reason = @@ -94,28 +97,29 @@ module Make ~metadata: [ ("work", Work.compact_json work) ; ("sender", Envelope.Sender.to_yojson sender) - ; ("reason", `String reason) ] ; + ; ("reason", `String reason) + ] ; Or_error.error_string reason in match Pool.request_proof pool work with | None -> Ok () - | Some {fee= {fee= prev; _}; _} -> ( - match Currency.Fee.compare fee prev with - | -1 -> - Ok () - | 0 -> - reject_and_log_if_local "fee equal to cheapest work we have" - | 1 -> - reject_and_log_if_local "fee higher than cheapest work we have" - | _ -> - failwith "compare didn't return -1, 0, or 1!" ) - - let verify pool ({data; sender; _} as t : t Envelope.Incoming.t) = + | Some { fee = { fee = prev; _ }; _ } -> ( + match Currency.Fee.compare fee prev with + | -1 -> + Ok () + | 0 -> + reject_and_log_if_local "fee equal to cheapest work we have" + | 1 -> + reject_and_log_if_local "fee higher than cheapest work we have" + | _ -> + failwith "compare didn't return -1, 0, or 1!" 
) + + let verify pool ({ data; sender; _ } as t : t Envelope.Incoming.t) = match data with | Empty -> Deferred.Or_error.error_string "cannot verify empty snark pool diff" - | Add_solved_work (work, ({Priced_proof.fee; _} as p)) -> ( + | Add_solved_work (work, ({ Priced_proof.fee; _ } as p)) -> ( let is_local = match sender with Local -> true | _ -> false in let verify () = if%map Pool.verify_and_act pool ~work:(work, p) ~sender then @@ -133,13 +137,12 @@ module Make (* This is called after verification has occurred.*) let unsafe_apply (pool : Pool.t) (t : t Envelope.Incoming.t) = - let {Envelope.Incoming.data= diff; sender; _} = t in + let { Envelope.Incoming.data = diff; sender; _ } = t in match diff with | Empty -> Deferred.return - (Error - (`Other (Error.of_string "cannot apply empty snark pool diff"))) - | Add_solved_work (work, {Priced_proof.proof; fee}) -> + (Error (`Other (Error.of_string "cannot apply empty snark pool diff"))) + | Add_solved_work (work, { Priced_proof.proof; fee }) -> let is_local = match sender with Local -> true | _ -> false in let to_or_error = function | `Statement_not_referenced -> diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml index 2992804d804..ed60cee5037 100644 --- a/src/lib/network_pool/test.ml +++ b/src/lib/network_pool/test.ml @@ -23,7 +23,7 @@ let%test_module "network pool test" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) module Mock_snark_pool = Snark_pool.Make (Mocks.Base_ledger) (Mocks.Staged_ledger) @@ -49,11 +49,13 @@ let%test_module "network pool test" = Transaction_snark.Statement.gen) in let priced_proof = - { Priced_proof.proof= + { Priced_proof.proof = One_or_two.map ~f:Ledger_proof.For_tests.mk_dummy_proof work - ; fee= - { fee= Currency.Fee.of_int 0 - ; prover= 
Signature_lib.Public_key.Compressed.empty } } + ; fee = + { fee = Currency.Fee.of_int 0 + ; prover = Signature_lib.Public_key.Compressed.empty + } + } in Async.Thread_safe.block_on_async_exn (fun () -> let network_pool = @@ -62,7 +64,9 @@ let%test_module "network pool test" = ~local_diffs:local_reader ~frontier_broadcast_pipe:frontier_broadcast_pipe_r in - let%bind () = Mocks.Transition_frontier.refer_statements tf [work] in + let%bind () = + Mocks.Transition_frontier.refer_statements tf [ work ] + in let command = Mock_snark_pool.Resource_pool.Diff.Add_solved_work (work, priced_proof) @@ -76,15 +80,15 @@ let%test_module "network pool test" = in let pool = Mock_snark_pool.resource_pool network_pool in match Mock_snark_pool.Resource_pool.request_proof pool work with - | Some {proof; fee= _} -> + | Some { proof; fee = _ } -> assert ( [%equal: Ledger_proof.t One_or_two.t] proof priced_proof.proof ) | None -> - failwith "There should have been a proof here" ) + failwith "There should have been a proof here") - let%test_unit "when creating a network, the incoming diffs and local \ - diffs in the reader pipes will automatically get process" = + let%test_unit "when creating a network, the incoming diffs and local diffs \ + in the reader pipes will automatically get process" = let work_count = 10 in let works = Quickcheck.random_sequence ~seed:(`Deterministic "works") @@ -98,11 +102,13 @@ let%test_module "network pool test" = Mock_snark_pool.Resource_pool.Diff.Add_solved_work ( work , Priced_proof. 
- { proof= + { proof = One_or_two.map ~f:Ledger_proof.For_tests.mk_dummy_proof work - ; fee= - { fee= Currency.Fee.of_int 0 - ; prover= Signature_lib.Public_key.Compressed.empty } } ) + ; fee = + { fee = Currency.Fee.of_int 0 + ; prover = Signature_lib.Public_key.Compressed.empty + } + } ) in let verify_unsolved_work () = let pool_reader, pool_writer = @@ -114,15 +120,14 @@ let%test_module "network pool test" = List.map (List.take works per_reader) ~f:create_work |> List.map ~f:(fun work -> ( Envelope.Incoming.local work - , Mina_net2.Validation_callback.create_without_expiration () ) - ) + , Mina_net2.Validation_callback.create_without_expiration () )) |> List.iter ~f:(fun diff -> Strict_pipe.Writer.write pool_writer diff - |> Deferred.don't_wait_for ) ; + |> Deferred.don't_wait_for) ; List.map (List.drop works per_reader) ~f:create_work |> List.iter ~f:(fun diff -> Strict_pipe.Writer.write local_writer (diff, Fn.const ()) - |> Deferred.don't_wait_for ) ; + |> Deferred.don't_wait_for) ; let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in let tf = Mocks.Transition_frontier.create [] in let frontier_broadcast_pipe_r, _ = Broadcast_pipe.create (Some tf) in @@ -147,7 +152,7 @@ let%test_module "network pool test" = assert ( List.mem works work ~equal:Transaction_snark_work.Statement.equal ) ; - Deferred.unit ) ; + Deferred.unit) ; Deferred.unit in verify_unsolved_work |> Async.Thread_safe.block_on_async_exn diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index 3a025896f55..909cd61bf28 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -25,9 +25,10 @@ module type Transition_frontier_intf = sig end type best_tip_diff = - { new_commands: User_command.Valid.t With_status.t list - ; removed_commands: User_command.Valid.t With_status.t list - ; reorg_best_tip: bool } + { new_commands : User_command.Valid.t With_status.t list + ; removed_commands : 
User_command.Valid.t With_status.t list + ; reorg_best_tip : bool + } val best_tip : t -> Breadcrumb.t @@ -48,13 +49,13 @@ module Diff_versioned = struct end] (* We defer do any checking on signed-commands until the call to - [add_from_gossip_gossip_exn]. + [add_from_gossip_gossip_exn]. - The real solution would be to have more explicit queueing to make sure things don't happen out of order, factor - [add_from_gossip_gossip_exn] into [check_from_gossip_exn] (which just does - the checks) and [set_from_gossip_exn] (which just does the mutating the pool), - and do the same for snapp commands as well. -*) + The real solution would be to have more explicit queueing to make sure things don't happen out of order, factor + [add_from_gossip_gossip_exn] into [check_from_gossip_exn] (which just does + the checks) and [set_from_gossip_exn] (which just does the mutating the pool), + and do the same for snapp commands as well. + *) type verified = Signed_command.t list [@@deriving sexp, yojson] type t = User_command.t list [@@deriving sexp, yojson] @@ -101,8 +102,8 @@ module Diff_versioned = struct let to_string_hum = function | Insufficient_replace_fee -> - "This transaction would have replaced an existing transaction in \ - the pool, but the fee was too low" + "This transaction would have replaced an existing transaction in the \ + pool, but the fee was too low" | Invalid_signature -> "This transaction had an invalid signature" | Duplicate -> @@ -149,18 +150,18 @@ module Diff_versioned = struct type rejected = Rejected.t [@@deriving sexp, yojson] - let summary t = - Printf.sprintf "Transaction diff of length %d" (List.length t) + let summary t = Printf.sprintf "Transaction diff of length %d" (List.length t) let is_empty t = List.is_empty t end type Structured_log_events.t += | Rejecting_command_for_reason of - { command: User_command.t - ; reason: Diff_versioned.Diff_error.t - ; error_extra: (string * Yojson.Safe.t) list } - [@@deriving register_event {msg= "Rejecting command 
because: $reason"}] + { command : User_command.t + ; reason : Diff_versioned.Diff_error.t + ; error_extra : (string * Yojson.Safe.t) list + } + [@@deriving register_event { msg = "Rejecting command because: $reason" }] module type S = sig open Intf @@ -170,37 +171,37 @@ module type S = sig module Resource_pool : sig include Transaction_resource_pool_intf - with type transition_frontier := transition_frontier + with type transition_frontier := transition_frontier module Diff : Transaction_pool_diff_intf - with type resource_pool := t - and type Diff_error.t = Diff_versioned.Diff_error.t - and type Rejected.t = Diff_versioned.Rejected.t + with type resource_pool := t + and type Diff_error.t = Diff_versioned.Diff_error.t + and type Rejected.t = Diff_versioned.Rejected.t end include Network_pool_base_intf - with type resource_pool := Resource_pool.t - and type transition_frontier := transition_frontier - and type resource_pool_diff := Diff_versioned.t - and type resource_pool_diff_verified := Diff_versioned.verified - and type config := Resource_pool.Config.t - and type transition_frontier_diff := - Resource_pool.transition_frontier_diff - and type rejected_diff := Diff_versioned.rejected + with type resource_pool := Resource_pool.t + and type transition_frontier := transition_frontier + and type resource_pool_diff := Diff_versioned.t + and type resource_pool_diff_verified := Diff_versioned.verified + and type config := Resource_pool.Config.t + and type transition_frontier_diff := + Resource_pool.transition_frontier_diff + and type rejected_diff := Diff_versioned.rejected end (* Functor over user command, base ledger and transaction validator for mocking. 
*) module Make0 (Base_ledger : Intf.Base_ledger_intf) (Staged_ledger : sig - type t + type t - val ledger : t -> Base_ledger.t + val ledger : t -> Base_ledger.t end) (Transition_frontier : Transition_frontier_intf - with type staged_ledger := Staged_ledger.t) = + with type staged_ledger := Staged_ledger.t) = struct module Breadcrumb = Transition_frontier.Breadcrumb @@ -212,14 +213,15 @@ struct module Config = struct type t = - { trust_system: (Trust_system.t [@sexp.opaque]) - ; pool_max_size: int - (* note this value needs to be mostly the same across gossipping nodes, so - nodes with larger pools don't send nodes with smaller pools lots of - low fee transactions the smaller-pooled nodes consider useless and get - themselves banned. - *) - ; verifier: (Verifier.t [@sexp.opaque]) } + { trust_system : (Trust_system.t[@sexp.opaque]) + ; pool_max_size : int + (* note this value needs to be mostly the same across gossipping nodes, so + nodes with larger pools don't send nodes with smaller pools lots of + low fee transactions the smaller-pooled nodes consider useless and get + themselves banned. + *) + ; verifier : (Verifier.t[@sexp.opaque]) + } [@@deriving sexp_of, make] end @@ -249,26 +251,27 @@ struct end type t = - { mutable pool: Indexed_pool.t - ; recently_seen: (Lru_cache.t [@sexp.opaque]) - ; locally_generated_uncommitted: + { mutable pool : Indexed_pool.t + ; recently_seen : (Lru_cache.t[@sexp.opaque]) + ; locally_generated_uncommitted : ( Transaction_hash.User_command_with_valid_signature.t - , Time.t * [`Batch of int] ) + , Time.t * [ `Batch of int ] ) Hashtbl.t (** Commands generated on this machine, that are not included in the current best tip, along with the time they were added. *) - ; locally_generated_committed: + ; locally_generated_committed : ( Transaction_hash.User_command_with_valid_signature.t - , Time.t * [`Batch of int] ) + , Time.t * [ `Batch of int ] ) Hashtbl.t (** Ones that are included in the current best tip. 
*) - ; mutable current_batch: int - ; mutable remaining_in_batch: int - ; config: Config.t - ; logger: (Logger.t [@sexp.opaque]) - ; batcher: Batcher.t - ; mutable best_tip_diff_relay: (unit Deferred.t [@sexp.opaque]) Option.t - ; mutable best_tip_ledger: (Base_ledger.t [@sexp.opaque]) Option.t } + ; mutable current_batch : int + ; mutable remaining_in_batch : int + ; config : Config.t + ; logger : (Logger.t[@sexp.opaque]) + ; batcher : Batcher.t + ; mutable best_tip_diff_relay : (unit Deferred.t[@sexp.opaque]) Option.t + ; mutable best_tip_ledger : (Base_ledger.t[@sexp.opaque]) Option.t + } [@@deriving sexp_of] let member t x = @@ -278,45 +281,47 @@ struct Sequence.unfold ~init:p ~f:(fun pool -> match Indexed_pool.get_highest_fee pool with | Some cmd -> ( - match - Indexed_pool.handle_committed_txn pool - cmd - (* we have the invariant that the transactions currently - in the pool are always valid against the best tip, so - no need to check balances here *) - ~fee_payer_balance:Currency.Amount.max_int - ~fee_payer_nonce: - ( Transaction_hash.User_command_with_valid_signature.command - cmd - |> User_command.nonce_exn ) - with - | Ok (t, _) -> - Some (cmd, t) - | Error (`Queued_txns_by_sender (error_str, queued_cmds)) -> - [%log error] - "Error handling committed transaction $cmd: $error " - ~metadata: - [ ( "cmd" - , Transaction_hash.User_command_with_valid_signature - .to_yojson cmd ) - ; ("error", `String error_str) - ; ( "queue" - , `List - (List.map (Sequence.to_list queued_cmds) ~f:(fun c -> - Transaction_hash - .User_command_with_valid_signature - .to_yojson c )) ) ] ; - failwith error_str ) + match + Indexed_pool.handle_committed_txn pool + cmd + (* we have the invariant that the transactions currently + in the pool are always valid against the best tip, so + no need to check balances here *) + ~fee_payer_balance:Currency.Amount.max_int + ~fee_payer_nonce: + ( Transaction_hash.User_command_with_valid_signature.command + cmd + |> User_command.nonce_exn ) + 
with + | Ok (t, _) -> + Some (cmd, t) + | Error (`Queued_txns_by_sender (error_str, queued_cmds)) -> + [%log error] + "Error handling committed transaction $cmd: $error " + ~metadata: + [ ( "cmd" + , Transaction_hash.User_command_with_valid_signature + .to_yojson cmd ) + ; ("error", `String error_str) + ; ( "queue" + , `List + (List.map (Sequence.to_list queued_cmds) + ~f:(fun c -> + Transaction_hash + .User_command_with_valid_signature + .to_yojson c)) ) + ] ; + failwith error_str ) | None -> - None ) + None) let transactions ~logger t = transactions' ~logger t.pool - let all_from_account {pool; _} = Indexed_pool.all_from_account pool + let all_from_account { pool; _ } = Indexed_pool.all_from_account pool - let get_all {pool; _} = Indexed_pool.get_all pool + let get_all { pool; _ } = Indexed_pool.get_all pool - let find_by_hash {pool; _} hash = Indexed_pool.find_by_hash pool hash + let find_by_hash { pool; _ } hash = Indexed_pool.find_by_hash pool hash (** Get the best tip ledger*) let get_best_tip_ledger frontier = @@ -352,21 +357,23 @@ struct let nonce_json = Account.Nonce.to_yojson in ( "invalid_nonce" , [ ( "between" - , `Assoc [("low", nonce_json low); ("hi", nonce_json hi)] ) - ; ("nonce", nonce_json nonce) ] ) + , `Assoc [ ("low", nonce_json low); ("hi", nonce_json hi) ] ) + ; ("nonce", nonce_json nonce) + ] ) | Invalid_nonce (`Expected enonce, nonce) -> let nonce_json = Account.Nonce.to_yojson in ( "invalid_nonce" - , [("expected_nonce", nonce_json enonce); ("nonce", nonce_json nonce)] - ) + , [ ("expected_nonce", nonce_json enonce) + ; ("nonce", nonce_json nonce) + ] ) | Insufficient_funds (`Balance bal, amt) -> let amt_json = Currency.Amount.to_yojson in ( "insufficient_funds" - , [("balance", amt_json bal); ("amount", amt_json amt)] ) + , [ ("balance", amt_json bal); ("amount", amt_json amt) ] ) | Insufficient_replace_fee (`Replace_fee rfee, fee) -> let fee_json = Currency.Fee.to_yojson in ( "insufficient_replace_fee" - , [("replace_fee", fee_json 
rfee); ("fee", fee_json fee)] ) + , [ ("replace_fee", fee_json rfee); ("fee", fee_json fee) ] ) | Overflow -> ("overflow", []) | Bad_token -> @@ -374,14 +381,15 @@ struct | Invalid_transaction -> ("invalid_transaction", []) | Unwanted_fee_token fee_token -> - ("unwanted_fee_token", [("fee_token", Token_id.to_yojson fee_token)]) + ("unwanted_fee_token", [ ("fee_token", Token_id.to_yojson fee_token) ]) | Expired (`Valid_until valid_until, `Current_global_slot current_global_slot) -> ( "expired" , [ ("valid_until", Mina_numbers.Global_slot.to_yojson valid_until) ; ( "current_global_slot" - , Mina_numbers.Global_slot.to_yojson current_global_slot ) ] ) + , Mina_numbers.Global_slot.to_yojson current_global_slot ) + ] ) let balance_of_account ~global_slot (account : Account.t) = match account.timing with @@ -392,7 +400,8 @@ struct ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } -> + ; vesting_increment + } -> Currency.Balance.sub_amount account.balance (Currency.Balance.to_amount (Account.min_balance_at_slot ~global_slot ~cliff_time @@ -406,10 +415,10 @@ struct None | Signed_command t -> Option.map (Signed_command.check t) ~f:(fun x -> - User_command.Signed_command x ) + User_command.Signed_command x) let handle_transition_frontier_diff - ( ({new_commands; removed_commands; reorg_best_tip= _} : + ( ({ new_commands; removed_commands; reorg_best_tip = _ } : Transition_frontier.best_tip_diff) , best_tip_ledger ) t = (* This runs whenever the best tip changes. 
The simple case is when the @@ -437,7 +446,8 @@ struct ( [ ( "cmd" , Transaction_hash.User_command_with_valid_signature.to_yojson cmd ) - ; ("error", `String error_str) ] + ; ("error", `String error_str) + ] @ metadata ) in [%log' trace t.logger] @@ -457,8 +467,8 @@ struct ( removed_commands |> List.rev |> Sequence.of_list |> Sequence.map ~f:(fun unchecked -> unchecked.data - |> Transaction_hash.User_command_with_valid_signature.create - ) ) + |> Transaction_hash.User_command_with_valid_signature.create) + ) ~init:(t.pool, Sequence.empty) ~f:(fun (pool, dropped_so_far) cmd -> ( match @@ -478,7 +488,7 @@ struct | Ok indexed_pool -> drop_until_below_max_size ~pool_max_size indexed_pool in - (pool', Sequence.append dropped_so_far dropped_seq) ) + (pool', Sequence.append dropped_so_far dropped_seq)) in (* Track what locally generated commands were removed from the pool during backtracking due to the max size constraint. *) @@ -497,7 +507,8 @@ struct (List.map ~f: Transaction_hash.User_command_with_valid_signature - .to_yojson locally_generated_dropped) ) ] ; + .to_yojson locally_generated_dropped) ) + ] ; let pool'', dropped_commit_conflicts = List.fold new_commands ~init:(pool', Sequence.empty) ~f:(fun (p, dropped_so_far) cmd -> @@ -520,8 +531,7 @@ struct let fee_payer = User_command.(fee_payer (forget_check cmd.data)) in let fee_payer_balance, fee_payer_nonce = balance fee_payer in let cmd' = - Transaction_hash.User_command_with_valid_signature.create - cmd.data + Transaction_hash.User_command_with_valid_signature.create cmd.data in ( match Hashtbl.find_and_remove t.locally_generated_uncommitted cmd' @@ -534,7 +544,8 @@ struct ~metadata: [ ( "cmd" , With_status.to_yojson User_command.Valid.to_yojson cmd - ) ] ; + ) + ] ; Hashtbl.add_exn t.locally_generated_committed ~key:cmd' ~data:time_added ) ; let p', dropped = @@ -549,8 +560,8 @@ struct "Error handling committed transaction $cmd: $error " ~metadata: [ ( "cmd" - , With_status.to_yojson User_command.Valid.to_yojson 
- cmd ) + , With_status.to_yojson User_command.Valid.to_yojson cmd + ) ; ("error", `String error_str) ; ( "queue" , `List @@ -558,15 +569,16 @@ struct ~f:(fun c -> Transaction_hash .User_command_with_valid_signature - .to_yojson c )) ) ] ; + .to_yojson c)) ) + ] ; failwith error_str in - (p', Sequence.append dropped_so_far dropped) ) + (p', Sequence.append dropped_so_far dropped)) in let commit_conflicts_locally_generated = Sequence.filter dropped_commit_conflicts ~f:(fun cmd -> Hashtbl.find_and_remove t.locally_generated_uncommitted cmd - |> Option.is_some ) + |> Option.is_some) in if not @@ Sequence.is_empty commit_conflicts_locally_generated then [%log' info t.logger] @@ -579,7 +591,8 @@ struct (Sequence.map commit_conflicts_locally_generated ~f: Transaction_hash.User_command_with_valid_signature - .to_yojson)) ) ] ; + .to_yojson)) ) + ] ; [%log' debug t.logger] !"Finished handling diff. Old pool size %i, new pool size %i. Dropped \ %i commands during backtracking to maintain max size." 
@@ -616,7 +629,8 @@ struct ~metadata: [ ( "cmd" , Transaction_hash.User_command_with_valid_signature - .to_yojson cmd ) ] ; + .to_yojson cmd ) + ] ; remove_cmd () ) else let unchecked = @@ -629,34 +643,35 @@ struct ~f:(Base_ledger.get best_tip_ledger) with | Some acct -> ( - match - Indexed_pool.add_from_gossip_exn t.pool (`Checked cmd) - acct.nonce ~verify:check_command - ( balance_of_account ~global_slot acct - |> Currency.Balance.to_amount ) - with - | Error e -> - let error_str, metadata = of_indexed_pool_error e in - log_and_remove error_str - ~metadata: - ( ("user_command", User_command.to_yojson unchecked) - :: metadata ) - | Ok (_, pool''', _) -> - [%log' debug t.logger] - "re-added locally generated command $cmd to transaction \ - pool after reorg" - ~metadata: - [ ( "cmd" - , Transaction_hash.User_command_with_valid_signature - .to_yojson cmd ) ] ; - Mina_metrics.( - Gauge.set Transaction_pool.pool_size - (Float.of_int (Indexed_pool.size pool'''))) ; - t.pool <- pool''' ) + match + Indexed_pool.add_from_gossip_exn t.pool (`Checked cmd) + acct.nonce ~verify:check_command + ( balance_of_account ~global_slot acct + |> Currency.Balance.to_amount ) + with + | Error e -> + let error_str, metadata = of_indexed_pool_error e in + log_and_remove error_str + ~metadata: + ( ("user_command", User_command.to_yojson unchecked) + :: metadata ) + | Ok (_, pool''', _) -> + [%log' debug t.logger] + "re-added locally generated command $cmd to \ + transaction pool after reorg" + ~metadata: + [ ( "cmd" + , Transaction_hash.User_command_with_valid_signature + .to_yojson cmd ) + ] ; + Mina_metrics.( + Gauge.set Transaction_pool.pool_size + (Float.of_int (Indexed_pool.size pool'''))) ; + t.pool <- pool''' ) | None -> log_and_remove "Fee_payer_account not found" ~metadata: - [("user_command", User_command.to_yojson unchecked)] ) ; + [ ("user_command", User_command.to_yojson unchecked) ]) ; (*Remove any expired user commands*) let expired_commands, pool = 
Indexed_pool.remove_expired t.pool in Sequence.iter expired_commands ~f:(fun cmd -> @@ -665,10 +680,11 @@ struct ~metadata: [ ( "cmd" , Transaction_hash.User_command_with_valid_signature.to_yojson - cmd ) ] ; + cmd ) + ] ; ignore ( Hashtbl.find_and_remove t.locally_generated_uncommitted cmd - : (Time.t * [`Batch of int]) option ) ) ; + : (Time.t * [ `Batch of int ]) option )) ; Mina_metrics.( Gauge.set Transaction_pool.pool_size (Float.of_int (Indexed_pool.size pool))) ; @@ -678,25 +694,26 @@ struct let create ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe ~config ~logger ~tf_diff_writer = let t = - { pool= + { pool = Indexed_pool.empty ~constraint_constants ~consensus_constants ~time_controller - ; locally_generated_uncommitted= + ; locally_generated_uncommitted = Hashtbl.create ( module Transaction_hash.User_command_with_valid_signature.Stable .Latest ) - ; locally_generated_committed= + ; locally_generated_committed = Hashtbl.create ( module Transaction_hash.User_command_with_valid_signature.Stable .Latest ) - ; current_batch= 0 - ; remaining_in_batch= max_per_15_seconds + ; current_batch = 0 + ; remaining_in_batch = max_per_15_seconds ; config ; logger - ; batcher= Batcher.create config.verifier - ; best_tip_diff_relay= None - ; recently_seen= Lru_cache.Q.create () - ; best_tip_ledger= None } + ; batcher = Batcher.create config.verifier + ; best_tip_diff_relay = None + ; recently_seen = Lru_cache.Q.create () + ; best_tip_ledger = None + } in don't_wait_for (Broadcast_pipe.Reader.iter frontier_broadcast_pipe @@ -722,7 +739,8 @@ struct "Transition frontier closed without first \ closing best tip view pipe" ; assert false ) - else ()) ] ) + else ()) + ] ) | Some frontier -> [%log debug] "Got frontier!" 
; let validation_ledger = get_best_tip_ledger frontier in @@ -749,7 +767,7 @@ struct in ( acc.nonce , balance_of_account ~global_slot acc - |> Currency.Balance.to_amount ) ) + |> Currency.Balance.to_amount )) in let dropped_locally_generated = Sequence.filter dropped ~f:(fun cmd -> @@ -764,7 +782,7 @@ struct in (* Nothing should be in both tables. *) assert (not (dropped_committed && dropped_uncommitted)) ; - dropped_committed || dropped_uncommitted ) + dropped_committed || dropped_uncommitted) in (* In this situation we don't know whether the commands aren't valid against the new ledger because they were already @@ -782,7 +800,8 @@ struct ~f: Transaction_hash .User_command_with_valid_signature - .to_yojson) ) ] ; + .to_yojson) ) + ] ; [%log debug] !"Re-validated transaction pool after restart: dropped %i \ of %i previously in pool" @@ -791,16 +810,16 @@ struct Gauge.set Transaction_pool.pool_size (Float.of_int (Indexed_pool.size new_pool))) ; t.pool <- new_pool ; - t.best_tip_diff_relay - <- Some - (Broadcast_pipe.Reader.iter - (Transition_frontier.best_tip_diff_pipe frontier) - ~f:(fun diff -> - Strict_pipe.Writer.write tf_diff_writer - (diff, get_best_tip_ledger frontier) - |> Deferred.don't_wait_for ; - Deferred.unit )) ; - Deferred.unit )) ; + t.best_tip_diff_relay <- + Some + (Broadcast_pipe.Reader.iter + (Transition_frontier.best_tip_diff_pipe frontier) + ~f:(fun diff -> + Strict_pipe.Writer.write tf_diff_writer + (diff, get_best_tip_ledger frontier) + |> Deferred.don't_wait_for ; + Deferred.unit)) ; + Deferred.unit)) ; t type pool = t @@ -841,7 +860,7 @@ struct let reject_overloaded_diff (diffs : verified) = List.map diffs ~f:(fun cmd -> - (User_command.Signed_command cmd, Diff_error.Overloaded) ) + (User_command.Signed_command cmd, Diff_error.Overloaded)) let empty = [] @@ -867,7 +886,8 @@ struct let is_local = Envelope.Sender.(equal Local sender) in let metadata = [ ("error", Error_json.error_to_yojson e) - ; ("sender", Envelope.Sender.to_yojson sender) ] 
+ ; ("sender", Envelope.Sender.to_yojson sender) + ] in [%log' error t.logger] ~metadata "Error verifying transaction pool diff from $sender: $error" ; @@ -882,8 +902,8 @@ struct (* TODO: Move the actual verify call to the end for now instead of "pre" doing it *) (* TODO: - Don't allow arbitrary transactions. Check the account actually has - the funds and the nonce is in [current nonce, current nonce + 10] or something. *) + Don't allow arbitrary transactions. Check the account actually has + the funds and the nonce is in [current nonce, current nonce + 10] or something. *) (* Transaction verification currently happens in apply. In the future we could batch it. *) let verify (t : pool) (diffs : t Envelope.Incoming.t) : @@ -903,7 +923,7 @@ struct ; ( "sender" , Envelope.(Sender.to_yojson (Incoming.sender diffs)) ) ] ; - is_valid ) + is_valid) in let h = Lru_cache.T.hash diffs.data in let (`Already_mem already_mem) = Lru_cache.add t.recently_seen h in @@ -923,30 +943,30 @@ struct "We don't have a transition frontier at the moment, so we're \ unable to verify any transactions." 
| Some _ledger -> ( - match - Option.all - (List.map diffs.data ~f:(function - | Snapp_command _ -> - None - | Signed_command x -> - Some x )) - |> Option.map ~f:(fun data -> {diffs with data}) - with - | None -> - let trust_record = - Trust_system.record_envelope_sender t.config.trust_system - t.logger sender - in - let%map () = - (* that's an insta-ban *) - trust_record - ( Trust_system.Actions.Sent_snapp_transaction - , Some ("peer sent snapp transaction", []) ) - in - Or_error.error_string "diff contained snapp transactions" - | Some diffs' -> - Deferred.Or_error.return diffs' - (* Currently we defer all verification to [apply] *) ) + match + Option.all + (List.map diffs.data ~f:(function + | Snapp_command _ -> + None + | Signed_command x -> + Some x)) + |> Option.map ~f:(fun data -> { diffs with data }) + with + | None -> + let trust_record = + Trust_system.record_envelope_sender t.config.trust_system + t.logger sender + in + let%map () = + (* that's an insta-ban *) + trust_record + ( Trust_system.Actions.Sent_snapp_transaction + , Some ("peer sent snapp transaction", []) ) + in + Or_error.error_string "diff contained snapp transactions" + | Some diffs' -> + Deferred.Or_error.return diffs' + (* Currently we defer all verification to [apply] *) ) let register_locally_generated t txn = Hashtbl.update t.locally_generated_uncommitted txn ~f:(function @@ -965,7 +985,7 @@ struct t.current_batch <- t.current_batch + 1 ; t.current_batch ) in - (Time.now (), `Batch batch_num) ) + (Time.now (), `Batch batch_num)) let apply t (env : verified Envelope.Incoming.t) = let txs = Envelope.Incoming.data env in @@ -980,8 +1000,8 @@ struct unavailable, ignoring." 
| Some ledger -> ( let trust_record = - Trust_system.record_envelope_sender t.config.trust_system - t.logger sender + Trust_system.record_envelope_sender t.config.trust_system t.logger + sender in let rec go txs' (accepted, rejected) = let open Interruptible.Deferred_let_syntax in @@ -997,7 +1017,7 @@ struct if is_sender_local then ( [%log' info t.logger] "Rebroadcasting $cmd already present in the pool" - ~metadata:[("cmd", User_command.to_yojson tx)] ; + ~metadata:[ ("cmd", User_command.to_yojson tx) ] ; Option.iter (check_command tx) ~f:(fun cmd -> (* Re-register to reset the rebroadcast timer. @@ -1005,12 +1025,11 @@ struct register_locally_generated t Transaction_hash.( User_command_with_valid_signature.make cmd - (User_command.hash tx')) ) ; + (User_command.hash tx'))) ; go txs'' (tx :: accepted, rejected) ) else let%bind _ = - trust_record - (Trust_system.Actions.Sent_old_gossip, None) + trust_record (Trust_system.Actions.Sent_old_gossip, None) in go txs'' ( accepted @@ -1029,7 +1048,7 @@ struct ( Trust_system.Actions.Sent_useless_gossip , Some ( "account does not exist for command: $cmd" - , [("cmd", User_command.to_yojson tx)] ) ) + , [ ("cmd", User_command.to_yojson tx) ] ) ) in go txs'' ( accepted @@ -1055,24 +1074,29 @@ struct , [ ( "between" , `Assoc [ ("low", nonce_json low) - ; ("hi", nonce_json hi) ] ) - ; ("nonce", nonce_json nonce) ] ) + ; ("hi", nonce_json hi) + ] ) + ; ("nonce", nonce_json nonce) + ] ) | Invalid_nonce (`Expected enonce, nonce) -> let nonce_json = Account.Nonce.to_yojson in ( Diff_versioned.Diff_error.Invalid_nonce , [ ("expected_nonce", nonce_json enonce) - ; ("nonce", nonce_json nonce) ] ) + ; ("nonce", nonce_json nonce) + ] ) | Insufficient_funds (`Balance bal, amt) -> let amt_json = Currency.Amount.to_yojson in ( Insufficient_funds , [ ("balance", amt_json bal) - ; ("amount", amt_json amt) ] ) + ; ("amount", amt_json amt) + ] ) | Insufficient_replace_fee (`Replace_fee rfee, fee) -> let fee_json = Currency.Fee.to_yojson in ( 
Insufficient_replace_fee , [ ("replace_fee", fee_json rfee) - ; ("fee", fee_json fee) ] ) + ; ("fee", fee_json fee) + ] ) | Overflow -> (Overflow, []) | Bad_token -> @@ -1081,8 +1105,8 @@ struct (Diff_error.Invalid_signature, []) | Unwanted_fee_token fee_token -> ( Unwanted_fee_token - , [("fee_token", Token_id.to_yojson fee_token)] - ) + , [ ("fee_token", Token_id.to_yojson fee_token) + ] ) | Expired ( `Valid_until valid_until , `Current_global_slot current_global_slot ) -> @@ -1092,7 +1116,8 @@ struct valid_until ) ; ( "current_global_slot" , Mina_numbers.Global_slot.to_yojson - current_global_slot ) ] ) + current_global_slot ) + ] ) in let yojson_fail_reason = Fn.compose @@ -1114,7 +1139,7 @@ struct | Unwanted_fee_token _ -> "unwanted fee token" | Expired _ -> - "expired" ) + "expired") in match add_res with | Ok (verified, pool', dropped) -> @@ -1132,8 +1157,8 @@ struct ( Trust_system.Actions.Sent_useful_gossip , Some ( "$cmd" - , [("cmd", User_command.to_yojson tx)] ) - ) + , [ ("cmd", User_command.to_yojson tx) ] + ) ) in let seq_cmd_to_yojson seq = `List @@ -1150,29 +1175,29 @@ struct "dropped commands due to transaction \ replacement: $dropped" ~metadata: - [("dropped", seq_cmd_to_yojson dropped)] ; + [ ("dropped", seq_cmd_to_yojson dropped) ] ; if not (Sequence.is_empty dropped_for_size) then [%log' debug t.logger] "dropped commands to maintain max size: $cmds" ~metadata: [ ( "cmds" - , seq_cmd_to_yojson dropped_for_size ) ] ; + , seq_cmd_to_yojson dropped_for_size ) + ] ; let locally_generated_dropped = Sequence.filter (Sequence.append dropped dropped_for_size) ~f:(fun tx_dropped -> Hashtbl.find_and_remove - t.locally_generated_uncommitted - tx_dropped - |> Option.is_some ) + t.locally_generated_uncommitted tx_dropped + |> Option.is_some) |> Sequence.to_list in if not (List.is_empty locally_generated_dropped) then [%log' info t.logger] "Dropped locally generated commands $cmds \ - from transaction pool due to replacement \ - or max size" + from 
transaction pool due to replacement or \ + max size" ~metadata: [ ( "cmds" , `List @@ -1181,7 +1206,8 @@ struct Transaction_hash .User_command_with_valid_signature .to_yojson - locally_generated_dropped) ) ] ; + locally_generated_dropped) ) + ] ; go txs'' (tx :: accepted, rejected) | Error (Insufficient_replace_fee @@ -1201,7 +1227,8 @@ struct ~metadata: [ ("cmd", User_command.to_yojson tx) ; ("rfee", Currency.Fee.to_yojson rfee) - ; ("fee", Currency.Fee.to_yojson fee) ] ; + ; ("fee", Currency.Fee.to_yojson fee) + ] ; go txs'' ( accepted , ( tx @@ -1211,7 +1238,7 @@ struct | Error (Unwanted_fee_token fee_token) -> (* We can't punish peers for this, since these are our specific preferences. - *) + *) let f_log = if is_sender_local then [%log' error t.logger] else [%log' debug t.logger] @@ -1221,20 +1248,21 @@ struct in $token" ~metadata: [ ("cmd", User_command.to_yojson tx) - ; ("token", Token_id.to_yojson fee_token) ] ; + ; ("token", Token_id.to_yojson fee_token) + ] ; go txs'' ( accepted , ( tx - , Diff_versioned.Diff_error - .Unwanted_fee_token ) + , Diff_versioned.Diff_error.Unwanted_fee_token + ) :: rejected ) | Error Invalid_transaction -> let%bind _ = trust_record ( Trust_system.Actions.Sent_useless_gossip , Some - ( "rejecting command because had \ - invalid signature or was malformed" + ( "rejecting command because had invalid \ + signature or was malformed" , [] ) ) in go txs'' @@ -1250,9 +1278,10 @@ struct if is_sender_local then [%str_log' error t.logger] (Rejecting_command_for_reason - { command= tx - ; reason= diff_err - ; error_extra }) ; + { command = tx + ; reason = diff_err + ; error_extra + }) ; let%bind _ = trust_record ( Trust_system.Actions.Sent_useless_gossip @@ -1273,7 +1302,7 @@ struct ( sprintf "rejecting command $cmd due to \ insufficient fee." 
- , [("cmd", User_command.to_yojson tx)] ) ) + , [ ("cmd", User_command.to_yojson tx) ] ) ) in go txs'' ( accepted @@ -1287,7 +1316,7 @@ struct let signal = Deferred.map (Base_ledger.detached_signal ledger) ~f:(fun () -> Error.createf "Ledger was detatched" - |> Error.tag ~tag:"Transaction_pool.apply" ) + |> Error.tag ~tag:"Transaction_pool.apply") in let%bind () = Interruptible.lift Deferred.unit signal in go txs ([], []) @@ -1302,8 +1331,7 @@ struct | Ok ((accepted, _) as e) -> ( if not (List.is_empty accepted) then Mina_metrics.( - Gauge.set - Transaction_pool.useful_transactions_received_time_sec + Gauge.set Transaction_pool.useful_transactions_received_time_sec (let x = Time.(now () |> to_span_since_epoch |> Span.to_sec) in @@ -1317,7 +1345,8 @@ struct let metadata ~key ~time = [ ( "cmd" , Transaction_hash.User_command_with_valid_signature.to_yojson key ) - ; ("time", `String (Time.to_string_abs ~zone:Time.Zone.utc time)) ] + ; ("time", `String (Time.to_string_abs ~zone:Time.Zone.utc time)) + ] in let added_str = "it was added at $time and its rebroadcast period is now expired." 
@@ -1332,7 +1361,7 @@ struct added_str ~metadata:(metadata ~key ~time) ; false | `Ok -> - true ) ; + true) ; Hashtbl.filteri_inplace t.locally_generated_committed ~f:(fun ~key ~data:(time, `Batch _) -> match has_timed_out time with @@ -1343,13 +1372,12 @@ struct added_str ~metadata:(metadata ~key ~time) ; false | `Ok -> - true ) ; + true) ; (* Important to maintain ordering here *) let rebroadcastable_txs = Hashtbl.to_alist t.locally_generated_uncommitted |> List.sort - ~compare:(fun (txn1, (_, `Batch batch1)) - (txn2, (_, `Batch batch2)) + ~compare:(fun (txn1, (_, `Batch batch1)) (txn2, (_, `Batch batch2)) -> let cmp = compare batch1 batch2 in let get_hash = @@ -1366,16 +1394,15 @@ struct (get_nonce txn2) in if cmp <> 0 then cmp - else Transaction_hash.compare (get_hash txn1) (get_hash txn2) - ) + else Transaction_hash.compare (get_hash txn1) (get_hash txn2)) |> List.group ~break:(fun (_, (_, `Batch batch1)) (_, (_, `Batch batch2)) -> - batch1 <> batch2 ) + batch1 <> batch2) |> List.map ~f: (List.map ~f:(fun (txn, _) -> Transaction_hash.User_command_with_valid_signature.command - txn )) + txn)) in rebroadcastable_txs end @@ -1390,7 +1417,7 @@ module Make (Staged_ledger : sig val ledger : t -> Mina_base.Ledger.t end) (Transition_frontier : Transition_frontier_intf - with type staged_ledger := Staged_ledger.t) : + with type staged_ledger := Staged_ledger.t) : S with type transition_frontier := Transition_frontier.t = Make0 (Mina_base.Ledger) (Staged_ledger) (Transition_frontier) @@ -1401,9 +1428,10 @@ include Make include Transition_frontier type best_tip_diff = Extensions.Best_tip_diff.view = - { new_commands: User_command.Valid.t With_status.t list - ; removed_commands: User_command.Valid.t With_status.t list - ; reorg_best_tip: bool } + { new_commands : User_command.Valid.t With_status.t list + ; removed_commands : User_command.Valid.t With_status.t list + ; reorg_best_tip : bool + } let best_tip_diff_pipe t = Extensions.(get_view_pipe (extensions t) 
Best_tip_diff) @@ -1432,7 +1460,7 @@ let%test_module _ = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) module Mock_transition_frontier = struct module Breadcrumb = struct @@ -1442,9 +1470,10 @@ let%test_module _ = end type best_tip_diff = - { new_commands: User_command.Valid.t With_status.t list - ; removed_commands: User_command.Valid.t With_status.t list - ; reorg_best_tip: bool } + { new_commands : User_command.Valid.t With_status.t list + ; removed_commands : User_command.Valid.t With_status.t list + ; reorg_best_tip : bool + } type t = best_tip_diff Broadcast_pipe.Reader.t * Breadcrumb.t ref @@ -1452,7 +1481,7 @@ let%test_module _ = fun () -> let pipe_r, pipe_w = Broadcast_pipe.create - {new_commands= []; removed_commands= []; reorg_best_tip= false} + { new_commands = []; removed_commands = []; reorg_best_tip = false } in let accounts = List.map (Array.to_list test_keys) ~f:(fun kp -> @@ -1460,7 +1489,7 @@ let%test_module _ = let account_id = Account_id.create compressed Token_id.default in ( account_id , Account.create account_id - @@ Currency.Balance.of_int 1_000_000_000_000 ) ) + @@ Currency.Balance.of_int 1_000_000_000_000 )) in let ledger = Account_id.Map.of_alist_exn accounts in ((pipe_r, ref ledger), pipe_w) @@ -1498,13 +1527,13 @@ let%test_module _ = Some cmd | `Right cmd -> (* Locally generated uncommitted transactions should be in the - pool, so long as we're not in the middle of updating it. *) + pool, so long as we're not in the middle of updating it. 
*) assert ( Indexed_pool.member pool.pool (Transaction_hash.User_command.of_checked key) ) ; - Some cmd ) + Some cmd) : ( Transaction_hash.User_command_with_valid_signature.t - , Time.t * [`Batch of int] ) + , Time.t * [ `Batch of int ] ) Hashtbl.t ) let setup_test () = @@ -1536,7 +1565,7 @@ let%test_module _ = ~f:Transaction_hash.User_command_with_valid_signature.command |> Sequence.to_list |> List.sort ~compare:User_command.compare ) - (List.sort ~compare:User_command.compare txs) ) + (List.sort ~compare:User_command.compare txs)) , pool , best_tip_diff_w , tf ) @@ -1565,23 +1594,24 @@ let%test_module _ = include Result (*let equal ok_eq err_eq a b = - match a, b with - | Ok a, Ok b -> ok_eq a b - | Error a, Error b -> err_eq a b - | _ -> false*) + match a, b with + | Ok a, Ok b -> ok_eq a b + | Error a, Error b -> err_eq a b + | _ -> false*) end - type pool_apply = (User_command.t list, [`Other of Error.t]) Result.t + type pool_apply = (User_command.t list, [ `Other of Error.t ]) Result.t [@@deriving sexp, compare] let accepted_commands = Result.map ~f:fst let mk_with_status (cmd : User_command.Valid.t) = - { With_status.data= cmd - ; status= + { With_status.data = cmd + ; status = Applied ( Transaction_status.Auxiliary_data.empty - , Transaction_status.Balance_data.empty ) } + , Transaction_status.Balance_data.empty ) + } let independent_signed_cmds' = List.map independent_cmds' ~f:(function @@ -1590,7 +1620,7 @@ let%test_module _ = | _ -> failwith "when snapp commands are enabled, [independent_signed_cmds'] \ - should be removed." 
) + should be removed.") let%test_unit "transactions are removed in linear case" = Thread_safe.block_on_async_exn (fun () -> @@ -1608,23 +1638,25 @@ let%test_module _ = assert_pool_txs independent_cmds' ; let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= [mk_with_status (List.hd_exn independent_cmds)] - ; removed_commands= [] - ; reorg_best_tip= false } + { new_commands = [ mk_with_status (List.hd_exn independent_cmds) ] + ; removed_commands = [] + ; reorg_best_tip = false + } in let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in assert_pool_txs (List.tl_exn independent_cmds') ; let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status (List.take (List.tl_exn independent_cmds) 2) - ; removed_commands= [] - ; reorg_best_tip= false } + ; removed_commands = [] + ; reorg_best_tip = false + } in let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in assert_pool_txs (List.drop independent_cmds' 3) ; - Deferred.unit ) + Deferred.unit) let rec map_set_multi map pairs = match pairs with @@ -1639,19 +1671,20 @@ let%test_module _ = let public_key = Public_key.compress @@ test_keys.(i).public_key in ( i , { Account.Poly.Stable.Latest.public_key - ; token_id= Token_id.default - ; token_permissions= - Token_permissions.Not_owned {account_disabled= false} - ; balance= Currency.Balance.of_int balance - ; nonce= Account.Nonce.of_int nonce - ; receipt_chain_hash= Receipt.Chain_hash.empty - ; delegate= Some public_key - ; voting_for= + ; token_id = Token_id.default + ; token_permissions = + Token_permissions.Not_owned { account_disabled = false } + ; balance = Currency.Balance.of_int balance + ; nonce = Account.Nonce.of_int nonce + ; receipt_chain_hash = Receipt.Chain_hash.empty + ; delegate = Some public_key + ; voting_for = Quickcheck.random_value ~seed:(`Deterministic "constant") State_hash.gen - ; timing= Account.Timing.Untimed - ; permissions= Permissions.user_default - 
; snapp= None } ) + ; timing = Account.Timing.Untimed + ; permissions = Permissions.user_default + ; snapp = None + } ) let%test_unit "Transactions are removed and added back in fork changes" = Thread_safe.block_on_async_exn (fun () -> @@ -1669,18 +1702,19 @@ let%test_module _ = (accepted_commands apply_res) (Ok (List.hd_exn independent_cmds' :: List.drop independent_cmds' 2)) ; best_tip_ref := - map_set_multi !best_tip_ref [mk_account 1 1_000_000_000_000 1] ; + map_set_multi !best_tip_ref [ mk_account 1 1_000_000_000_000 1 ] ; let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ List.take independent_cmds 1 - ; removed_commands= + ; removed_commands = List.map ~f:mk_with_status - @@ [List.nth_exn independent_cmds 1] - ; reorg_best_tip= true } + @@ [ List.nth_exn independent_cmds 1 ] + ; reorg_best_tip = true + } in assert_pool_txs (List.tl_exn independent_cmds') ; - Deferred.unit ) + Deferred.unit) let%test_unit "invalid transactions are not accepted" = Thread_safe.block_on_async_exn (fun () -> @@ -1690,11 +1724,14 @@ let%test_module _ = assert_pool_txs [] ; best_tip_ref := map_set_multi !best_tip_ref - [mk_account 0 0 0; mk_account 1 1_000_000_000_000 1] ; + [ mk_account 0 0 0; mk_account 1 1_000_000_000_000 1 ] ; (* need a best tip diff so the ref is actually read *) let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - {new_commands= []; removed_commands= []; reorg_best_tip= false} + { new_commands = [] + ; removed_commands = [] + ; reorg_best_tip = false + } in let%bind apply_res = Test.Resource_pool.Diff.unsafe_apply pool @@ -1704,7 +1741,7 @@ let%test_module _ = (Ok (List.drop independent_cmds' 2)) (accepted_commands apply_res) ; assert_pool_txs (List.drop independent_cmds' 2) ; - Deferred.unit ) + Deferred.unit) let mk_payment' ?valid_until sender_idx fee nonce receiver_idx amount = let get_pk idx = Public_key.compress test_keys.(idx).public_key in @@ -1716,10 +1753,11 @@ 
let%test_module _ = ~memo:(Signed_command_memo.create_by_digesting_string_exn "foo") ~body: (Signed_command_payload.Body.Payment - { source_pk= get_pk sender_idx - ; receiver_pk= get_pk receiver_idx - ; token_id= Token_id.default - ; amount= Currency.Amount.of_int amount })) + { source_pk = get_pk sender_idx + ; receiver_pk = get_pk receiver_idx + ; token_id = Token_id.default + ; amount = Currency.Amount.of_int amount + })) let mk_payment ?valid_until sender_idx fee nonce receiver_idx amount = User_command.Signed_command @@ -1739,13 +1777,14 @@ let%test_module _ = in assert_pool_txs [] ; best_tip_ref := - map_set_multi !best_tip_ref [mk_account 0 1_000_000_000_000 1] ; + map_set_multi !best_tip_ref [ mk_account 0 1_000_000_000_000 1 ] ; let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ List.take independent_cmds 2 - ; removed_commands= [] - ; reorg_best_tip= false } + ; removed_commands = [] + ; reorg_best_tip = false + } in assert_pool_txs [] ; let cmd1 = @@ -1765,26 +1804,28 @@ let%test_module _ = | Signed_command x -> Signed_command.forget_check x | _ -> - failwith "fix when snapps are enabled" ) ] + failwith "fix when snapps are enabled" ) + ] in [%test_eq: pool_apply] (accepted_commands apply_res) - (Ok [User_command.forget_check cmd1]) ; - assert_pool_txs [User_command.forget_check cmd1] ; + (Ok [ User_command.forget_check cmd1 ]) ; + assert_pool_txs [ User_command.forget_check cmd1 ] ; let cmd2 = mk_payment 0 1_000_000_000 0 5 999_000_000_000 in - best_tip_ref := map_set_multi !best_tip_ref [mk_account 0 0 1] ; + best_tip_ref := map_set_multi !best_tip_ref [ mk_account 0 0 1 ] ; let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ (cmd2 :: List.drop independent_cmds 2) - ; removed_commands= + ; removed_commands = List.map ~f:mk_with_status @@ List.take independent_cmds 2 - ; reorg_best_tip= true } + ; 
reorg_best_tip = true + } in (*first cmd from removed_commands gets replaced by cmd2 (same sender), cmd1 is invalid because of insufficient balance, and so only the second cmd from removed_commands is expected to be in the pool*) - assert_pool_txs [List.nth_exn independent_cmds' 1] ; - Deferred.unit ) + assert_pool_txs [ List.nth_exn independent_cmds' 1 ] ; + Deferred.unit) let extract_signed_commands = List.map ~f:(function @@ -1793,12 +1834,11 @@ let%test_module _ = | _ -> failwith "when snapp commands are enabled, [extract_signed_commands] \ - should be removed." ) + should be removed.") let%test_unit "expired transactions are not accepted" = Thread_safe.block_on_async_exn (fun () -> - let%bind assert_pool_txs, pool, _best_tip_diff_w, (_, _best_tip_ref) - = + let%bind assert_pool_txs, pool, _best_tip_diff_w, (_, _best_tip_ref) = setup_test () in assert_pool_txs [] ; @@ -1813,7 +1853,8 @@ let%test_module _ = let expired_commands = [ mk_payment ~valid_until:curr_slot 0 1_000_000_000 1 9 1_000_000_000 - ; mk_payment 0 1_000_000_000 2 9 1_000_000_000 ] + ; mk_payment 0 1_000_000_000 2 9 1_000_000_000 + ] in (*Wait till global slot increases* by 1 which invalidates the commands with valid_until=curr_slot*) let%bind () = @@ -1821,7 +1862,7 @@ let%test_module _ = (Block_time.Span.to_time_span consensus_constants.block_window_duration_ms) in - let all_valid_commands = independent_cmds @ [valid_command] in + let all_valid_commands = independent_cmds @ [ valid_command ] in let%bind apply_res = Test.Resource_pool.Diff.unsafe_apply pool @@ Envelope.Incoming.local @@ -1834,7 +1875,7 @@ let%test_module _ = [%test_eq: pool_apply] (Ok cmds_wo_check) (accepted_commands apply_res) ; assert_pool_txs cmds_wo_check ; - Deferred.unit ) + Deferred.unit) let%test_unit "Expired transactions that are already in the pool are \ removed from the pool when best tip changes" = @@ -1862,7 +1903,7 @@ let%test_module _ = mk_payment ~valid_until:curr_slot_plus_seven 0 1_000_000_000 2 9 
10_000_000_000 in - let valid_commands = few_now @ [expires_later1; expires_later2] in + let valid_commands = few_now @ [ expires_later1; expires_later2 ] in let%bind apply_res = Test.Resource_pool.Diff.unsafe_apply pool @@ Envelope.Incoming.local (extract_signed_commands valid_commands) @@ -1877,14 +1918,15 @@ let%test_module _ = (*new commands from best tip diff should be removed from the pool*) (*update the nonce to be consistent with the commands in the block*) best_tip_ref := - map_set_multi !best_tip_ref [mk_account 0 1_000_000_000_000_000 2] ; + map_set_multi !best_tip_ref [ mk_account 0 1_000_000_000_000_000 2 ] ; let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status - [List.nth_exn few_now 0; expires_later1] - ; removed_commands= [] - ; reorg_best_tip= false } + [ List.nth_exn few_now 0; expires_later1 ] + ; removed_commands = [] + ; reorg_best_tip = false + } in let cmds_wo_check = List.map ~f:User_command.forget_check @@ -1902,13 +1944,16 @@ let%test_module _ = in let valid_forever = List.nth_exn few_now 0 in let removed_commands = - [valid_forever; expires_later1; expired_command; unexpired_command] + [ valid_forever + ; expires_later1 + ; expired_command + ; unexpired_command + ] |> List.map ~f:mk_with_status in let n_block_times n = Int64.( - Block_time.Span.to_ms - consensus_constants.block_window_duration_ms + Block_time.Span.to_ms consensus_constants.block_window_duration_ms * n) |> Block_time.Span.of_ms in @@ -1917,9 +1962,10 @@ let%test_module _ = in let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= [mk_with_status valid_forever] + { new_commands = [ mk_with_status valid_forever ] ; removed_commands - ; reorg_best_tip= true } + ; reorg_best_tip = true + } in (*expired_command should not be in the pool becuase they are expired and (List.nth few_now 0) becuase it was committed in a block*) let cmds_wo_check = @@ -1935,17 +1981,20 @@ let%test_module _ = in 
let%bind _ = Broadcast_pipe.Writer.write best_tip_diff_w - {new_commands= []; removed_commands= []; reorg_best_tip= false} + { new_commands = [] + ; removed_commands = [] + ; reorg_best_tip = false + } in let cmds_wo_check = List.map ~f:User_command.forget_check (List.drop few_now 1) in let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in assert_pool_txs cmds_wo_check ; - Deferred.unit ) + Deferred.unit) - let%test_unit "Now-invalid transactions are removed from the pool when \ - the transition frontier is recreated" = + let%test_unit "Now-invalid transactions are removed from the pool when the \ + transition frontier is recreated" = Thread_safe.block_on_async_exn (fun () -> (* Set up initial frontier *) let frontier_pipe_r, frontier_pipe_w = Broadcast_pipe.create None in @@ -1971,8 +2020,7 @@ let%test_module _ = [%test_eq: User_command.t List.t] ( Test.Resource_pool.transactions ~logger pool |> Sequence.map - ~f: - Transaction_hash.User_command_with_valid_signature.command + ~f:Transaction_hash.User_command_with_valid_signature.command |> Sequence.to_list |> List.sort ~compare:User_command.compare ) @@ List.sort ~compare:User_command.compare txs @@ -2001,12 +2049,13 @@ let%test_module _ = map_set_multi !ledger_ref2 [ mk_account 0 20_000_000_000_000 5 ; mk_account 1 0 0 - ; mk_account 2 0 1 ] ; + ; mk_account 2 0 1 + ] ; let%bind _ = Broadcast_pipe.Writer.write frontier_pipe_w (Some frontier2) in assert_pool_txs @@ List.drop independent_cmds' 3 ; - Deferred.unit ) + Deferred.unit) let%test_unit "transaction replacement works" = Thread_safe.block_on_async_exn @@ -2019,26 +2068,30 @@ let%test_module _ = let sender_pk = Public_key.compress sender_kp.public_key in let payload : Signed_command.Payload.t = match tx.payload with - | {common; body= Payment payload} -> - { common= {common with fee_payer_pk= sender_pk} - ; body= Payment {payload with source_pk= sender_pk} } - | {common; body= Stake_delegation (Set_delegate payload)} -> - { common= {common with 
fee_payer_pk= sender_pk} - ; body= + | { common; body = Payment payload } -> + { common = { common with fee_payer_pk = sender_pk } + ; body = Payment { payload with source_pk = sender_pk } + } + | { common; body = Stake_delegation (Set_delegate payload) } -> + { common = { common with fee_payer_pk = sender_pk } + ; body = Stake_delegation - (Set_delegate {payload with delegator= sender_pk}) } + (Set_delegate { payload with delegator = sender_pk }) + } | { common - ; body= - (Create_new_token _ | Create_token_account _ | Mint_tokens _) - as body } -> - {common= {common with fee_payer_pk= sender_pk}; body} + ; body = + (Create_new_token _ | Create_token_account _ | Mint_tokens _) as + body + } -> + { common = { common with fee_payer_pk = sender_pk }; body } in User_command.Signed_command (Signed_command.sign sender_kp payload) in let txs0 = [ mk_payment' 0 1_000_000_000 0 9 20_000_000_000 ; mk_payment' 0 1_000_000_000 1 9 12_000_000_000 - ; mk_payment' 0 1_000_000_000 2 9 500_000_000_000 ] + ; mk_payment' 0 1_000_000_000 2 9 500_000_000_000 + ] in let txs0' = List.map txs0 ~f:Signed_command.forget_check in let txs1 = List.map ~f:(set_sender 1) txs0' in @@ -2081,7 +2134,8 @@ let%test_module _ = in Currency.Balance.to_int account.balance - amount in - mk_payment 3 fee 1 4 amount) ] + mk_payment 3 fee 1 4 amount) + ] in let%bind apply_res_2 = Test.Resource_pool.Diff.unsafe_apply pool @@ -2089,12 +2143,12 @@ let%test_module _ = in let replace_txs = List.map replace_txs ~f:User_command.forget_check in [%test_eq: pool_apply] - (Ok [List.nth_exn replace_txs 0; List.nth_exn replace_txs 2]) + (Ok [ List.nth_exn replace_txs 0; List.nth_exn replace_txs 2 ]) (accepted_commands apply_res_2) ; Deferred.unit - let%test_unit "it drops queued transactions if a committed one makes \ - there be insufficient funds" = + let%test_unit "it drops queued transactions if a committed one makes there \ + be insufficient funds" = Thread_safe.block_on_async_exn @@ fun () -> let%bind 
assert_pool_txs, pool, best_tip_diff_w, (_, best_tip_ref) = @@ -2103,7 +2157,8 @@ let%test_module _ = let txs = [ mk_payment 0 5_000_000_000 0 9 20_000_000_000 ; mk_payment 0 6_000_000_000 1 5 77_000_000_000 - ; mk_payment 0 1_000_000_000 2 3 891_000_000_000 ] + ; mk_payment 0 1_000_000_000 2 3 891_000_000_000 + ] in let committed_tx = mk_payment 0 5_000_000_000 0 2 25_000_000_000 in let%bind apply_res = @@ -2114,14 +2169,15 @@ let%test_module _ = [%test_eq: pool_apply] (Ok txs) (accepted_commands apply_res) ; assert_pool_txs @@ txs ; best_tip_ref := - map_set_multi !best_tip_ref [mk_account 0 970_000_000_000 1] ; + map_set_multi !best_tip_ref [ mk_account 0 970_000_000_000 1 ] ; let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= List.map ~f:mk_with_status @@ [committed_tx] - ; removed_commands= [] - ; reorg_best_tip= false } + { new_commands = List.map ~f:mk_with_status @@ [ committed_tx ] + ; removed_commands = [] + ; reorg_best_tip = false + } in - assert_pool_txs [List.nth_exn txs 1] ; + assert_pool_txs [ List.nth_exn txs 1 ] ; Deferred.unit let%test_unit "max size is maintained" = @@ -2136,10 +2192,8 @@ let%test_module _ = return (init_ledger_state, cmds)) ~f:(fun (init_ledger_state, cmds) -> Thread_safe.block_on_async_exn (fun () -> - let%bind ( _assert_pool_txs - , pool - , best_tip_diff_w - , (_, best_tip_ref) ) = + let%bind _assert_pool_txs, pool, best_tip_diff_w, (_, best_tip_ref) + = setup_test () in let mock_ledger = @@ -2152,17 +2206,21 @@ let%test_module _ = in ( account_id , { (Account.initialize account_id) with - balance= + balance = Currency.Balance.of_uint64 (Currency.Amount.to_uint64 balance) ; nonce - ; timing } ) ) + ; timing + } )) |> Sequence.to_list ) in best_tip_ref := mock_ledger ; let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - {new_commands= []; removed_commands= []; reorg_best_tip= true} + { new_commands = [] + ; removed_commands = [] + ; reorg_best_tip = true + } in let cmds1, cmds2 = List.split_n 
cmds pool_max_size in let%bind apply_res1 = @@ -2181,12 +2239,12 @@ let%test_module _ = commands have higher fee than the lowest one already in the pool. *) - assert (Indexed_pool.size pool.pool <= pool_max_size) ) ) + assert (Indexed_pool.size pool.pool <= pool_max_size))) let assert_rebroadcastable pool cmds = let normalize = List.sort ~compare:User_command.compare in let expected = - match normalize cmds with [] -> [] | normalized -> [normalized] + match normalize cmds with [] -> [] | normalized -> [ normalized ] in [%test_eq: User_command.t list list] ( List.map ~f:normalize @@ -2240,11 +2298,12 @@ let%test_module _ = longer rebroadcastable *) let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ List.take local_cmds 2 @ List.take remote_cmds 3 - ; removed_commands= [] - ; reorg_best_tip= false } + ; removed_commands = [] + ; reorg_best_tip = false + } in assert_pool_txs (List.drop local_cmds' 2 @ List.drop remote_cmds' 3) ; assert_rebroadcastable pool (List.drop local_cmds' 2) ; @@ -2252,22 +2311,24 @@ let%test_module _ = rebroadcastable pool, if they were removed and not re-added *) let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ List.take local_cmds 1 - ; removed_commands= + ; removed_commands = List.map ~f:mk_with_status @@ List.take local_cmds 2 - ; reorg_best_tip= true } + ; reorg_best_tip = true + } in assert_pool_txs (List.tl_exn local_cmds' @ List.drop remote_cmds' 3) ; assert_rebroadcastable pool (List.tl_exn local_cmds') ; (* Committing them again removes them from the pool again. 
*) let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= + { new_commands = List.map ~f:mk_with_status @@ List.tl_exn local_cmds @ List.drop remote_cmds 3 - ; removed_commands= [] - ; reorg_best_tip= false } + ; removed_commands = [] + ; reorg_best_tip = false + } in assert_pool_txs [] ; assert_rebroadcastable pool [] ; @@ -2275,11 +2336,12 @@ let%test_module _ = into the rebroadcastable pool. *) let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= [] - ; removed_commands= + { new_commands = [] + ; removed_commands = List.map ~f:mk_with_status @@ List.drop local_cmds 3 @ remote_cmds - ; reorg_best_tip= true } + ; reorg_best_tip = true + } in assert_pool_txs (List.drop local_cmds' 3 @ remote_cmds') ; assert_rebroadcastable pool (List.drop local_cmds' 3) ; @@ -2287,10 +2349,11 @@ let%test_module _ = two step reorg processes) *) let%bind () = Broadcast_pipe.Writer.write best_tip_diff_w - { new_commands= - List.map ~f:mk_with_status @@ [List.nth_exn local_cmds 3] - ; removed_commands= [] - ; reorg_best_tip= false } + { new_commands = + List.map ~f:mk_with_status @@ [ List.nth_exn local_cmds 3 ] + ; removed_commands = [] + ; reorg_best_tip = false + } in assert_pool_txs (List.drop local_cmds' 4 @ remote_cmds') ; assert_rebroadcastable pool (List.drop local_cmds' 4) ; @@ -2303,5 +2366,5 @@ let%test_module _ = : User_command.t list list ) ; assert_pool_txs (List.drop local_cmds' 4 @ remote_cmds') ; assert_rebroadcastable pool [] ; - Deferred.unit ) + Deferred.unit) end ) diff --git a/src/lib/node_addrs_and_ports/node_addrs_and_ports.ml b/src/lib/node_addrs_and_ports/node_addrs_and_ports.ml index aebd64364ec..b71d7ef3537 100644 --- a/src/lib/node_addrs_and_ports/node_addrs_and_ports.ml +++ b/src/lib/node_addrs_and_ports/node_addrs_and_ports.ml @@ -3,13 +3,14 @@ open Network_peer (** Network information for speaking to this daemon. 
*) type t = - { external_ip: Unix.Inet_addr.Blocking_sexp.t - ; bind_ip: Unix.Inet_addr.Blocking_sexp.t + { external_ip : Unix.Inet_addr.Blocking_sexp.t + ; bind_ip : Unix.Inet_addr.Blocking_sexp.t (** When peer is [None], the peer_id will be auto-generated and this field replaced with [Some] after libp2p initialization. *) - ; mutable peer: Peer.Stable.Latest.t option - ; libp2p_port: int - ; client_port: int } + ; mutable peer : Peer.Stable.Latest.t option + ; libp2p_port : int + ; client_port : int + } [@@deriving fields, sexp] module Display = struct @@ -17,11 +18,12 @@ module Display = struct module Stable = struct module V1 = struct type t = - { external_ip: string - ; bind_ip: string - ; peer: Peer.Display.Stable.V1.t option - ; libp2p_port: int - ; client_port: int } + { external_ip : string + ; bind_ip : string + ; peer : Peer.Display.Stable.V1.t option + ; libp2p_port : int + ; client_port : int + } [@@deriving fields, yojson, sexp] let to_latest = Fn.id @@ -31,18 +33,20 @@ end let to_display (t : t) = Display. 
- { external_ip= Unix.Inet_addr.to_string t.external_ip - ; bind_ip= Unix.Inet_addr.to_string t.bind_ip - ; peer= Option.map ~f:Peer.to_display t.peer - ; libp2p_port= t.libp2p_port - ; client_port= t.client_port } + { external_ip = Unix.Inet_addr.to_string t.external_ip + ; bind_ip = Unix.Inet_addr.to_string t.bind_ip + ; peer = Option.map ~f:Peer.to_display t.peer + ; libp2p_port = t.libp2p_port + ; client_port = t.client_port + } let of_display (d : Display.t) : t = - { external_ip= Unix.Inet_addr.of_string d.external_ip - ; bind_ip= Unix.Inet_addr.of_string d.bind_ip - ; peer= Option.map ~f:Peer.of_display d.peer - ; libp2p_port= d.libp2p_port - ; client_port= d.client_port } + { external_ip = Unix.Inet_addr.of_string d.external_ip + ; bind_ip = Unix.Inet_addr.of_string d.bind_ip + ; peer = Option.map ~f:Peer.of_display d.peer + ; libp2p_port = d.libp2p_port + ; client_port = d.client_port + } let to_multiaddr (t : t) = match t.peer with @@ -62,7 +66,7 @@ let to_multiaddr_exn t = let to_yojson = Fn.compose Display.Stable.V1.to_yojson to_display let to_peer_exn : t -> Peer.t = function - | {peer= Some peer; _} -> + | { peer = Some peer; _ } -> peer | _ -> failwith "to_peer_exn: no peer yet" diff --git a/src/lib/node_status/node_status.ml b/src/lib/node_status/node_status.ml index ca00337d5a6..132c79debb1 100644 --- a/src/lib/node_status/node_status.ml +++ b/src/lib/node_status/node_status.ml @@ -13,11 +13,11 @@ let get_node_status_from_peers (net : Mina_networking.t) | None -> Mina_networking.peers net >>= run | Some peers -> ( - match Option.all (List.map ~f:Mina_net2.Multiaddr.to_peer peers) with - | Some peers -> - run peers - | None -> - Deferred.return - (List.map peers ~f:(fun _ -> - Or_error.error_string - "Could not parse peers in node status request" )) ) + match Option.all (List.map ~f:Mina_net2.Multiaddr.to_peer peers) with + | Some peers -> + run peers + | None -> + Deferred.return + (List.map peers ~f:(fun _ -> + Or_error.error_string + "Could not 
parse peers in node status request")) ) diff --git a/src/lib/non_empty_list/non_empty_list.ml b/src/lib/non_empty_list/non_empty_list.ml index 475ce813954..b7219db1491 100644 --- a/src/lib/non_empty_list/non_empty_list.ml +++ b/src/lib/non_empty_list/non_empty_list.ml @@ -42,8 +42,7 @@ module C = Container.Make (struct let length = `Define_using_fold end) -[%%define_locally -C.(find, find_map, iter, length)] +[%%define_locally C.(find, find_map, iter, length)] let fold (x, xs) ~init ~f = List.fold xs ~init:(init x) ~f @@ -61,11 +60,11 @@ let take (x, xs) = function let min_elt ~compare (x, xs) = Option.value_map ~default:x (List.min_elt ~compare xs) ~f:(fun mininum -> - if compare x mininum < 0 then x else mininum ) + if compare x mininum < 0 then x else mininum) let max_elt ~compare (x, xs) = Option.value_map ~default:x (List.max_elt ~compare xs) ~f:(fun maximum -> - if compare x maximum > 0 then x else maximum ) + if compare x maximum > 0 then x else maximum) let rec iter_deferred (x, xs) ~f = let open Async_kernel in diff --git a/src/lib/non_empty_list/non_empty_list.mli b/src/lib/non_empty_list/non_empty_list.mli index 64f8b97f3de..57597f85b24 100644 --- a/src/lib/non_empty_list/non_empty_list.mli +++ b/src/lib/non_empty_list/non_empty_list.mli @@ -69,6 +69,4 @@ val min_elt : compare:('a -> 'a -> int) -> 'a t -> 'a val max_elt : compare:('a -> 'a -> int) -> 'a t -> 'a val iter_deferred : - 'a t - -> f:('a -> unit Async_kernel.Deferred.t) - -> unit Async_kernel.Deferred.t + 'a t -> f:('a -> unit Async_kernel.Deferred.t) -> unit Async_kernel.Deferred.t diff --git a/src/lib/non_zero_curve_point/compressed_poly.ml b/src/lib/non_zero_curve_point/compressed_poly.ml index 20e033d4db2..886bab8a0b3 100644 --- a/src/lib/non_zero_curve_point/compressed_poly.ml +++ b/src/lib/non_zero_curve_point/compressed_poly.ml @@ -1,7 +1,6 @@ (* compressed_poly.ml -- versioned type with parameters for compressed curve point *) -[%%import -"/src/config.mlh"] +[%%import 
"/src/config.mlh"] open Core_kernel @@ -9,7 +8,7 @@ module Poly = struct [%%versioned module Stable = struct module V1 = struct - type ('field, 'boolean) t = {x: 'field; is_odd: 'boolean} + type ('field, 'boolean) t = { x : 'field; is_odd : 'boolean } [@@deriving compare, equal, hash, hlist] end end] diff --git a/src/lib/non_zero_curve_point/non_zero_curve_point.ml b/src/lib/non_zero_curve_point/non_zero_curve_point.ml index 6f9c5bdba86..036a8235b3b 100644 --- a/src/lib/non_zero_curve_point/non_zero_curve_point.ml +++ b/src/lib/non_zero_curve_point/non_zero_curve_point.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -24,7 +22,7 @@ let gen_uncompressed = Quickcheck.Generator.filter_map Field.gen_uniform ~f:(fun x -> let open Option.Let_syntax in let%map y = Inner_curve.find_y x in - (x, y) ) + (x, y)) module Compressed = struct open Compressed_poly @@ -50,7 +48,7 @@ module Compressed = struct end] end - let compress (x, y) = {Poly.x; is_odd= parity y} + let compress (x, y) = { Poly.x; is_odd = parity y } [%%versioned_asserted module Stable = struct @@ -82,10 +80,9 @@ module Compressed = struct module Tests = struct (* these tests check not only whether the serialization of the version-asserted type has changed, but also whether the serializations for the consensus and nonconsensus code are identical - *) + *) - [%%if - curve_size = 255] + [%%if curve_size = 255] let%test "nonzero_curve_point_compressed v1" = let point = @@ -113,30 +110,30 @@ module Compressed = struct let to_string = to_base58_check - [%%define_locally - Stable.Latest.(sexp_of_t, t_of_sexp, gen)] + [%%define_locally Stable.Latest.(sexp_of_t, t_of_sexp, gen)] - let compress (x, y) = {Poly.x; is_odd= parity y} + let compress (x, y) = { Poly.x; is_odd = parity y } - let empty = Poly.{x= Field.zero; is_odd= false} + let empty = Poly.{ x = Field.zero; is_odd = false } 
- let to_input {Poly.x; is_odd} = - {Random_oracle.Input.field_elements= [|x|]; bitstrings= [|[is_odd]|]} + let to_input { Poly.x; is_odd } = + { Random_oracle.Input.field_elements = [| x |] + ; bitstrings = [| [ is_odd ] |] + } - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] (* snarky-dependent *) type var = (Field.Var.t, Boolean.var) Poly.t let typ : (var, t) Typ.t = - Typ.of_hlistable [Field.typ; Boolean.typ] ~var_to_hlist:Poly.to_hlist + Typ.of_hlistable [ Field.typ; Boolean.typ ] ~var_to_hlist:Poly.to_hlist ~var_of_hlist:Poly.of_hlist ~value_to_hlist:Poly.to_hlist ~value_of_hlist:Poly.of_hlist - let var_of_t ({x; is_odd} : t) : var = - {x= Field.Var.constant x; is_odd= Boolean.var_of_value is_odd} + let var_of_t ({ x; is_odd } : t) : var = + { x = Field.Var.constant x; is_odd = Boolean.var_of_value is_odd } let assert_equal (t1 : var) (t2 : var) = let%map () = Field.Checked.Assert.equal t1.x t2.x @@ -154,7 +151,7 @@ module Compressed = struct let if_ cond ~then_:t1 ~else_:t2 = let%map x = Field.Checked.if_ cond ~then_:t1.Poly.x ~else_:t2.Poly.x and is_odd = Boolean.if_ cond ~then_:t1.is_odd ~else_:t2.is_odd in - Poly.{x; is_odd} + Poly.{ x; is_odd } module Assert = struct let equal t1 t2 = @@ -169,11 +166,11 @@ module Compressed = struct end module Uncompressed = struct - let decompress ({x; is_odd} : Compressed.t) = + let decompress ({ x; is_odd } : Compressed.t) = Option.map (Inner_curve.find_y x) ~f:(fun y -> let y_parity = parity y in let y = if Bool.(is_odd = y_parity) then y else Field.negate y in - (x, y) ) + (x, y)) let decompress_exn t = match decompress t with @@ -259,10 +256,9 @@ module Uncompressed = struct let%test_unit "point-compression: decompress . 
compress = id" = Quickcheck.test gen ~f:(fun pk -> - assert (equal (decompress_exn (compress pk)) pk) ) + assert (equal (decompress_exn (compress pk)) pk)) - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] (* snarky-dependent *) @@ -284,7 +280,7 @@ module Uncompressed = struct let%map bs = Field.Checked.unpack_full y in List.hd_exn (Bitstring_lib.Bitstring.Lsb_first.to_list bs) - let decompress_var ({x; is_odd} as c : Compressed.var) = + let decompress_var ({ x; is_odd } as c : Compressed.var) = let open Let_syntax in let%bind y = exists Typ.field @@ -299,7 +295,7 @@ module Uncompressed = struct let%snarkydef compress_var ((x, y) : var) : (Compressed.var, _) Checked.t = let open Compressed_poly in let%map is_odd = parity_var y in - {Poly.x; is_odd} + { Poly.x; is_odd } (* end snarky-dependent *) [%%endif] diff --git a/src/lib/o1trace/o1trace.ml b/src/lib/o1trace/o1trace.ml index b6eaa3c80dd..1fc46eb8101 100644 --- a/src/lib/o1trace/o1trace.ml +++ b/src/lib/o1trace/o1trace.ml @@ -1,8 +1,6 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%if -tracing] +[%%if tracing] open Core open Async @@ -27,16 +25,17 @@ let tid_names = ref [] let remember_tid name tid = tid_names := (name, tid) :: !tid_names let new_event (k : event_kind) : event = - { name= "" - ; categories= [] - ; phase= k - ; timestamp= timestamp () - ; pid= our_pid - ; tid= 0 } + { name = "" + ; categories = [] + ; phase = k + ; timestamp = timestamp () + ; pid = our_pid + ; tid = 0 + } let log_thread_existence name tid = Option.iter !current_wr ~f:(fun wr -> - emit_event wr {(new_event New_thread) with name; tid} ) + emit_event wr { (new_event New_thread) with name; tid }) let trace_new_thread (name : string) (tid : int) = remember_tid name tid ; @@ -44,16 +43,17 @@ let trace_new_thread (name : string) (tid : int) = let trace_thread_switch (new_ctx : Execution_context.t) = Option.iter !current_wr ~f:(fun wr -> - emit_event wr {(new_event Thread_switch) with tid= 
new_ctx.tid} ) + emit_event wr { (new_event Thread_switch) with tid = new_ctx.tid }) let () = Async_kernel.Tracing.fns := { trace_thread_switch - ; trace_new_thread= (fun name ctx -> trace_new_thread name ctx.tid) } + ; trace_new_thread = (fun name ctx -> trace_new_thread name ctx.tid) + } let trace_event (name : string) = Option.iter !current_wr ~f:(fun wr -> - emit_event wr {(new_event Event) with name} ) + emit_event wr { (new_event Event) with name }) let trace (name : string) (f : unit -> 'a) = let new_ctx = @@ -80,12 +80,12 @@ let trace_recurring name f = trace (recurring_prefix name) f let trace_recurring_task (name : string) (f : unit -> unit Deferred.t) = trace_task (recurring_prefix name) (fun () -> trace_event "started another" ; - f () ) + f ()) let measure (name : string) (f : unit -> 'a) : 'a = match !current_wr with | Some wr -> - emit_event wr {(new_event Measure_start) with name} ; + emit_event wr { (new_event Measure_start) with name } ; let res = f () in emit_event wr (new_event Measure_end) ; res @@ -105,19 +105,19 @@ let start_tracing wr = Scheduler.Expert.set_on_end_of_cycle (fun () -> if not sch.cycle_started then emit_event wr - {(new_event Cycle_end) with tid= sch.current_execution_context.tid} ; - sch.cycle_started <- true ) ; + { (new_event Cycle_end) with tid = sch.current_execution_context.tid } ; + sch.cycle_started <- true) ; emit_event wr (new_event Pid_is) ; List.iter !tid_names ~f:(fun (name, tid) -> log_thread_existence name tid) ; emit_event wr - {(new_event Thread_switch) with tid= sch.current_execution_context.tid} + { (new_event Thread_switch) with tid = sch.current_execution_context.tid } let stop_tracing () = let sch = Scheduler.t () in Scheduler.Expert.set_on_end_of_cycle Fn.id ; Option.iter !current_wr ~f:(fun wr -> emit_event wr - {(new_event Trace_end) with tid= sch.current_execution_context.tid} ) ; + { (new_event Trace_end) with tid = sch.current_execution_context.tid }) ; current_wr := None [%%else] diff --git 
a/src/lib/one_or_two/intfs.ml b/src/lib/one_or_two/intfs.ml index 780fe78b5ed..3f5f5f05f8e 100644 --- a/src/lib/one_or_two/intfs.ml +++ b/src/lib/one_or_two/intfs.ml @@ -1,4 +1,4 @@ -type 'a t = [`One of 'a | `Two of 'a * 'a] +type 'a t = [ `One of 'a | `Two of 'a * 'a ] (** One_or_two operations in a two-parameter monad. *) module type Monadic2 = sig diff --git a/src/lib/one_or_two/one_or_two.ml b/src/lib/one_or_two/one_or_two.ml index 73710794ea7..7872fd7d85a 100644 --- a/src/lib/one_or_two/one_or_two.ml +++ b/src/lib/one_or_two/one_or_two.ml @@ -4,7 +4,7 @@ open Async [%%versioned module Stable = struct module V1 = struct - type 'a t = [`One of 'a | `Two of 'a * 'a] + type 'a t = [ `One of 'a | `Two of 'a * 'a ] [@@deriving equal, compare, hash, sexp, yojson] let to_latest a_latest = function @@ -27,13 +27,13 @@ end] let length = function `One _ -> 1 | `Two _ -> 2 -let to_list = function `One a -> [a] | `Two (a, b) -> [a; b] +let to_list = function `One a -> [ a ] | `Two (a, b) -> [ a; b ] let to_numbered_list = function | `One a -> - [(0, a)] + [ (0, a) ] | `Two (a, b) -> - [(0, a); (1, b)] + [ (0, a); (1, b) ] let group_sequence : 'a Sequence.t -> 'a t Sequence.t = fun to_group -> @@ -42,11 +42,11 @@ let group_sequence : 'a Sequence.t -> 'a t Sequence.t = | None -> None | Some (a, rest_1) -> ( - match Sequence.next rest_1 with - | None -> - Some (`One a, Sequence.empty) - | Some (b, rest_2) -> - Some (`Two (a, b), rest_2) ) ) + match Sequence.next rest_1 with + | None -> + Some (`One a, Sequence.empty) + | Some (b, rest_2) -> + Some (`Two (a, b), rest_2) )) let group_list : 'a list -> 'a t list = fun xs -> xs |> Sequence.of_list |> group_sequence |> Sequence.to_list @@ -123,4 +123,5 @@ let gen inner_gen = Quickcheck.Generator.( union [ map inner_gen ~f:(fun x -> `One x) - ; map (tuple2 inner_gen inner_gen) ~f:(fun pair -> `Two pair) ]) + ; map (tuple2 inner_gen inner_gen) ~f:(fun pair -> `Two pair) + ]) diff --git a/src/lib/one_or_two/one_or_two.mli 
b/src/lib/one_or_two/one_or_two.mli index 301a7b205ce..c9c1bbac6aa 100644 --- a/src/lib/one_or_two/one_or_two.mli +++ b/src/lib/one_or_two/one_or_two.mli @@ -10,8 +10,7 @@ module Stable : sig val to_latest : ('a -> 'b) -> 'a t -> 'b t - val of_latest : - ('a -> ('b, 'err) Result.t) -> 'a t -> ('b t, 'err) Result.t + val of_latest : ('a -> ('b, 'err) Result.t) -> 'a t -> ('b t, 'err) Result.t end module Latest = V1 diff --git a/src/lib/otp_lib/agent.ml b/src/lib/otp_lib/agent.ml index 8cf762fcaf3..13a132b08a8 100644 --- a/src/lib/otp_lib/agent.ml +++ b/src/lib/otp_lib/agent.ml @@ -7,15 +7,16 @@ type read_only type _ flag = Read_write : read_write flag | Read_only : read_only flag type 'a t_ = - { mutable a: 'a - ; mutable on_update: 'a -> unit - ; mutable dirty: bool - ; mutable subscribers: 'a t_ list } + { mutable a : 'a + ; mutable on_update : 'a -> unit + ; mutable dirty : bool + ; mutable subscribers : 'a t_ list + } type ('flag, 'a) t = 'a t_ constraint 'flag = _ flag let create ~(f : 'a -> 'b) x : (_ flag, 'b) t = - {a= f x; on_update= Fn.ignore; dirty= false; subscribers= []} + { a = f x; on_update = Fn.ignore; dirty = false; subscribers = [] } let get (t : (_ flag, 'a) t) = if t.dirty then ( @@ -35,7 +36,7 @@ let num_subscribers t = List.length t.subscribers let read_only (t : (read_write flag, 'a) t) : (read_only flag, 'a) t = let read_only_copy = - {a= t.a; on_update= t.on_update; dirty= t.dirty; subscribers= []} + { a = t.a; on_update = t.on_update; dirty = t.dirty; subscribers = [] } in t.subscribers <- read_only_copy :: t.subscribers ; read_only_copy @@ -50,7 +51,7 @@ let%test_module "Agent" = on_update read_only_agent ~f:(fun _ -> is_touched := true) ; let new_value = intial_value + 2 in update agent new_value ; - let equal = [%equal: int * [`Same | `Different]] in + let equal = [%equal: int * [ `Same | `Different ]] in !is_touched && 1 = num_subscribers agent && 0 = num_subscribers read_only_agent diff --git a/src/lib/otp_lib/agent.mli 
b/src/lib/otp_lib/agent.mli index e7df21b4758..3b9c60e131e 100644 --- a/src/lib/otp_lib/agent.mli +++ b/src/lib/otp_lib/agent.mli @@ -12,7 +12,7 @@ type ('flag, 'a) t = 'a t_ constraint 'flag = _ flag val create : f:('a -> 'b) -> 'a -> (read_write flag, 'b) t -val get : (_ flag, 'a) t -> 'a * [> `Different | `Same] +val get : (_ flag, 'a) t -> 'a * [> `Different | `Same ] val update : (read_write flag, 'a) t -> 'a -> unit diff --git a/src/lib/otp_lib/capped_supervisor.ml b/src/lib/otp_lib/capped_supervisor.ml index 409aac76512..366ebb48dfe 100644 --- a/src/lib/otp_lib/capped_supervisor.ml +++ b/src/lib/otp_lib/capped_supervisor.ml @@ -4,8 +4,9 @@ open Pipe_lib open Strict_pipe type 'data t = - { job_writer: ('data, crash buffered, unit) Writer.t - ; f: 'data -> unit Deferred.t } + { job_writer : ('data, crash buffered, unit) Writer.t + ; f : 'data -> unit Deferred.t + } let create ?(buffer_capacity = 30) ~job_capacity f = let job_reader, job_writer = @@ -41,7 +42,7 @@ let create ?(buffer_capacity = 30) ~job_capacity f = don't_wait_for (Reader.iter_without_pushback job_reader ~f:(fun job -> if !active_jobs < job_capacity then run_job job - else pending_jobs := !pending_jobs @ [job] )) ; - {job_writer; f} + else pending_jobs := !pending_jobs @ [ job ])) ; + { job_writer; f } let dispatch t data = Writer.write t.job_writer data diff --git a/src/lib/otp_lib/worker_supervisor.ml b/src/lib/otp_lib/worker_supervisor.ml index 470b2c1d316..bb2c87ff971 100644 --- a/src/lib/otp_lib/worker_supervisor.ml +++ b/src/lib/otp_lib/worker_supervisor.ml @@ -28,7 +28,7 @@ end module type Worker_intf = sig include Base_intf - val make_immediate_progress : t -> input -> [`Unprocessed of input] + val make_immediate_progress : t -> input -> [ `Unprocessed of input ] val perform : t -> input -> output Deferred.t end @@ -39,7 +39,7 @@ module type S = sig val is_working : t -> bool - val make_immediate_progress : t -> input -> [`Unprocessed of input] + val make_immediate_progress : t -> input 
-> [ `Unprocessed of input ] val dispatch : t -> input -> output Deferred.t end @@ -47,12 +47,13 @@ end (** [Make (Worker)] creates a supervisor which wraps dispatches to [Worker]. *) module Make (Worker : Worker_intf) : S - with type create_args := Worker.create_args - and type input := Worker.input - and type output := Worker.output = struct - type t = {mutable thread: Worker.output Deferred.t option; worker: Worker.t} + with type create_args := Worker.create_args + and type input := Worker.input + and type output := Worker.output = struct + type t = + { mutable thread : Worker.output Deferred.t option; worker : Worker.t } - let create args = {thread= None; worker= Worker.create args} + let create args = { thread = None; worker = Worker.create args } let is_working t = Option.value_map t.thread ~default:false ~f:Deferred.is_determined diff --git a/src/lib/outside_hash_image/outside_hash_image.ml b/src/lib/outside_hash_image/outside_hash_image.ml index b8065e6d322..271f62af22c 100644 --- a/src/lib/outside_hash_image/outside_hash_image.ml +++ b/src/lib/outside_hash_image/outside_hash_image.ml @@ -1,10 +1,8 @@ (* outside_hash_image.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let t = Snark_params.Tick.Field.zero diff --git a/src/lib/parallel/parallel.ml b/src/lib/parallel/parallel.ml index 43cd14ffb75..57007375284 100644 --- a/src/lib/parallel/parallel.ml +++ b/src/lib/parallel/parallel.ml @@ -11,10 +11,10 @@ let init_master () = if not !initialized then ( let rpc_heartbeat_config = Rpc.Connection.Heartbeat_config.create - ~send_every:(Time_ns.Span.of_sec 10.) - ~timeout:(Time_ns.Span.of_min 15.) () + ~send_every:(Time_ns.Span.of_sec 10.) ~timeout:(Time_ns.Span.of_min 15.) + () in Rpc_parallel.Expert.start_master_server_exn ~rpc_handshake_timeout:(Time.Span.of_min 10.) 
~rpc_heartbeat_config - ~worker_command_args:[worker_command_name] () ; + ~worker_command_args:[ worker_command_name ] () ; initialized := true ) diff --git a/src/lib/parallel_scan/parallel_scan.ml b/src/lib/parallel_scan/parallel_scan.ml index db8962b79d9..0da16696872 100644 --- a/src/lib/parallel_scan/parallel_scan.ml +++ b/src/lib/parallel_scan/parallel_scan.ml @@ -50,13 +50,13 @@ module Weight = struct [@@@no_toplevel_latest_type] module V1 = struct - type t = {base: int; merge: int} [@@deriving sexp] + type t = { base : int; merge : int } [@@deriving sexp] let to_latest = Fn.id end end] - type t = Stable.Latest.t = {base: int; merge: int} [@@deriving sexp, lens] + type t = Stable.Latest.t = { base : int; merge : int } [@@deriving sexp, lens] end (**For base proofs (Proving new transactions)*) @@ -66,9 +66,10 @@ module Base = struct module Stable = struct module V1 = struct type 'base t = - { job: 'base - ; seq_no: Sequence_number.Stable.V1.t - ; status: Job_status.Stable.V1.t } + { job : 'base + ; seq_no : Sequence_number.Stable.V1.t + ; status : Job_status.Stable.V1.t + } [@@deriving sexp] end end] @@ -102,10 +103,11 @@ module Merge = struct module Stable = struct module V1 = struct type 'merge t = - { left: 'merge - ; right: 'merge - ; seq_no: Sequence_number.Stable.V1.t - ; status: Job_status.Stable.V1.t } + { left : 'merge + ; right : 'merge + ; seq_no : Sequence_number.Stable.V1.t + ; status : Job_status.Stable.V1.t + } [@@deriving sexp] end end] @@ -163,7 +165,8 @@ module Space_partition = struct [%%versioned module Stable = struct module V1 = struct - type t = {first: int * int; second: (int * int) option} [@@deriving sexp] + type t = { first : int * int; second : (int * int) option } + [@@deriving sexp] let to_latest = Fn.id end @@ -177,7 +180,9 @@ module Job_view = struct module Stable = struct module V1 = struct type t = - {seq_no: Sequence_number.Stable.V1.t; status: Job_status.Stable.V1.t} + { seq_no : Sequence_number.Stable.V1.t + ; status : 
Job_status.Stable.V1.t + } [@@deriving sexp] let to_latest = Fn.id @@ -203,7 +208,8 @@ module Job_view = struct [%%versioned module Stable = struct module V1 = struct - type 'a t = {position: int; value: 'a Node.Stable.V1.t} [@@deriving sexp] + type 'a t = { position : int; value : 'a Node.Stable.V1.t } + [@@deriving sexp] end end] end @@ -220,9 +226,10 @@ module Tree = struct type ('merge_t, 'base_t) t = | Leaf of 'base_t | Node of - { depth: int - ; value: 'merge_t - ; sub_tree: ('merge_t * 'merge_t, 'base_t * 'base_t) t } + { depth : int + ; value : 'merge_t + ; sub_tree : ('merge_t * 'merge_t, 'base_t * 'base_t) t + } [@@deriving sexp] end end] @@ -236,10 +243,11 @@ module Tree = struct Node ((M,M),(M,M)) | Leaf (((B,B),(B,B)),((B,B),(B,B))) - *) + *) (*mapi where i is the level of the tree*) - let rec map_depth : type a_merge b_merge c_base d_base. + let rec map_depth : + type a_merge b_merge c_base d_base. f_merge:(int -> a_merge -> b_merge) -> f_base:(c_base -> d_base) -> (a_merge, c_base) t @@ -248,17 +256,19 @@ module Tree = struct match tree with | Leaf d -> Leaf (f_base d) - | Node {depth; value; sub_tree} -> + | Node { depth; value; sub_tree } -> Node { depth - ; value= f_merge depth value - ; sub_tree= + ; value = f_merge depth value + ; sub_tree = map_depth ~f_merge:(fun i (x, y) -> (f_merge i x, f_merge i y)) ~f_base:(fun (x, y) -> (f_base x, f_base y)) - sub_tree } + sub_tree + } - let map : type a_merge b_merge c_base d_base. + let map : + type a_merge b_merge c_base d_base. f_merge:(a_merge -> b_merge) -> f_base:(c_base -> d_base) -> (a_merge, c_base) t @@ -268,11 +278,10 @@ module Tree = struct (* foldi where i is the cur_level*) module Make_foldable (M : Monad.S) = struct - let rec fold_depth_until' : type merge_t accum base_t final. - f_merge:( int - -> accum - -> merge_t - -> (accum, final) Continue_or_stop.t M.t) + let rec fold_depth_until' : + type merge_t accum base_t final. 
+ f_merge: + (int -> accum -> merge_t -> (accum, final) Continue_or_stop.t M.t) -> f_base:(accum -> base_t -> (accum, final) Continue_or_stop.t M.t) -> init:accum -> (merge_t, base_t) t @@ -283,7 +292,7 @@ module Tree = struct match t with | Leaf d -> f_base acc d - | Node {depth; value; sub_tree} -> ( + | Node { depth; value; sub_tree } -> ( match%bind f_merge depth acc value with | Continue acc' -> fold_depth_until' @@ -292,22 +301,21 @@ module Tree = struct | Continue r -> f_merge i r y | x -> - M.return x ) + M.return x) ~f_base:(fun acc (x, y) -> match%bind f_base acc x with | Continue r -> f_base r y | x -> - M.return x ) + M.return x) ~init:acc' sub_tree | x -> M.return x ) - let fold_depth_until : type merge_t base_t accum final. - f_merge:( int - -> accum - -> merge_t - -> (accum, final) Continue_or_stop.t M.t) + let fold_depth_until : + type merge_t base_t accum final. + f_merge: + (int -> accum -> merge_t -> (accum, final) Continue_or_stop.t M.t) -> f_base:(accum -> base_t -> (accum, final) Continue_or_stop.t M.t) -> init:accum -> finish:(accum -> final M.t) @@ -324,7 +332,8 @@ module Tree = struct module Foldable_ident = Make_foldable (Monad.Ident) - let fold_depth : type merge_t base_t accum. + let fold_depth : + type merge_t base_t accum. f_merge:(int -> accum -> merge_t -> accum) -> f_base:(accum -> base_t -> accum) -> init:accum @@ -336,7 +345,8 @@ module Tree = struct ~f_base:(fun acc d -> Continue (f_base acc d)) ~init ~finish:Fn.id t - let fold : type merge_t base_t accum. + let fold : + type merge_t base_t accum. f_merge:(accum -> merge_t -> accum) -> f_base:(accum -> base_t -> accum) -> init:accum @@ -345,7 +355,8 @@ module Tree = struct fun ~f_merge ~f_base ~init t -> fold_depth t ~init ~f_merge:(fun _ -> f_merge) ~f_base - let _fold_until : type merge_t base_t accum final. + let _fold_until : + type merge_t base_t accum final. 
f_merge:(accum -> merge_t -> (accum, final) Continue_or_stop.t) -> f_base:(accum -> base_t -> (accum, final) Continue_or_stop.t) -> init:accum @@ -360,7 +371,8 @@ module Tree = struct (* result -> final proof f_merge, f_base are to update the nodes with new jobs and mark old jobs as "Done"*) - let rec update_split : type merge_t base_t data weight result. + let rec update_split : + type merge_t base_t data weight result. f_merge:(data -> int -> merge_t -> (merge_t * result option) Or_error.t) -> f_base:(data -> base_t -> base_t Or_error.t) -> weight_merge:(merge_t -> weight * weight) @@ -375,7 +387,7 @@ module Tree = struct | Leaf d -> let%map updated = f_base jobs d in (Leaf updated, None) - | Node {depth; value; sub_tree} -> + | Node { depth; value; sub_tree } -> let weight_left_subtree, weight_right_subtree = weight_merge value in (*update the jobs at the current level*) let%bind value', scan_result = f_merge jobs depth value in @@ -391,20 +403,21 @@ module Tree = struct ~f_merge:(fun (b, b') i (x, y) -> let%bind left = f_merge b i x in let%map right = f_merge b' i y in - ((fst left, fst right), Option.both (snd left) (snd right)) ) + ((fst left, fst right), Option.both (snd left) (snd right))) ~f_base:(fun (b, b') (x, x') -> let%bind left = f_base b x in let%map right = f_base b' x' in - (left, right) ) + (left, right)) ~weight_merge:(fun (a, b) -> (weight_merge a, weight_merge b)) ~update_level ~jobs_split:(fun (x, y) (a, b) -> - (jobs_split x a, jobs_split y b) ) + (jobs_split x a, jobs_split y b)) ~jobs:new_jobs_list sub_tree in - (Node {depth; value= value'; sub_tree= sub}, scan_result) + (Node { depth; value = value'; sub_tree = sub }, scan_result) - let rec update_accumulate : type merge_t base_t data. + let rec update_accumulate : + type merge_t base_t data. 
f_merge:(data * data -> merge_t -> merge_t * data) -> f_base:(base_t -> base_t * data) -> (merge_t, base_t) t @@ -415,17 +428,17 @@ module Tree = struct | Leaf d -> let new_base, count_list = f_base d in (Leaf new_base, count_list) - | Node {depth; value; sub_tree} -> + | Node { depth; value; sub_tree } -> (*get the updated subtree*) let sub, counts = update_accumulate ~f_merge:(fun (b1, b2) (x, y) -> - transpose (f_merge b1 x, f_merge b2 y) ) + transpose (f_merge b1 x, f_merge b2 y)) ~f_base:(fun (x, y) -> transpose (f_base x, f_base y)) sub_tree in let value', count_list = f_merge counts value in - (Node {depth; value= value'; sub_tree= sub}, count_list) + (Node { depth; value = value'; sub_tree = sub }, count_list) let update : ('merge_job, 'base_job) Job.t list @@ -445,20 +458,21 @@ module Tree = struct match (jobs, m) with | [], _ -> Ok (weight, m) - | [Job.Merge a; Merge b], Merge.Job.Empty -> + | [ Job.Merge a; Merge b ], Merge.Job.Empty -> Ok - ( ( weight_lens.set (left - 1) w1 - , weight_lens.set (right - 1) w2 ) - , Full {left= a; right= b; seq_no; status= Job_status.Todo} ) - | [Merge a], Empty -> + ( (weight_lens.set (left - 1) w1, weight_lens.set (right - 1) w2) + , Full { left = a; right = b; seq_no; status = Job_status.Todo } + ) + | [ Merge a ], Empty -> Ok ( (weight_lens.set (left - 1) w1, weight_lens.set right w2) , Part a ) - | [Merge b], Part a -> + | [ Merge b ], Part a -> Ok ( (weight_lens.set left w1, weight_lens.set (right - 1) w2) - , Full {left= a; right= b; seq_no; status= Job_status.Todo} ) - | [Base _], Empty -> + , Full { left = a; right = b; seq_no; status = Job_status.Todo } + ) + | [ Base _ ], Empty -> (*Depending on whether this is the first or second of the two base jobs*) let weight = if left = 0 then @@ -466,10 +480,9 @@ module Tree = struct else (weight_lens.set (left - 1) w1, weight_lens.set right w2) in Ok (weight, m) - | [Base _; Base _], Empty -> + | [ Base _; Base _ ], Empty -> Ok - ( ( weight_lens.set (left - 1) w1 - , 
weight_lens.set (right - 1) w2 ) + ( (weight_lens.set (left - 1) w1, weight_lens.set (right - 1) w2) , m ) | xs, m -> Or_error.errorf @@ -481,8 +494,8 @@ module Tree = struct else if cur_level = update_level then (*Mark completed jobs as Done*) match (jobs, m) with - | [Merge a], Full ({status= Job_status.Todo; _} as x) -> - let new_job = Merge.Job.Full {x with status= Job_status.Done} in + | [ Merge a ], Full ({ status = Job_status.Todo; _ } as x) -> + let new_job = Merge.Job.Full { x with status = Job_status.Done } in let scan_result, weight' = if cur_level = 0 then (Some a, (weight_lens.set 0 w1, weight_lens.set 0 w2)) @@ -518,16 +531,16 @@ module Tree = struct match (jobs, d) with | [], _ -> Ok (w, d) - | [Job.Base d], Base.Job.Empty -> + | [ Job.Base d ], Base.Job.Empty -> Ok ( weight_lens.set (weight - 1) w - , Base.Job.Full {job= d; seq_no; status= Job_status.Todo} ) - | [Job.Merge _], Full b -> - Ok (w, Full {b with status= Job_status.Done}) + , Base.Job.Full { job = d; seq_no; status = Job_status.Todo } ) + | [ Job.Merge _ ], Full b -> + Ok (w, Full { b with status = Job_status.Done }) | xs, _ -> Or_error.errorf - "Got %d jobs when updating level %d and when one of the base \ - nodes is %s" + "Got %d jobs when updating level %d and when one of the base nodes \ + is %s" (List.length xs) update_level (Base.Job.job_str d) in let jobs = completed_jobs in @@ -535,31 +548,29 @@ module Tree = struct ~jobs ~update_level ~jobs_split:(fun (w1, w2) a -> let l = weight_lens.get w1 in let r = weight_lens.get w2 in - (List.take a l, List.take (List.drop a l) r) ) + (List.take a l, List.take (List.drop a l) r)) let reset_weights : - [`Base | `Merge | `Both] + [ `Base | `Merge | `Both ] -> ('merge_t, 'base_t) t -> ('merge_t, 'base_t) t = fun weight_type tree -> let set_all_zero weight = Weight.base.set 0 (Weight.merge.set 0 weight) in let f_base base = let set_one (lens : (Weight.t, int) Lens.t) weight = lens.set 1 weight in - let set_zero (lens : (Weight.t, int) Lens.t) 
weight = - lens.set 0 weight - in + let set_zero (lens : (Weight.t, int) Lens.t) weight = lens.set 0 weight in let update_merge_weight weight = (*When updating the merge-weight of base nodes, only the nodes with - "Todo" status needs to be included*) + "Todo" status needs to be included*) match snd base with - | Base.Job.Full {status= Job_status.Todo; _} -> + | Base.Job.Full { status = Job_status.Todo; _ } -> set_one Weight.merge weight | _ -> set_zero Weight.merge weight in let update_base_weight weight = (*When updating the base-weight of base nodes, only the Empty nodes - need to be included*) + need to be included*) match snd base with | Base.Job.Empty -> set_one Weight.base weight @@ -589,16 +600,16 @@ module Tree = struct match weight_type with | `Merge -> ( (*When updating the merge-weight of merge nodes, only the nodes - with "Todo" status needs to be included*) + with "Todo" status needs to be included*) let lens = Weight.merge in match m with - | (w1', w2'), Merge.Job.Full {status= Job_status.Todo; _} -> + | (w1', w2'), Merge.Job.Full { status = Job_status.Todo; _ } -> (lens.set 1 w1', lens.set 0 w2') | w, _ -> reset lens w ) | `Base -> (* The base-weight of merge nodes is the sum of weights of its - children*) + children*) reset Weight.base (fst m) | `Both -> reset Weight.merge (reset Weight.base (fst m)) @@ -616,16 +627,16 @@ module Tree = struct fold_depth ~init:[] ~f_merge:(fun i acc a -> match (i = level, a) with - | true, (_weight, Merge.Job.Full {left; right; status= Todo; _}) -> + | true, (_weight, Merge.Job.Full { left; right; status = Todo; _ }) -> Available_job.Merge (left, right) :: acc | _ -> - acc ) + acc) ~f_base:(fun acc d -> match (level = depth, d) with - | true, (_weight, Base.Job.Full {job; status= Todo; _}) -> + | true, (_weight, Base.Job.Full { job; status = Todo; _ }) -> Available_job.Base job :: acc | _ -> - acc ) + acc) tree |> List.rev @@ -635,21 +646,21 @@ module Tree = struct fold ~init:[] ~f_merge:(fun acc a -> match a with - 
| _, Merge.Job.Full {status= Job_status.Done; _} -> + | _, Merge.Job.Full { status = Job_status.Done; _ } -> acc | _ -> - Job.Merge a :: acc ) + Job.Merge a :: acc) ~f_base:(fun acc d -> match d with - | _, Base.Job.Full {status= Job_status.Done; _} -> + | _, Base.Job.Full { status = Job_status.Done; _ } -> acc | _ -> - Job.Base d :: acc ) + Job.Base d :: acc) tree |> List.rev - let jobs_records : - ('merge_t, 'base_t) t -> ('merge_job, 'base_job) Job.t list = + let jobs_records : ('merge_t, 'base_t) t -> ('merge_job, 'base_job) Job.t list + = fun tree -> fold ~init:[] ~f_merge:(fun acc a -> @@ -657,10 +668,9 @@ module Tree = struct | _weight, Merge.Job.Full x -> Job.Merge x :: acc | _ -> - acc ) + acc) ~f_base:(fun acc d -> - match d with _weight, Base.Job.Full j -> Job.Base j :: acc | _ -> acc - ) + match d with _weight, Base.Job.Full j -> Job.Base j :: acc | _ -> acc) tree |> List.rev @@ -669,7 +679,7 @@ module Tree = struct fold_depth ~init:[] ~f_merge:(fun _ _ _ -> []) ~f_base:(fun acc d -> - match d with _, Base.Job.Full {job; _} -> job :: acc | _ -> acc ) + match d with _, Base.Job.Full { job; _ } -> job :: acc | _ -> acc) tree |> List.rev @@ -680,16 +690,16 @@ module Tree = struct fold_depth ~init:(0, 0) ~f_merge:(fun _ (b, m) (_, j) -> match j with - | Merge.Job.Full {status= Job_status.Todo; _} -> + | Merge.Job.Full { status = Job_status.Todo; _ } -> (b, m + 1) | _ -> - (b, m) ) + (b, m)) ~f_base:(fun (b, m) (_, d) -> match d with - | Base.Job.Full {status= Job_status.Todo; _} -> + | Base.Job.Full { status = Job_status.Todo; _ } -> (b + 1, m) | _ -> - (b, m) ) + (b, m)) tree let leaves : ('merge_t, 'base_t) t -> 'base_t list = @@ -697,11 +707,12 @@ module Tree = struct fold_depth ~init:[] ~f_merge:(fun _ _ _ -> []) ~f_base:(fun acc d -> - match d with _, Base.Job.Full _ -> d :: acc | _ -> acc ) + match d with _, Base.Job.Full _ -> d :: acc | _ -> acc) tree |> List.rev - let rec _view_tree : type merge_t base_t. 
+ let rec _view_tree : + type merge_t base_t. (merge_t, base_t) t -> show_merge:(merge_t -> string) -> show_base:(base_t -> string) @@ -710,25 +721,25 @@ module Tree = struct match tree with | Leaf d -> sprintf !"Leaf %s\n" (show_base d) - | Node {value; sub_tree; _} -> + | Node { value; sub_tree; _ } -> let curr = sprintf !"Node %s\n" (show_merge value) in let subtree = _view_tree sub_tree ~show_merge:(fun (x, y) -> - sprintf !"%s %s" (show_merge x) (show_merge y) ) + sprintf !"%s %s" (show_merge x) (show_merge y)) ~show_base:(fun (x, y) -> - sprintf !"%s %s" (show_base x) (show_base y) ) + sprintf !"%s %s" (show_base x) (show_base y)) in curr ^ subtree let required_job_count = function - | Node {value= (w1, w2), _; _} -> + | Node { value = (w1, w2), _; _ } -> Weight.merge.get w1 + Weight.merge.get w2 | Leaf (w, _) -> Weight.merge.get w let available_space = function - | Node {value= (w1, w2), _; _} -> + | Node { value = (w1, w2), _; _ } -> Weight.base.get w1 + Weight.base.get w2 | Leaf (w, _) -> Weight.base.get w @@ -741,8 +752,8 @@ module Tree = struct Job_view.Node.MEmpty | Part a -> MPart (fa a) - | Full {left; right; seq_no; status} -> - MFull (fa left, fa right, {Job_view.Extra.status; seq_no}) + | Full { left; right; seq_no; status } -> + MFull (fa left, fa right, { Job_view.Extra.status; seq_no }) in view :: acc in @@ -751,14 +762,14 @@ module Tree = struct match snd a with | Base.Job.Empty -> Job_view.Node.BEmpty - | Full {seq_no; status; job} -> - BFull (fd job, {seq_no; status}) + | Full { seq_no; status; job } -> + BFull (fd job, { seq_no; status }) in view :: acc in let lst = fold ~f_merge ~f_base ~init:[] tree in let len = List.length lst - 1 in - List.rev_mapi lst ~f:(fun i value -> {Job_view.position= len - i; value}) + List.rev_mapi lst ~f:(fun i value -> { Job_view.position = len - i; value }) end (*This struture works well because we always complete all the nodes on a specific level before proceeding to the next level*) @@ -770,15 +781,16 @@ 
module T = struct module V1 = struct type ('merge, 'base) t = - { trees: + { trees : ( 'merge Merge.Stable.V1.t , 'base Base.Stable.V1.t ) Tree.Stable.V1.t Non_empty_list.Stable.V1.t - ; acc: ('merge * 'base list) option - ; curr_job_seq_no: int - ; max_base_jobs: int - ; delay: int } + ; acc : ('merge * 'base list) option + ; curr_job_seq_no : int + ; max_base_jobs : int + ; delay : int + } end end] end @@ -787,36 +799,37 @@ module T = struct module Stable = struct module V1 = struct type ('merge, 'base) t = ('merge, 'base) Binable_arg.Stable.V1.t = - { trees: + { trees : ('merge Merge.Stable.V1.t, 'base Base.Stable.V1.t) Tree.Stable.V1.t Non_empty_list.Stable.V1.t (*use non empty list*) - ; acc: ('merge * 'base list) option + ; acc : ('merge * 'base list) option (*last emitted proof and the corresponding transactions*) - ; curr_job_seq_no: int + ; curr_job_seq_no : int (*Sequence number for the jobs added every block*) - ; max_base_jobs: int (*transaction_capacity_log_2*) - ; delay: int } + ; max_base_jobs : int (*transaction_capacity_log_2*) + ; delay : int + } [@@deriving sexp] (* Delete all the completed jobs because 1. They are completed 2. They are not required to create new jobs anymore 3. 
We are not exposing these jobs for any sort of computation as of now - *) - let with_leaner_trees ({trees; _} as t) = + *) + let with_leaner_trees ({ trees; _ } as t) = let trees = Non_empty_list.map trees ~f:(fun tree -> Tree.map tree ~f_merge:(fun merge_node -> match snd merge_node with - | Merge.Job.Full {status= Job_status.Done; _} -> + | Merge.Job.Full { status = Job_status.Done; _ } -> (fst merge_node, Merge.Job.Empty) | _ -> - merge_node ) - ~f_base:Fn.id ) + merge_node) + ~f_base:Fn.id) in - {t with trees} + { t with trees } include Binable.Of_binable2 (Binable_arg.Stable.V1) @@ -830,11 +843,11 @@ module T = struct end end] - [%%define_locally - Stable.Latest.(with_leaner_trees)] + [%%define_locally Stable.Latest.(with_leaner_trees)] let create_tree_for_level ~level ~depth ~merge_job ~base_job = - let rec go : type merge_t base_t. + let rec go : + type merge_t base_t. int -> (int -> merge_t) -> base_t -> (merge_t, base_t) Tree.t = fun d fmerge base -> if d >= depth then Tree.Leaf base @@ -842,9 +855,9 @@ module T = struct let sub_tree = go (d + 1) (fun i -> (fmerge i, fmerge i)) (base, base) in - Node {depth= d; value= fmerge d; sub_tree} + Node { depth = d; value = fmerge d; sub_tree } in - let weight base merge = {Weight.base; merge} in + let weight base merge = { Weight.base; merge } in let base_weight = if level = -1 then weight 0 0 else weight 1 0 in go 0 (fun d -> @@ -854,15 +867,15 @@ module T = struct let x = Int.pow 2 level / Int.pow 2 (d + 1) in (weight x 0, weight x 0) in - (weight, merge_job) ) + (weight, merge_job)) (base_weight, base_job) let create_tree ~depth = create_tree_for_level ~level:depth ~depth ~merge_job:Merge.Job.Empty ~base_job:Base.Job.Empty - let empty : type merge base. - max_base_jobs:int -> delay:int -> (merge, base) t = + let empty : type merge base. 
max_base_jobs:int -> delay:int -> (merge, base) t + = fun ~max_base_jobs ~delay -> let depth = Int.ceil_log2 max_base_jobs in let first_tree : @@ -871,11 +884,12 @@ module T = struct Tree.t = create_tree ~depth in - { trees= Non_empty_list.singleton first_tree - ; acc= None - ; curr_job_seq_no= 0 + { trees = Non_empty_list.singleton first_tree + ; acc = None + ; curr_job_seq_no = 0 ; max_base_jobs - ; delay } + ; delay + } end module State = struct @@ -883,7 +897,7 @@ module State = struct module Hash = Hash let hash t f_merge f_base = - let {trees; acc; max_base_jobs; curr_job_seq_no; delay; _} = + let { trees; acc; max_base_jobs; curr_job_seq_no; delay; _ } = with_leaner_trees t in let h = ref (Digestif.SHA256.init ()) in @@ -891,17 +905,17 @@ module State = struct let () = let tree_hash tree f_merge f_base = List.iter (Tree.to_hashable_jobs tree) ~f:(fun job -> - match job with Job.Merge a -> f_merge a | Base d -> f_base d ) + match job with Job.Merge a -> f_merge a | Base d -> f_base d) in Non_empty_list.iter trees ~f:(fun tree -> - let w_to_string {Weight.base= b; merge= m} = + let w_to_string { Weight.base = b; merge = m } = Int.to_string b ^ Int.to_string m in let w_to_string' (w1, w2) = w_to_string w1 ^ w_to_string w2 in let f_merge = function | w, Merge.Job.Empty -> add_string (w_to_string' w ^ "Empty") - | w, Merge.Job.Full {left; right; status; seq_no} -> + | w, Merge.Job.Full { left; right; status; seq_no } -> add_string ( w_to_string' w ^ "Full" ^ Int.to_string seq_no ^ Job_status.to_string status ) ; @@ -914,18 +928,17 @@ module State = struct let f_base = function | w, Base.Job.Empty -> add_string (w_to_string w ^ "Empty") - | w, Base.Job.Full {job; status; seq_no} -> + | w, Base.Job.Full { job; status; seq_no } -> add_string ( w_to_string w ^ "Full" ^ Int.to_string seq_no ^ Job_status.to_string status ) ; add_string (f_base job) in - tree_hash tree f_merge f_base ) + tree_hash tree f_merge f_base) in let acc_string = Option.value_map acc 
~default:"None" ~f:(fun (a, d_lst) -> - f_merge a ^ List.fold ~init:"" d_lst ~f:(fun acc d -> acc ^ f_base d) - ) + f_merge a ^ List.fold ~init:"" d_lst ~f:(fun acc d -> acc ^ f_base d)) in add_string acc_string ; add_string (Int.to_string curr_job_seq_no) ; @@ -939,12 +952,10 @@ module State = struct let fold_chronological_until : ('merge, 'base) t -> init:'acc - -> f_merge:( 'acc - -> 'merge Merge.t - -> ('acc, 'final) Continue_or_stop.t M.t) - -> f_base:( 'acc - -> 'base Base.t - -> ('acc, 'final) Continue_or_stop.t M.t) + -> f_merge: + ('acc -> 'merge Merge.t -> ('acc, 'final) Continue_or_stop.t M.t) + -> f_base: + ('acc -> 'base Base.t -> ('acc, 'final) Continue_or_stop.t M.t) -> finish:('acc -> 'final M.t) -> 'final M.t = fun t ~init ~f_merge ~f_base ~finish -> @@ -993,16 +1004,18 @@ let return_error e a = let max_trees : ('merge, 'base) t -> int = fun t -> ((Int.ceil_log2 t.max_base_jobs + 1) * (t.delay + 1)) + 1 -let work_to_do : type merge base. +let work_to_do : + type merge base. (merge Merge.t, base Base.t) Tree.t list -> max_base_jobs:int -> (merge, base) Available_job.t list = fun trees ~max_base_jobs -> let depth = Int.ceil_log2 max_base_jobs in List.concat_mapi trees ~f:(fun i tree -> - Tree.jobs_on_level ~depth ~level:(depth - i) tree ) + Tree.jobs_on_level ~depth ~level:(depth - i) tree) -let work : type merge base. +let work : + type merge base. (merge Merge.t, base Base.t) Tree.t list -> delay:int -> max_base_jobs:int @@ -1028,8 +1041,9 @@ let work_for_tree t ~data_tree = work trees ~max_base_jobs:t.max_base_jobs ~delay (*work on all the level and all the trees*) -let all_work : type merge base. - (merge, base) t -> (merge, base) Available_job.t list list = +let all_work : + type merge base. (merge, base) t -> (merge, base) Available_job.t list list + = fun t -> let depth = Int.ceil_log2 t.max_base_jobs in let set1 = work_for_tree t ~data_tree:`Current in @@ -1038,25 +1052,23 @@ let all_work : type merge base. 
(List.init ~f:Fn.id (t.delay + 1)) ~f:(fun (t, work_list) _ -> let trees' = Non_empty_list.cons (create_tree ~depth) t.trees in - let t' = {t with trees= trees'} in + let t' = { t with trees = trees' } in match work_for_tree t' ~data_tree:`Current with | [] -> (t', work_list) | work -> - (t', work :: work_list) ) + (t', work :: work_list)) in if List.is_empty set1 then List.rev other_sets else set1 :: List.rev other_sets -let work_for_next_update : type merge base. - (merge, base) t - -> data_count:int - -> (merge, base) Available_job.t list list = +let work_for_next_update : + type merge base. + (merge, base) t -> data_count:int -> (merge, base) Available_job.t list list + = fun t ~data_count -> let delay = t.delay + 1 in - let current_tree_space = - Tree.available_space (Non_empty_list.head t.trees) - in + let current_tree_space = Tree.available_space (Non_empty_list.head t.trees) in let set1 = work (Non_empty_list.tail t.trees) ~max_base_jobs:t.max_base_jobs ~delay in @@ -1069,10 +1081,10 @@ let work_for_next_update : type merge base. 
~max_base_jobs:t.max_base_jobs ~delay) ((count - current_tree_space) * 2) in - List.filter ~f:(Fn.compose not List.is_empty) [set1; set2] + List.filter ~f:(Fn.compose not List.is_empty) [ set1; set2 ] else let set = List.take set1 (2 * count) in - if List.is_empty set then [] else [set] + if List.is_empty set then [] else [ set ] let free_space_on_current_tree t = let tree = Non_empty_list.head t.trees in @@ -1081,11 +1093,11 @@ let free_space_on_current_tree t = let cons b bs = Option.value_map (Non_empty_list.of_list_opt bs) ~default:(Non_empty_list.singleton b) ~f:(fun bs -> - Non_empty_list.cons b bs ) + Non_empty_list.cons b bs) let append bs bs' = Option.value_map (Non_empty_list.of_list_opt bs') ~default:bs ~f:(fun bs' -> - Non_empty_list.append bs bs' ) + Non_empty_list.append bs bs') let add_merge_jobs : completed_jobs:'merge list -> ('base, 'merge, _) State_or_error.t = @@ -1135,7 +1147,7 @@ let add_merge_jobs : Error (Error.tag_arg e "Error while adding merge jobs to tree" ("tree_number", i) [%sexp_of: string * int]) - else Ok (tree :: trees, scan_result, jobs) ) + else Ok (tree :: trees, scan_result, jobs)) in match res with | Ok res -> @@ -1153,7 +1165,7 @@ let add_merge_jobs : ([], None) | t :: ts -> let tree_data = Tree.base_jobs t in - (List.rev ts, Some (res, tree_data)) ) + (List.rev ts, Some (res, tree_data))) in if Option.is_some result_opt @@ -1164,7 +1176,7 @@ let add_merge_jobs : else (updated_trees, result_opt) in let all_trees = cons curr_tree updated_trees in - let%map _ = State_or_error.put {state with trees= all_trees} in + let%map _ = State_or_error.put { state with trees = all_trees } in result_opt let add_data : data:'base list -> (_, _, 'base) State_or_error.t = @@ -1199,13 +1211,14 @@ let add_data : data:'base list -> (_, _, 'base) State_or_error.t = in let updated_trees = if List.length base_jobs = available_space then - cons (create_tree ~depth) [Tree.reset_weights `Both tree] + cons (create_tree ~depth) [ Tree.reset_weights `Both 
tree ] else Non_empty_list.singleton (Tree.reset_weights `Merge tree) in let%map _ = State_or_error.put { state with - trees= append updated_trees (Non_empty_list.tail state.trees) } + trees = append updated_trees (Non_empty_list.tail state.trees) + } in () @@ -1213,7 +1226,7 @@ let reset_seq_no : type a b. (a, b) t -> (a, b) t = fun state -> let oldest_seq_no = match List.hd @@ Tree.leaves (Non_empty_list.last state.trees) with - | Some (_, Base.Job.Full {seq_no; _}) -> + | Some (_, Base.Job.Full { seq_no; _ }) -> seq_no | _ -> 0 @@ -1221,15 +1234,15 @@ let reset_seq_no : type a b. (a, b) t -> (a, b) t = let new_seq seq = seq - oldest_seq_no + 1 in let f_merge (a : a Merge.t) : a Merge.t = match a with - | w, Merge.Job.Full ({seq_no; _} as x) -> - (w, Merge.Job.Full {x with seq_no= new_seq seq_no}) + | w, Merge.Job.Full ({ seq_no; _ } as x) -> + (w, Merge.Job.Full { x with seq_no = new_seq seq_no }) | m -> m in let f_base (b : b Base.t) : b Base.t = match b with - | w, Base.Job.Full ({seq_no; _} as x) -> - (w, Base.Job.Full {x with seq_no= new_seq seq_no}) + | w, Base.Job.Full ({ seq_no; _ } as x) -> + (w, Base.Job.Full { x with seq_no = new_seq seq_no }) | b -> b in @@ -1239,17 +1252,18 @@ let reset_seq_no : type a b. (a, b) t -> (a, b) t = let tree' = Tree.map ~f_base ~f_merge tree in let seq_no = match List.last @@ Tree.leaves tree' with - | Some (_, Base.Job.Full {seq_no; _}) -> + | Some (_, Base.Job.Full { seq_no; _ }) -> max seq_no max_seq | _ -> max_seq in - (seq_no, tree' :: updated_trees) ) + (seq_no, tree' :: updated_trees)) in { state with - curr_job_seq_no= next_seq_no - ; trees= - Option.value_exn (Non_empty_list.of_list_opt (List.rev updated_trees)) } + curr_job_seq_no = next_seq_no + ; trees = + Option.value_exn (Non_empty_list.of_list_opt (List.rev updated_trees)) + } let incr_sequence_no : type a b. (a, b) t -> (unit, a, b) State_or_error.t = fun state -> @@ -1257,7 +1271,7 @@ let incr_sequence_no : type a b. 
(a, b) t -> (unit, a, b) State_or_error.t = if state.curr_job_seq_no + 1 = Int.max_value then let state = reset_seq_no state in put state - else put {state with curr_job_seq_no= state.curr_job_seq_no + 1} + else put { state with curr_job_seq_no = state.curr_job_seq_no + 1 } let update_metrics t = Or_error.try_with (fun () -> @@ -1273,7 +1287,7 @@ let update_metrics t = (Int.to_float @@ base_job_count) ; Mina_metrics.( Gauge.set (Scan_state_metrics.scan_state_merge_snarks ~name)) - (Int.to_float @@ merge_job_count) ) ) + (Int.to_float @@ merge_job_count))) let update_helper : data:'base list @@ -1327,7 +1341,7 @@ let update_helper : (*update the latest emitted value *) let%bind () = State_or_error.put - {state with acc= Option.merge result_opt state.acc ~f:Fn.const} + { state with acc = Option.merge result_opt state.acc ~f:Fn.const } in (*Check the tree-list length is under max*) let%map () = @@ -1373,12 +1387,13 @@ let partition_if_overflowing : ('merge, 'base) t -> Space_partition.t = (*Check actual work count because it would be zero initially*) let work_count = work_for_tree t ~data_tree:`Current |> List.length in let work_count_new_tree = work_for_tree t ~data_tree:`Next |> List.length in - { first= (cur_tree_space, work_count) - ; second= + { first = (cur_tree_space, work_count) + ; second = ( if cur_tree_space < t.max_base_jobs then let slots = t.max_base_jobs - cur_tree_space in Some (slots, min work_count_new_tree (2 * slots)) - else None ) } + else None ) + } let next_on_new_tree t = let curr_tree_space = free_space_on_current_tree t in @@ -1389,7 +1404,7 @@ let pending_data t = let view_jobs_with_position (state : ('merge, 'base) State.t) fa fd = List.fold ~init:[] (Non_empty_list.to_list state.trees) ~f:(fun acc tree -> - Tree.view_jobs_with_position tree fa fd :: acc ) + Tree.view_jobs_with_position tree fa fd :: acc) let job_count t = State.fold_chronological t ~init:(0., 0.) 
@@ -1398,25 +1413,25 @@ let job_count t = match snd merge_node with | Merge.Job.Part _ -> (0.5, 0.) - | Full {status= Job_status.Todo; _} -> + | Full { status = Job_status.Todo; _ } -> (1., 0.) - | Full {status= Job_status.Done; _} -> + | Full { status = Job_status.Done; _ } -> (0., 1.) | Empty -> (0., 0.) in - (c +. count_todo, c' +. count_done) ) + (c +. count_todo, c' +. count_done)) ~f_base:(fun (c, c') base_node -> let count_todo, count_done = match snd base_node with | Base.Job.Empty -> (0., 0.) - | Full {status= Job_status.Todo; _} -> + | Full { status = Job_status.Todo; _ } -> (1., 0.) - | Full {status= Job_status.Done; _} -> + | Full { status = Job_status.Done; _ } -> (0., 1.) in - (c +. count_todo, c' +. count_done) ) + (c +. count_todo, c' +. count_done)) let assert_job_count t t' ~completed_job_count ~base_job_count ~value_emitted = let todo_before, done_before = job_count t in @@ -1426,14 +1441,14 @@ let assert_job_count t t' ~completed_job_count ~base_job_count ~value_emitted = (*list of jobs*) let all_jobs_expected = List.fold ~init:[] (Non_empty_list.to_list t'.trees) ~f:(fun acc tree -> - Tree.jobs_records tree @ acc ) + Tree.jobs_records tree @ acc) |> List.filter ~f:(fun job -> match job with - | Job.Base {status= Job_status.Todo; _} | Job.Merge {status= Todo; _} - -> + | Job.Base { status = Job_status.Todo; _ } + | Job.Merge { status = Todo; _ } -> true | _ -> - false ) + false) in assert (List.length all_jobs = List.length all_jobs_expected) ; let expected_todo_after = @@ -1477,7 +1492,7 @@ let%test_module "test" = in let new_merges = List.map work ~f:(fun job -> - match job with Base i -> i | Merge (i, j) -> i + j ) + match job with Base i -> i | Merge (i, j) -> i + j) in let result_opt, t' = test_update ~data ~completed_jobs:new_merges t @@ -1490,13 +1505,13 @@ let%test_module "test" = | [] -> ((0, []), []) | x :: xs -> - ((List.sum (module Int) x ~f:Fn.id, x), List.rev xs) ) + ((List.sum (module Int) x ~f:Fn.id, x), List.rev xs)) in assert ( 
[%equal: int * int list] (Option.value ~default:expected_result result_opt) expected_result ) ; - (remaining_expected_results, t') ) + (remaining_expected_results, t')) in () @@ -1517,11 +1532,9 @@ let%test_module "test" = in let new_merges = List.map work ~f:(fun job -> - match job with Base i -> i | Merge (i, j) -> i + j ) - in - let result_opt, t' = - test_update ~data ~completed_jobs:new_merges t + match job with Base i -> i | Merge (i, j) -> i + j) in + let result_opt, t' = test_update ~data ~completed_jobs:new_merges t in let expected_result = (max_base_jobs, List.init max_base_jobs ~f:(fun _ -> 1)) in @@ -1529,7 +1542,7 @@ let%test_module "test" = [%equal: int * int list] (Option.value ~default:expected_result result_opt) expected_result ) ; - state := t' ) + state := t') end ) let gen : @@ -1559,7 +1572,7 @@ let gen : in Option.value_map ~default:s res_opt ~f:(fun x -> let tuple = if Option.is_some old_tuple then old_tuple else s.acc in - {s with acc= f_acc tuple x} ) ) + { s with acc = f_acc tuple x })) let%test_module "scans" = ( module struct @@ -1578,7 +1591,7 @@ let%test_module "scans" = let tuple = if Option.is_some old_tuple then f_acc old_tuple x else state.acc in - {state with acc= tuple} ) + { state with acc = tuple }) in let%bind () = Linear_pipe.write w state.acc in let rem_ds = List.drop ds state.max_base_jobs in @@ -1601,11 +1614,11 @@ let%test_module "scans" = let scan ~data ~depth ~f ~f_acc = Linear_pipe.create_reader ~close_on_exception:true (fun w -> let s = ref (empty ~max_base_jobs:(Int.pow 2 depth) ~delay:1) in - do_steps ~state:s ~data ~f w ~f_acc ) + do_steps ~state:s ~data ~f w ~f_acc) let step_repeatedly ~state ~data ~f ~f_acc = Linear_pipe.create_reader ~close_on_exception:true (fun w -> - do_steps ~state ~data ~f w ~f_acc ) + do_steps ~state ~data ~f w ~f_acc) let%test_module "scan (+) over ints" = ( module struct @@ -1648,11 +1661,11 @@ let%test_module "scans" = if i > fst partition.first then tree_count_before + 1 else 
tree_count_before in - assert (tree_count_after = expected_tree_count) ) + assert (tree_count_after = expected_tree_count)) let%test_unit "sequence number reset" = (*create jobs with unique sequence numbers starting from 1. At any - point, after reset, the jobs should be labelled starting from 1. + point, after reset, the jobs should be labelled starting from 1. *) Backtrace.elide := false ; let p = 3 in @@ -1669,17 +1682,17 @@ let%test_module "scans" = List.iteri jobs_list ~f:(fun i jobs -> (*each tree has jobs up till a level below the older tree*) (* and have the following sequence numbers after reset - * 4 - * 3 3 - * 2 2 2 2 - * 1 1 1 1 1 1 1 1 + * 4 + * 3 3 + * 2 2 2 2 + * 1 1 1 1 1 1 1 1 *) let cur_levels = depth - i in let seq_sum = List.fold (List.init cur_levels ~f:Fn.id) ~init:0 ~f:(fun acc j -> let j = j + i in - acc + (Int.pow 2 j * (depth - j)) ) + acc + (Int.pow 2 j * (depth - j))) in let offset = i in let sum_of_all_seq_numbers = @@ -1689,13 +1702,13 @@ let%test_module "scans" = (fun (job : (int64 Merge.Record.t, int64 Base.Record.t) Job.t) -> match job with - | Job.Merge {seq_no; _} -> + | Job.Merge { seq_no; _ } -> seq_no - offset - | Base {seq_no; _} -> - seq_no - offset ) + | Base { seq_no; _ } -> + seq_no - offset) jobs in - assert (sum_of_all_seq_numbers = seq_sum) ) + assert (sum_of_all_seq_numbers = seq_sum)) in let state = ref (State.empty ~max_base_jobs ~delay:0) in let counter = ref 0 in @@ -1711,7 +1724,7 @@ let%test_module "scans" = (*start the rest after enough jobs are created*) if !counter >= p + 1 then verify_sequence_number !state else counter := !counter + 1 - else () ) + else ()) let%test_unit "serialize, deserialize scan state" = Backtrace.elide := false ; @@ -1726,8 +1739,8 @@ let%test_module "scans" = ~trials:50 ~f:(fun s -> let hash_s = State.hash s Int64.to_string Int64.to_string in let sz = - State.Stable.Latest.bin_size_t Int64.bin_size_t - Int64.bin_size_t s + State.Stable.Latest.bin_size_t Int64.bin_size_t 
Int64.bin_size_t + s in let buf = Bin_prot.Common.create_buf sz in ignore @@ -1735,13 +1748,13 @@ let%test_module "scans" = Int64.bin_write_t buf ~pos:0 s : int ) ; let deserialized = - State.Stable.Latest.bin_read_t Int64.bin_read_t - Int64.bin_read_t ~pos_ref:(ref 0) buf + State.Stable.Latest.bin_read_t Int64.bin_read_t Int64.bin_read_t + ~pos_ref:(ref 0) buf in let new_hash = State.hash deserialized Int64.to_string Int64.to_string in - assert (Hash.equal hash_s new_hash) ) + assert (Hash.equal hash_s new_hash)) let%test_unit "scan can be initialized from intermediate state" = Backtrace.elide := false ; @@ -1760,8 +1773,7 @@ let%test_module "scans" = (* For any arbitrary intermediate state *) (* if we then add 1 and a bunch of zeros *) let one_then_zeros = - Linear_pipe.create_reader ~close_on_exception:true - (fun w -> + Linear_pipe.create_reader ~close_on_exception:true (fun w -> let rec go () = let next = if !do_one_next then ( @@ -1772,7 +1784,7 @@ let%test_module "scans" = let%bind () = Pipe.write w next in go () in - go () ) + go ()) in let pipe s = step_repeatedly ~state:s ~data:one_then_zeros ~f:job_done @@ -1790,7 +1802,7 @@ let%test_module "scans" = | `Ok (Some (v', _)) -> v' | `Ok None -> - v ) + v) in (* after we flush intermediate work *) let old_acc = @@ -1803,8 +1815,7 @@ let%test_module "scans" = (* eventually we'll emit the acc+1 element *) let%map _ = fill_some_zeros v s in let acc_plus_one = !s.acc |> Option.value_exn in - assert (Int64.(equal (fst acc_plus_one) (fst acc + one))) ) - ) + assert (Int64.(equal (fst acc_plus_one) (fst acc + one))))) end ) let%test_module "scan (+) over ints, map from string" = @@ -1823,13 +1834,14 @@ let%test_module "scans" = let%test_unit "scan behaves like a fold long-term" = let a_bunch_of_ones_then_zeros x = - { Linear_pipe.Reader.pipe= + { Linear_pipe.Reader.pipe = Pipe.unfold ~init:x ~f:(fun count -> let next = if count <= 0 then "0" else Int.to_string (x - count) in - return (Some (next, count - 1)) ) - ; 
has_reader= false } + return (Some (next, count - 1))) + ; has_reader = false + } in let depth = 7 in let n = 1000 in @@ -1848,14 +1860,14 @@ let%test_module "scans" = | `Ok (Some (v, _)) -> v | `Ok None -> - acc ) + acc) in let expected = List.fold (List.init n ~f:(fun i -> Int64.of_int i)) ~init:Int64.zero ~f:Int64.( + ) in - assert ([%equal: int64] after_3n expected) ) + assert ([%equal: int64] after_3n expected)) end ) let%test_module "scan (concat) over strings" = @@ -1872,14 +1884,14 @@ let%test_module "scans" = non-commutative semigroup" = Backtrace.elide := false ; let a_bunch_of_nums_then_empties x = - { Linear_pipe.Reader.pipe= + { Linear_pipe.Reader.pipe = Pipe.unfold ~init:x ~f:(fun count -> let next = - if count <= 0 then "" - else Int.to_string (x - count) ^ "," + if count <= 0 then "" else Int.to_string (x - count) ^ "," in - return (Some (next, count - 1)) ) - ; has_reader= false } + return (Some (next, count - 1))) + ; has_reader = false + } in let n = 100 in let result = @@ -1897,13 +1909,13 @@ let%test_module "scans" = | `Ok (Some (v, _)) -> v | `Ok None -> - acc ) + acc) in let expected = List.fold (List.init n ~f:(fun i -> Int.to_string i ^ ",")) ~init:"" ~f:String.( ^ ) in - assert (String.equal after_42n expected) ) + assert (String.equal after_42n expected)) end ) end ) diff --git a/src/lib/parallel_scan/parallel_scan.mli b/src/lib/parallel_scan/parallel_scan.mli index 914521b36d1..6eca4ea6d55 100644 --- a/src/lib/parallel_scan/parallel_scan.mli +++ b/src/lib/parallel_scan/parallel_scan.mli @@ -59,7 +59,7 @@ module Weight : sig [%%versioned: module Stable : sig module V1 : sig - type t = {base: int; merge: int} [@@deriving sexp] + type t = { base : int; merge : int } [@@deriving sexp] end end] end @@ -71,9 +71,10 @@ module Base : sig module Stable : sig module V1 : sig type 'base t = - { job: 'base - ; seq_no: Sequence_number.Stable.V1.t - ; status: Job_status.Stable.V1.t } + { job : 'base + ; seq_no : Sequence_number.Stable.V1.t + ; status 
: Job_status.Stable.V1.t + } [@@deriving sexp] end end] @@ -105,10 +106,11 @@ module Merge : sig module Stable : sig module V1 : sig type 'merge t = - { left: 'merge - ; right: 'merge - ; seq_no: Sequence_number.Stable.V1.t - ; status: Job_status.Stable.V1.t } + { left : 'merge + ; right : 'merge + ; seq_no : Sequence_number.Stable.V1.t + ; status : Job_status.Stable.V1.t + } [@@deriving sexp] end end] @@ -154,7 +156,8 @@ module Space_partition : sig [%%versioned: module Stable : sig module V1 : sig - type t = {first: int * int; second: (int * int) option} [@@deriving sexp] + type t = { first : int * int; second : (int * int) option } + [@@deriving sexp] end end] end @@ -165,7 +168,9 @@ module Job_view : sig module Stable : sig module V1 : sig type t = - {seq_no: Sequence_number.Stable.V1.t; status: Job_status.Stable.V1.t} + { seq_no : Sequence_number.Stable.V1.t + ; status : Job_status.Stable.V1.t + } [@@deriving sexp] end end] @@ -189,7 +194,8 @@ module Job_view : sig [%%versioned: module Stable : sig module V1 : sig - type 'a t = {position: int; value: 'a Node.Stable.V1.t} [@@deriving sexp] + type 'a t = { position : int; value : 'a Node.Stable.V1.t } + [@@deriving sexp] end end] end @@ -216,12 +222,10 @@ module State : sig val fold_chronological_until : ('merge, 'base) t -> init:'accum - -> f_merge:( 'accum - -> 'merge Merge.t - -> ('accum, 'final) Continue_or_stop.t M.t) - -> f_base:( 'accum - -> 'base Base.t - -> ('accum, 'final) Continue_or_stop.t M.t) + -> f_merge: + ('accum -> 'merge Merge.t -> ('accum, 'final) Continue_or_stop.t M.t) + -> f_base: + ('accum -> 'base Base.t -> ('accum, 'final) Continue_or_stop.t M.t) -> finish:('accum -> 'final M.t) -> 'final M.t end @@ -264,8 +268,7 @@ val update : (** The last ['a] we emitted from the top of the tree and the ['d list] * responsible for that ['a]. 
*) -val last_emitted_value : - ('merge, 'base) State.t -> ('merge * 'base list) option +val last_emitted_value : ('merge, 'base) State.t -> ('merge * 'base list) option (** If there aren't enough slots for [max_slots] many ['d], then before * continuing onto the next virtual tree, split max_slots = (x,y) such that diff --git a/src/lib/participating_state/participating_state.ml b/src/lib/participating_state/participating_state.ml index 275ca8e8c78..91e09a8fccf 100644 --- a/src/lib/participating_state/participating_state.ml +++ b/src/lib/participating_state/participating_state.ml @@ -2,7 +2,7 @@ open Core_kernel open Async_kernel module T = struct - type 'a t = [`Active of 'a | `Bootstrapping] + type 'a t = [ `Active of 'a | `Bootstrapping ] let return value = `Active value @@ -62,9 +62,9 @@ let rec sequence (list : 'a T.t List.t) : 'a List.t T.t = match list with | [] -> return [] - | [participating_state] -> - bind participating_state ~f:(fun value -> return [value]) + | [ participating_state ] -> + bind participating_state ~f:(fun value -> return [ value ]) | participating_state :: participating_states -> bind participating_state ~f:(fun x -> map (sequence participating_states) ~f:(fun sub_result -> - x :: sub_result ) ) + x :: sub_result)) diff --git a/src/lib/perf_histograms/histogram.ml b/src/lib/perf_histograms/histogram.ml index 5a608f35cb3..e6c9847f9f4 100644 --- a/src/lib/perf_histograms/histogram.ml +++ b/src/lib/perf_histograms/histogram.ml @@ -15,29 +15,31 @@ module Make (Elem : sig val create : ?min:t0 -> ?max:t0 -> ?buckets:int -> unit -> t end - val bucket : params:Params.t -> t -> [`Index of int | `Overflow | `Underflow] + val bucket : params:Params.t -> t -> [ `Index of int | `Overflow | `Underflow ] val interval_of_bucket : params:Params.t -> int -> t * t end) = struct type t = - { buckets: int Array.t - ; intervals: (Elem.t * Elem.t) List.t - ; mutable underflow: int - ; mutable overflow: int - ; params: Elem.Params.t } + { buckets : int Array.t + 
; intervals : (Elem.t * Elem.t) List.t + ; mutable underflow : int + ; mutable overflow : int + ; params : Elem.Params.t + } let create ?buckets ?min ?max () = let params = Elem.Params.create ?min ?max ?buckets () in let intervals = List.init (Elem.Params.buckets params) ~f:(fun i -> - Elem.interval_of_bucket ~params i ) + Elem.interval_of_bucket ~params i) in - { buckets= Array.init (Elem.Params.buckets params) ~f:(fun _ -> 0) + { buckets = Array.init (Elem.Params.buckets params) ~f:(fun _ -> 0) ; intervals - ; underflow= 0 - ; overflow= 0 - ; params } + ; underflow = 0 + ; overflow = 0 + ; params + } let clear t = Array.fill t.buckets ~pos:0 ~len:(Array.length t.buckets) 0 ; @@ -46,17 +48,18 @@ struct module Pretty = struct type t = - { values: int list - ; intervals: (Elem.t * Elem.t) list - ; underflow: int - ; overflow: int } + { values : int list + ; intervals : (Elem.t * Elem.t) list + ; underflow : int + ; overflow : int + } [@@deriving yojson, bin_io_unversioned, fields] end - let report {intervals; buckets; underflow; overflow; params= _} = - {Pretty.values= Array.to_list buckets; intervals; underflow; overflow} + let report { intervals; buckets; underflow; overflow; params = _ } = + { Pretty.values = Array.to_list buckets; intervals; underflow; overflow } - let add ({params; _} as t) e = + let add ({ params; _ } as t) e = match Elem.bucket ~params e with | `Index i -> t.buckets.(i) <- Int.succ t.buckets.(i) @@ -83,9 +86,9 @@ module Exp_time_spans = Make (struct module Params = struct type t0 = t - type t = {a: float; b: float; buckets: int} + type t = { a : float; b : float; buckets : int } - let buckets {buckets; _} = buckets + let buckets { buckets; _ } = buckets (* See http://mathworld.wolfram.com/LeastSquaresFittingLogarithmic.html *) let fit min max buckets = @@ -126,10 +129,10 @@ module Exp_time_spans = Make (struct let create ?(min = Time.Span.of_sec 1.) ?(max = Time.Span.of_min 10.) 
?(buckets = 50) () = let a, b = fit min max buckets in - {a; b; buckets} + { a; b; buckets } end - let interval_of_bucket ~params:{Params.a; b; _} i = + let interval_of_bucket ~params:{ Params.a; b; _ } i = (* f-1(y) = e^{y/b - a/b} *) let f_1 y = let y = Float.of_int y in @@ -137,7 +140,7 @@ module Exp_time_spans = Make (struct in (Time.Span.of_ms (f_1 i), Time.Span.of_ms (f_1 (i + 1))) - let bucket ~params:{Params.a; b; buckets} span = + let bucket ~params:{ Params.a; b; buckets } span = let x = Time.Span.to_ms span in if Float.( <= ) x 0.0 then `Underflow else diff --git a/src/lib/perf_histograms/intf.ml b/src/lib/perf_histograms/intf.ml index 9bedec4e212..7db8f2b34c6 100644 --- a/src/lib/perf_histograms/intf.ml +++ b/src/lib/perf_histograms/intf.ml @@ -21,10 +21,10 @@ module Rpc = struct include Versioned_rpc.Both_convert.Plain.S - with type callee_query := Callee.query - and type callee_response := Callee.response - and type caller_query := Caller.query - and type caller_response := Caller.response + with type callee_query := Callee.query + and type callee_response := Callee.response + and type caller_query := Caller.query + and type caller_response := Caller.response end end diff --git a/src/lib/perf_histograms/perf_histograms.mli b/src/lib/perf_histograms/perf_histograms.mli index 6c80b2c80f5..578644b68dd 100644 --- a/src/lib/perf_histograms/perf_histograms.mli +++ b/src/lib/perf_histograms/perf_histograms.mli @@ -4,23 +4,24 @@ module Rpc : sig module Plain : sig module Extend (Rpc : Intf.Rpc.S) : Intf.Patched.S - with type callee_query := Rpc.Callee.query - and type callee_response := Rpc.Callee.response - and type caller_query := Rpc.Caller.query - and type caller_response := Rpc.Caller.response + with type callee_query := Rpc.Callee.query + and type callee_response := Rpc.Callee.response + and type caller_query := Rpc.Caller.query + and type caller_response := Rpc.Caller.response module Decorate_bin_io (M : Intf.Rpc.S) (Rpc : Intf.Versioned_rpc(M).S) : 
Intf.Versioned_rpc(M).S - [@@warning "-67"] + [@@warning "-67"] end end module Report : sig type t = - { values: int list - ; intervals: (Time.Span.t * Time.Span.t) list - ; underflow: int - ; overflow: int } + { values : int list + ; intervals : (Time.Span.t * Time.Span.t) list + ; underflow : int + ; overflow : int + } [@@deriving yojson, bin_io, fields] end diff --git a/src/lib/perf_histograms/rpc.ml b/src/lib/perf_histograms/rpc.ml index 2f85f3fc6ee..9df37bc6a77 100644 --- a/src/lib/perf_histograms/rpc.ml +++ b/src/lib/perf_histograms/rpc.ml @@ -12,8 +12,7 @@ let decorate_dispatch ~name (dispatch : ('q, 'r) Intf.dispatch) : Mina_metrics.( Network.Rpc_latency_histogram.observe Network.rpc_latency_ms_summary (Time.Span.to_ms span)) ; - Mina_metrics.( - Gauge.set (Network.rpc_latency_ms ~name) (Time.Span.to_ms span)) ; + Mina_metrics.(Gauge.set (Network.rpc_latency_ms ~name) (Time.Span.to_ms span)) ; r let decorate_impl ~name (impl : ('q, 'r, 'state) Intf.impl) : @@ -30,17 +29,16 @@ let decorate_impl ~name (impl : ('q, 'r, 'state) Intf.impl) : module Plain = struct module Extend (Rpc : Intf.Rpc.S) : Intf.Patched.S - with type callee_query := Rpc.Callee.query - and type callee_response := Rpc.Callee.response - and type caller_query := Rpc.Caller.query - and type caller_response := Rpc.Caller.response = struct + with type callee_query := Rpc.Callee.query + and type callee_response := Rpc.Callee.response + and type caller_query := Rpc.Caller.query + and type caller_response := Rpc.Caller.response = struct include Rpc let dispatch_multi = dispatch_multi |> decorate_dispatch ~name let implement_multi ?log_not_previously_seen_version f = - implement_multi ?log_not_previously_seen_version - (f |> decorate_impl ~name) + implement_multi ?log_not_previously_seen_version (f |> decorate_impl ~name) end module Decorate_bin_io (M : Intf.Rpc.S) (Rpc : Intf.Versioned_rpc(M).S) = @@ -86,15 +84,17 @@ module Plain = struct let observations = [ (Network.rpc_size_bytes ~name, 
read_size |> Float.of_int) ; (Network.rpc_max_bytes ~name, Read_stats.max_value () |> Float.of_int) - ; (Network.rpc_avg_bytes ~name, Read_stats.average_value ()) ] + ; (Network.rpc_avg_bytes ~name, Read_stats.average_value ()) + ] in List.iter observations ~f:(fun (histogram, value) -> - Network.Rpc_size_histogram.observe histogram value ) ; + Network.Rpc_size_histogram.observe histogram value) ; response let bin_reader_response = - { Bin_prot.Type_class.read= bin_read_response - ; vtag_read= __bin_read_response__ } + { Bin_prot.Type_class.read = bin_read_response + ; vtag_read = __bin_read_response__ + } module Write_stats = Make_stats () @@ -105,20 +105,23 @@ module Plain = struct let name = M.name ^ "_write_response" in let observations = [ (Network.rpc_size_bytes ~name, write_size |> Float.of_int) - ; ( Network.rpc_max_bytes ~name - , Write_stats.max_value () |> Float.of_int ) - ; (Network.rpc_avg_bytes ~name, Write_stats.average_value ()) ] + ; (Network.rpc_max_bytes ~name, Write_stats.max_value () |> Float.of_int) + ; (Network.rpc_avg_bytes ~name, Write_stats.average_value ()) + ] in List.iter observations ~f:(fun (histogram, value) -> - Network.Rpc_size_histogram.observe histogram value ) ; + Network.Rpc_size_histogram.observe histogram value) ; bin_write_response buf ~pos response let bin_writer_response = - {Bin_prot.Type_class.write= bin_write_response; size= bin_size_response} + { Bin_prot.Type_class.write = bin_write_response + ; size = bin_size_response + } let bin_response = - { Bin_prot.Type_class.reader= bin_reader_response - ; writer= bin_writer_response - ; shape= bin_shape_response } + { Bin_prot.Type_class.reader = bin_reader_response + ; writer = bin_writer_response + ; shape = bin_shape_response + } end end diff --git a/src/lib/pickles/cache.ml b/src/lib/pickles/cache.ml index 38b8b2beb71..0407e2c94bf 100644 --- a/src/lib/pickles/cache.ml +++ b/src/lib/pickles/cache.ml @@ -11,8 +11,8 @@ module Step = struct let to_string : t -> _ = function | 
_id, header, n, h -> - sprintf !"step-%s-%s-%d-%s" header.kind.type_ - header.kind.identifier n header.identifying_hash + sprintf !"step-%s-%s-%d-%s" header.kind.type_ header.kind.identifier + n header.identifying_hash end module Verification = struct @@ -35,7 +35,7 @@ module Step = struct Snark_keys_header.read_with_header ~read_data:(fun ~offset -> Marlin_plonk_bindings.Pasta_fp_index.read ~offset - (Backend.Tick.Keypair.load_urs ()) ) + (Backend.Tick.Keypair.load_urs ())) path in [%test_eq: int] header.header_version header_read.header_version ; @@ -44,7 +44,7 @@ module Step = struct header.constraint_constants header_read.constraint_constants ; [%test_eq: string] header.constraint_system_hash header_read.constraint_system_hash ; - {Backend.Tick.Keypair.index; cs} ) ) + { Backend.Tick.Keypair.index; cs })) (fun (_, header, _, _) t path -> Or_error.try_with (fun () -> Snark_keys_header.write_with_header @@ -52,7 +52,7 @@ module Step = struct ~append_data: (Marlin_plonk_bindings.Pasta_fp_index.write ~append:true t.Backend.Tick.Keypair.index) - header path ) ) + header path)) let vk_storable = Key_cache.Sync.Disk_storable.simple Key.Verification.to_string @@ -64,7 +64,7 @@ module Step = struct ~read_data:(fun ~offset path -> Marlin_plonk_bindings.Pasta_fp_verifier_index.read ~offset (Backend.Tick.Keypair.load_urs ()) - path ) + path) path in [%test_eq: int] header.header_version header_read.header_version ; @@ -73,7 +73,7 @@ module Step = struct header.constraint_constants header_read.constraint_constants ; [%test_eq: string] header.constraint_system_hash header_read.constraint_system_hash ; - index ) ) + index)) (fun (_, header, _, _) x path -> Or_error.try_with (fun () -> Snark_keys_header.write_with_header @@ -81,7 +81,7 @@ module Step = struct ~append_data: (Marlin_plonk_bindings.Pasta_fp_verifier_index.write ~append:true x) - header path ) ) + header path)) let read_or_generate cache k_p k_v typ main = let s_p = storable in @@ -91,15 +91,15 @@ module Step = 
struct lazy ( match Common.time "step keypair read" (fun () -> - Key_cache.Sync.read cache s_p (Lazy.force k_p) ) + Key_cache.Sync.read cache s_p (Lazy.force k_p)) with | Ok (pk, dirty) -> Common.time "step keypair create" (fun () -> - (Keypair.create ~pk ~vk:(Backend.Tick.Keypair.vk pk), dirty) ) + (Keypair.create ~pk ~vk:(Backend.Tick.Keypair.vk pk), dirty)) | Error _e -> let r = Common.time "stepkeygen" (fun () -> - generate_keypair ~exposing:[typ] main ) + generate_keypair ~exposing:[ typ ] main) in Timer.clock __LOC__ ; ignore @@ -112,7 +112,7 @@ module Step = struct (let k_v = Lazy.force k_v in match Common.time "step vk read" (fun () -> - Key_cache.Sync.read cache s_v k_v ) + Key_cache.Sync.read cache s_v k_v) with | Ok (vk, _) -> (vk, `Cache_hit) @@ -136,8 +136,8 @@ module Wrap = struct let to_string : t -> _ = function | _id, header, h -> - sprintf !"vk-wrap-%s-%s-%s" header.kind.type_ - header.kind.identifier header.identifying_hash + sprintf !"vk-wrap-%s-%s-%s" header.kind.type_ header.kind.identifier + header.identifying_hash end module Proving = struct @@ -162,7 +162,7 @@ module Wrap = struct Snark_keys_header.read_with_header ~read_data:(fun ~offset -> Marlin_plonk_bindings.Pasta_fq_index.read ~offset - (Backend.Tock.Keypair.load_urs ()) ) + (Backend.Tock.Keypair.load_urs ())) path in [%test_eq: int] header.header_version header_read.header_version ; @@ -171,7 +171,7 @@ module Wrap = struct header.constraint_constants header_read.constraint_constants ; [%test_eq: string] header.constraint_system_hash header_read.constraint_system_hash ; - {Backend.Tock.Keypair.index; cs} ) ) + { Backend.Tock.Keypair.index; cs })) (fun (_, header, _) t path -> Or_error.try_with (fun () -> Snark_keys_header.write_with_header @@ -179,7 +179,7 @@ module Wrap = struct ~append_data: (Marlin_plonk_bindings.Pasta_fq_index.write ~append:true t.index) - header path ) ) + header path)) let read_or_generate step_domains cache k_p k_v typ main = let module Vk = Verification_key in 
@@ -190,14 +190,14 @@ module Wrap = struct (let k = Lazy.force k_p in match Common.time "wrap key read" (fun () -> - Key_cache.Sync.read cache s_p k ) + Key_cache.Sync.read cache s_p k) with | Ok (pk, d) -> (Keypair.create ~pk ~vk:(Backend.Tock.Keypair.vk pk), d) | Error _e -> let r = Common.time "wrapkeygen" (fun () -> - generate_keypair ~exposing:[typ] main ) + generate_keypair ~exposing:[ typ ] main) in ignore ( Key_cache.Sync.write cache s_p k (Keypair.pk r) @@ -217,7 +217,7 @@ module Wrap = struct ~read_data:(fun ~offset path -> Binable.of_string (module Vk.Stable.Latest) - (In_channel.read_all path) ) + (In_channel.read_all path)) path in [%test_eq: int] header.header_version @@ -229,7 +229,7 @@ module Wrap = struct header_read.constraint_constants ; [%test_eq: string] header.constraint_system_hash header_read.constraint_system_hash ; - index ) ) + index)) (fun (_, header, _) t path -> Or_error.try_with (fun () -> Snark_keys_header.write_with_header @@ -237,9 +237,8 @@ module Wrap = struct ~append_data:(fun path -> Out_channel.with_file ~append:true path ~f:(fun file -> Out_channel.output_string file - (Binable.to_string (module Vk.Stable.Latest) t) ) - ) - header path ) ) + (Binable.to_string (module Vk.Stable.Latest) t))) + header path)) in match Key_cache.Sync.read cache s_v k_v with | Ok (vk, d) -> @@ -249,19 +248,20 @@ module Wrap = struct let vk = Keypair.vk kp in let pk = Keypair.pk kp in let vk : Vk.t = - { index= vk - ; commitments= + { index = vk + ; commitments = Pickles_types.Plonk_verification_key_evals.map vk.evals ~f:(fun x -> Array.map x.unshifted ~f:(function | Infinity -> failwith "Unexpected zero curve point" | Finite x -> - x ) ) + x)) ; step_domains - ; data= + ; data = (let open Marlin_plonk_bindings.Pasta_fq_index in - {constraints= domain_d1_size pk.index}) } + { constraints = domain_d1_size pk.index }) + } in ignore (Key_cache.Sync.write cache s_v k_v vk : unit Or_error.t) ; let _vk = Key_cache.Sync.read cache s_v k_v in diff --git 
a/src/lib/pickles/commitment_lengths.ml b/src/lib/pickles/commitment_lengths.ml index cffe32f51a2..d2ad8dcf4c8 100644 --- a/src/lib/pickles/commitment_lengths.ml +++ b/src/lib/pickles/commitment_lengths.ml @@ -12,17 +12,16 @@ let generic' ~h ~sub ~add:( + ) ~mul:( * ) ~of_int ~ceil_div_max_degree : ceil_div_max_degree (Common.max_quot_size ~of_int ~mul:( * ) ~sub n) in let h = ceil_div_max_degree n in - {l= n; r= n; o= n; z= n; t= t_bound; f= n; sigma1= h; sigma2= h} + { l = n; r = n; o = n; z = n; t = t_bound; f = n; sigma1 = h; sigma2 = h } let generic map ~h ~max_degree : _ Dlog_plonk_types.Evals.t = let t_bound = map h ~f:(fun h -> Common.max_quot_size_int h) in Dlog_plonk_types.Evals.map ~f:(fun v -> - map v ~f:(fun x -> Int.round_up x ~to_multiple_of:max_degree / max_degree) - ) - {l= h; r= h; o= h; z= h; t= t_bound; f= h; sigma1= h; sigma2= h} + map v ~f:(fun x -> Int.round_up x ~to_multiple_of:max_degree / max_degree)) + { l = h; r = h; o = h; z = h; t = t_bound; f = h; sigma1 = h; sigma2 = h } -let of_domains {Domains.h; _} ~max_degree : int Dlog_plonk_types.Evals.t = +let of_domains { Domains.h; _ } ~max_degree : int Dlog_plonk_types.Evals.t = let h = Domain.size h in generic ~max_degree (fun x ~f -> f x) ~h diff --git a/src/lib/pickles/common.ml b/src/lib/pickles/common.ml index 6c11dd97053..f9a83c1808b 100644 --- a/src/lib/pickles/common.ml +++ b/src/lib/pickles/common.ml @@ -1,7 +1,9 @@ open Core_kernel open Pickles_types + module Unshifted_acc = Pairing_marlin_types.Accumulator.Degree_bound_checks.Unshifted_accumulators + open Import open Backend @@ -14,8 +16,8 @@ end let tick_shifts, tock_shifts = let mk g = let f = - Memo.general ~cache_size_bound:20 ~hashable:Int.hashable - (fun log2_size -> g ~log2_size) + Memo.general ~cache_size_bound:20 ~hashable:Int.hashable (fun log2_size -> + g ~log2_size) in fun ~log2_size -> f log2_size in @@ -23,15 +25,16 @@ let tick_shifts, tock_shifts = , mk Backend.Tock.Verification_key.shifts ) let wrap_domains = - { 
Domains.h= Pow_2_roots_of_unity 17 - ; x= + { Domains.h = Pow_2_roots_of_unity 17 + ; x = Pow_2_roots_of_unity (let (T (typ, _)) = Impls.Wrap.input () in - Int.ceil_log2 (Impls.Wrap.Data_spec.size [typ])) } + Int.ceil_log2 (Impls.Wrap.Data_spec.size [ typ ])) + } let hash_pairing_me_only ~app_state (t : _ Types.Pairing_based.Proof_state.Me_only.t) = - let g (x, y) = [x; y] in + let g (x, y) = [ x; y ] in let open Backend in Tick_field_sponge.digest Tick_field_sponge.params (Types.Pairing_based.Proof_state.Me_only.to_field_elements t ~g @@ -39,7 +42,7 @@ let hash_pairing_me_only ~app_state (fun (x : Tock.Curve.Affine.t Dlog_plonk_types.Poly_comm.Without_degree_bound.t) -> - Array.concat_map x ~f:(Fn.compose Array.of_list g) ) + Array.concat_map x ~f:(Fn.compose Array.of_list g)) ~app_state) let hash_dlog_me_only (type n) (_max_branching : n Nat.t) @@ -49,12 +52,12 @@ let hash_dlog_me_only (type n) (_max_branching : n Nat.t) Types.Dlog_based.Proof_state.Me_only.t) = Tock_field_sponge.digest Tock_field_sponge.params (Types.Dlog_based.Proof_state.Me_only.to_field_elements t - ~g1:(fun ((x, y) : Tick.Curve.Affine.t) -> [x; y])) + ~g1:(fun ((x, y) : Tick.Curve.Affine.t) -> [ x; y ])) let dlog_pcs_batch (type n_branching total) ((without_degree_bound, _pi) : total Nat.t * (n_branching, Nat.N8.n, total) Nat.Adds.t) ~max_quot_size = - Pcs_batch.create ~without_degree_bound ~with_degree_bound:[max_quot_size] + Pcs_batch.create ~without_degree_bound ~with_degree_bound:[ max_quot_size ] module Pairing_pcs_batch = struct let beta_1 : (int, _, _) Pcs_batch.t = @@ -68,9 +71,7 @@ module Pairing_pcs_batch = struct end let when_profiling profiling default = - match - Option.map (Sys.getenv_opt "PICKLES_PROFILING") ~f:String.lowercase - with + match Option.map (Sys.getenv_opt "PICKLES_PROFILING") ~f:String.lowercase with | None | Some ("0" | "false") -> default | Some _ -> @@ -83,7 +84,7 @@ let time lab f = let x = f () in let stop = Time.now () in printf "%s: %s\n%!" 
lab (Time.Span.to_string_hum (Time.diff stop start)) ; - x ) + x) f () let bits_random_oracle = @@ -92,20 +93,20 @@ let bits_random_oracle = Digestif.digest_string h s |> Digestif.to_raw_string h |> String.to_list |> List.concat_map ~f:(fun c -> let c = Char.to_int c in - List.init 8 ~f:(fun i -> (c lsr i) land 1 = 1) ) + List.init 8 ~f:(fun i -> (c lsr i) land 1 = 1)) |> fun a -> List.take a length let bits_to_bytes bits = let byte_of_bits bs = List.foldi bs ~init:0 ~f:(fun i acc b -> - if b then acc lor (1 lsl i) else acc ) + if b then acc lor (1 lsl i) else acc) |> Char.of_int_exn in List.map (List.groupi bits ~break:(fun i _ _ -> i mod 8 = 0)) ~f:byte_of_bits |> String.of_char_list let group_map m ~a ~b = - let params = Group_map.Params.create m {a; b} in + let params = Group_map.Params.create m { a; b } in stage (fun x -> Group_map.to_group m ~params x) module Shifts = struct @@ -126,8 +127,8 @@ module Ipa = struct endo_to_field c let compute_challenges ~endo_to_field field chals = - Vector.map chals ~f:(fun {Bulletproof_challenge.prechallenge} -> - compute_challenge field ~endo_to_field prechallenge ) + Vector.map chals ~f:(fun { Bulletproof_challenge.prechallenge } -> + compute_challenge field ~endo_to_field prechallenge) module Wrap = struct let field = @@ -173,26 +174,25 @@ module Ipa = struct in let comms = Array.of_list_map comm_chals ~f:(fun (comm, _) -> - Or_infinity.Finite comm ) + Or_infinity.Finite comm) in let urs = Backend.Tick.Keypair.load_urs () in Async.In_thread.run (fun () -> Marlin_plonk_bindings.Pasta_fp_urs.batch_accumulator_check urs comms - chals ) + chals) end end let tock_unpadded_public_input_of_statement prev_statement = let input = let (T (typ, _conv)) = Impls.Wrap.input () in - Impls.Wrap.generate_public_input [typ] prev_statement + Impls.Wrap.generate_public_input [ typ ] prev_statement in List.init (Backend.Tock.Field.Vector.length input) ~f:(Backend.Tock.Field.Vector.get input) -let tock_public_input_of_statement s = - 
tock_unpadded_public_input_of_statement s +let tock_public_input_of_statement s = tock_unpadded_public_input_of_statement s let tick_public_input_of_statement ~max_branching (prev_statement : _ Types.Pairing_based.Statement.t) = @@ -200,7 +200,7 @@ let tick_public_input_of_statement ~max_branching let (T (input, _conv)) = Impls.Step.input ~branching:max_branching ~wrap_rounds:Tock.Rounds.n in - Impls.Step.generate_public_input [input] prev_statement + Impls.Step.generate_public_input [ input ] prev_statement in List.init (Backend.Tick.Field.Vector.length input) diff --git a/src/lib/pickles/composition_types/bulletproof_challenge.ml b/src/lib/pickles/composition_types/bulletproof_challenge.ml index 09ad8eb56c3..09cb16a8dcf 100644 --- a/src/lib/pickles/composition_types/bulletproof_challenge.ml +++ b/src/lib/pickles/composition_types/bulletproof_challenge.ml @@ -3,18 +3,18 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type 'challenge t = {prechallenge: 'challenge} + type 'challenge t = { prechallenge : 'challenge } [@@deriving sexp, compare, yojson, hash, equal] end end] -let pack {prechallenge} = prechallenge +let pack { prechallenge } = prechallenge -let unpack = function prechallenge -> {prechallenge} +let unpack = function prechallenge -> { prechallenge } let typ chal = - let there {prechallenge} = prechallenge in - let back prechallenge = {prechallenge} in + let there { prechallenge } = prechallenge in + let back prechallenge = { prechallenge } in let open Snarky_backendless in Typ.transport ~there ~back (Pickles_types.Scalar_challenge.typ chal) |> Typ.transport_var ~there ~back diff --git a/src/lib/pickles/composition_types/composition_types.ml b/src/lib/pickles/composition_types/composition_types.ml index f8d903c8f3d..7bb37738e92 100644 --- a/src/lib/pickles/composition_types/composition_types.ml +++ b/src/lib/pickles/composition_types/composition_types.ml @@ -24,7 +24,8 @@ let index_to_field_elements (k : 'a 
Plonk_verification_key_evals.t) ~g = ; g15 ; g16 ; g17 - ; g18 ] = + ; g18 + ] = Plonk_verification_key_evals.to_hlist k in List.map @@ -45,7 +46,8 @@ let index_to_field_elements (k : 'a Plonk_verification_key_evals.t) ~g = ; g15 ; g16 ; g17 - ; g18 ] + ; g18 + ] ~f:g |> Array.concat @@ -58,10 +60,11 @@ module Dlog_based = struct module Stable = struct module V1 = struct type ('challenge, 'scalar_challenge) t = - { alpha: 'scalar_challenge - ; beta: 'challenge - ; gamma: 'challenge - ; zeta: 'scalar_challenge } + { alpha : 'scalar_challenge + ; beta : 'challenge + ; gamma : 'challenge + ; zeta : 'scalar_challenge + } [@@deriving sexp, compare, yojson, hlist, hash, equal] let to_latest = Fn.id @@ -73,45 +76,48 @@ module Dlog_based = struct module In_circuit = struct type ('challenge, 'scalar_challenge, 'fp) t = - { alpha: 'scalar_challenge - ; beta: 'challenge - ; gamma: 'challenge - ; zeta: 'scalar_challenge - ; perm0: 'fp - ; perm1: 'fp - ; gnrc_l: 'fp - ; gnrc_r: 'fp - ; gnrc_o: 'fp - ; psdn0: 'fp - ; ecad0: 'fp - ; vbmul0: 'fp - ; vbmul1: 'fp - ; endomul0: 'fp - ; endomul1: 'fp - ; endomul2: 'fp } + { alpha : 'scalar_challenge + ; beta : 'challenge + ; gamma : 'challenge + ; zeta : 'scalar_challenge + ; perm0 : 'fp + ; perm1 : 'fp + ; gnrc_l : 'fp + ; gnrc_r : 'fp + ; gnrc_o : 'fp + ; psdn0 : 'fp + ; ecad0 : 'fp + ; vbmul0 : 'fp + ; vbmul1 : 'fp + ; endomul0 : 'fp + ; endomul1 : 'fp + ; endomul2 : 'fp + } [@@deriving sexp, compare, yojson, hlist, hash, equal, fields] let map_challenges t ~f ~scalar = { t with - alpha= scalar t.alpha - ; beta= f t.beta - ; gamma= f t.gamma - ; zeta= scalar t.zeta } + alpha = scalar t.alpha + ; beta = f t.beta + ; gamma = f t.gamma + ; zeta = scalar t.zeta + } let map_fields t ~f = { t with - perm0= f t.perm0 - ; perm1= f t.perm1 - ; gnrc_l= f t.gnrc_l - ; gnrc_r= f t.gnrc_r - ; gnrc_o= f t.gnrc_o - ; psdn0= f t.psdn0 - ; ecad0= f t.ecad0 - ; vbmul0= f t.vbmul0 - ; vbmul1= f t.vbmul1 - ; endomul0= f t.endomul0 - ; endomul1= f 
t.endomul1 - ; endomul2= f t.endomul2 } + perm0 = f t.perm0 + ; perm1 = f t.perm1 + ; gnrc_l = f t.gnrc_l + ; gnrc_r = f t.gnrc_r + ; gnrc_o = f t.gnrc_o + ; psdn0 = f t.psdn0 + ; ecad0 = f t.ecad0 + ; vbmul0 = f t.vbmul0 + ; vbmul1 = f t.vbmul1 + ; endomul0 = f t.endomul0 + ; endomul1 = f t.endomul1 + ; endomul2 = f t.endomul2 + } let typ (type f fp) ~challenge ~scalar_challenge (fp : (fp, _, f) Snarky_backendless.Typ.t) = @@ -131,13 +137,14 @@ module Dlog_based = struct ; fp ; fp ; fp - ; fp ] + ; fp + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end let to_minimal (t : _ In_circuit.t) : _ Minimal.t = - {alpha= t.alpha; beta= t.beta; zeta= t.zeta; gamma= t.gamma} + { alpha = t.alpha; beta = t.beta; zeta = t.zeta; gamma = t.gamma } end [%%versioned @@ -152,12 +159,13 @@ module Dlog_based = struct , 'bulletproof_challenges , 'index ) t = - { plonk: 'plonk - ; combined_inner_product: 'fp - ; b: 'fp - ; xi: 'scalar_challenge - ; bulletproof_challenges: 'bulletproof_challenges - ; which_branch: 'index } + { plonk : 'plonk + ; combined_inner_product : 'fp + ; b : 'fp + ; xi : 'scalar_challenge + ; bulletproof_challenges : 'bulletproof_challenges + ; which_branch : 'index + } [@@deriving sexp, compare, yojson, hlist, hash, equal] let to_latest = Fn.id @@ -178,12 +186,13 @@ module Dlog_based = struct , 'bulletproof_challenges , 'index ) Stable.Latest.t = - { plonk: 'plonk - ; combined_inner_product: 'fp - ; b: 'fp - ; xi: 'scalar_challenge - ; bulletproof_challenges: 'bulletproof_challenges - ; which_branch: 'index } + { plonk : 'plonk + ; combined_inner_product : 'fp + ; b : 'fp + ; xi : 'scalar_challenge + ; bulletproof_challenges : 'bulletproof_challenges + ; which_branch : 'index + } [@@deriving sexp, compare, yojson, hlist, hash, equal] module Minimal = struct @@ -207,16 +216,18 @@ module Dlog_based = struct let map_challenges { plonk ; combined_inner_product - ; b: 'fp + ; b : 'fp ; xi ; bulletproof_challenges - 
; which_branch } ~f ~scalar = - { xi= scalar xi + ; which_branch + } ~f ~scalar = + { xi = scalar xi ; combined_inner_product ; b ; plonk ; bulletproof_challenges - ; which_branch } + ; which_branch + } module In_circuit = struct type ( 'challenge @@ -247,13 +258,14 @@ module Dlog_based = struct ; Vector.typ (Bulletproof_challenge.typ scalar_challenge) Backend.Tick.Rounds.n - ; index ] + ; index + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end let to_minimal (t : _ In_circuit.t) : _ Minimal.t = - {t with plonk= Plonk.to_minimal t.plonk} + { t with plonk = Plonk.to_minimal t.plonk } end module Me_only = struct @@ -261,23 +273,24 @@ module Dlog_based = struct module Stable = struct module V1 = struct type ('g1, 'bulletproof_challenges) t = - {sg: 'g1; old_bulletproof_challenges: 'bulletproof_challenges} + { sg : 'g1; old_bulletproof_challenges : 'bulletproof_challenges } [@@deriving sexp, compare, yojson, hlist, hash, equal] end end] - let to_field_elements {sg; old_bulletproof_challenges} + let to_field_elements { sg; old_bulletproof_challenges } ~g1:g1_to_field_elements = Array.concat [ Vector.to_array old_bulletproof_challenges |> Array.concat_map ~f:Vector.to_array - ; Array.of_list (g1_to_field_elements sg) ] + ; Array.of_list (g1_to_field_elements sg) + ] let typ g1 chal ~length = Snarky_backendless.Typ.of_hlistable - [g1; Vector.typ chal length] - ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist - ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + [ g1; Vector.typ chal length ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist + ~value_of_hlist:of_hlist end [%%versioned @@ -292,7 +305,7 @@ module Dlog_based = struct , 'bp_chals , 'index ) t = - { deferred_values: + { deferred_values : ( 'plonk , 'scalar_challenge , 'fp @@ -300,9 +313,10 @@ module Dlog_based = struct , 'bp_chals , 'index ) Deferred_values.Stable.V1.t - ; sponge_digest_before_evaluations: 'digest + ; 
sponge_digest_before_evaluations : 'digest (* Not needed by other proof system *) - ; me_only: 'me_only } + ; me_only : 'me_only + } [@@deriving sexp, compare, yojson, hlist, hash, equal] end end] @@ -339,10 +353,7 @@ module Dlog_based = struct , 'bp_chals , 'index ) t = - ( ( 'challenge - , 'scalar_challenge - , 'fp ) - Deferred_values.Plonk.In_circuit.t + ( ('challenge, 'scalar_challenge, 'fp) Deferred_values.Plonk.In_circuit.t , 'scalar_challenge , 'fp , 'fq @@ -361,58 +372,66 @@ module Dlog_based = struct [ Deferred_values.In_circuit.typ ~challenge ~scalar_challenge fp fq index ; digest - ; me_only ] - ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist - ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + ; me_only + ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist + ~value_of_hlist:of_hlist end let to_minimal (t : _ In_circuit.t) : _ Minimal.t = - {t with deferred_values= Deferred_values.to_minimal t.deferred_values} + { t with deferred_values = Deferred_values.to_minimal t.deferred_values } end module Pass_through = struct type ('g, 's, 'sg, 'bulletproof_challenges) t = - { app_state: 's - ; dlog_plonk_index: + { app_state : 's + ; dlog_plonk_index : 'g Dlog_plonk_types.Poly_comm.Without_degree_bound.t Plonk_verification_key_evals.t - ; sg: 'sg - ; old_bulletproof_challenges: 'bulletproof_challenges } + ; sg : 'sg + ; old_bulletproof_challenges : 'bulletproof_challenges + } [@@deriving sexp] let to_field_elements - {app_state; dlog_plonk_index; sg; old_bulletproof_challenges} + { app_state; dlog_plonk_index; sg; old_bulletproof_challenges } ~app_state:app_state_to_field_elements ~comm ~g = Array.concat [ index_to_field_elements ~g:comm dlog_plonk_index ; app_state_to_field_elements app_state ; Array.of_list (List.concat_map ~f:g (Vector.to_list sg)) ; Vector.to_array old_bulletproof_challenges - |> Array.concat_map ~f:Vector.to_array ] + |> Array.concat_map ~f:Vector.to_array + ] let to_field_elements_without_index - {app_state; 
dlog_plonk_index= _; sg; old_bulletproof_challenges} + { app_state; dlog_plonk_index = _; sg; old_bulletproof_challenges } ~app_state:app_state_to_field_elements ~g = Array.concat [ app_state_to_field_elements app_state ; Array.of_list (List.concat_map ~f:g (Vector.to_list sg)) ; Vector.to_array old_bulletproof_challenges - |> Array.concat_map ~f:Vector.to_array ] + |> Array.concat_map ~f:Vector.to_array + ] open Snarky_backendless.H_list - let to_hlist {app_state; dlog_plonk_index; sg; old_bulletproof_challenges} + let to_hlist { app_state; dlog_plonk_index; sg; old_bulletproof_challenges } = - [app_state; dlog_plonk_index; sg; old_bulletproof_challenges] + [ app_state; dlog_plonk_index; sg; old_bulletproof_challenges ] let of_hlist - ([app_state; dlog_plonk_index; sg; old_bulletproof_challenges] : + ([ app_state; dlog_plonk_index; sg; old_bulletproof_challenges ] : (unit, _) t) = - {app_state; dlog_plonk_index; sg; old_bulletproof_challenges} + { app_state; dlog_plonk_index; sg; old_bulletproof_challenges } let typ comm g s chal branching = Snarky_backendless.Typ.of_hlistable - [s; Plonk_verification_key_evals.typ comm; Vector.typ g branching; chal] + [ s + ; Plonk_verification_key_evals.typ comm + ; Vector.typ g branching + ; chal + ] (* TODO: Should this really just be a vector typ of length Rounds.n ?*) ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -432,7 +451,7 @@ module Dlog_based = struct , 'bp_chals , 'index ) t = - { proof_state: + { proof_state : ( 'plonk , 'scalar_challenge , 'fp @@ -442,7 +461,8 @@ module Dlog_based = struct , 'bp_chals , 'index ) Proof_state.Stable.V1.t - ; pass_through: 'pass_through } + ; pass_through : 'pass_through + } [@@deriving compare, yojson, sexp, hash, equal] end end] @@ -512,17 +532,18 @@ module Dlog_based = struct ; Vector (Scalar Challenge, Nat.N3.n) ; Vector (B Digest, Nat.N3.n) ; Vector (B Bulletproof_challenge, Backend.Tick.Rounds.n) - ; Vector (B Index, Nat.N1.n) ] 
+ ; Vector (B Index, Nat.N1.n) + ] let to_data - ({ proof_state= - { deferred_values= + ({ proof_state = + { deferred_values = { xi ; combined_inner_product ; b ; which_branch ; bulletproof_challenges - ; plonk= + ; plonk = { alpha ; beta ; gamma @@ -538,10 +559,14 @@ module Dlog_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 } } + ; endomul2 + } + } ; sponge_digest_before_evaluations - ; me_only } - ; pass_through } : + ; me_only + } + ; pass_through + } : _ t) = let open Vector in let fp = @@ -558,21 +583,23 @@ module Dlog_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 ] + ; endomul2 + ] in - let challenge = [beta; gamma] in - let scalar_challenge = [alpha; zeta; xi] in + let challenge = [ beta; gamma ] in + let scalar_challenge = [ alpha; zeta; xi ] in let digest = - [sponge_digest_before_evaluations; me_only; pass_through] + [ sponge_digest_before_evaluations; me_only; pass_through ] in - let index = [which_branch] in + let index = [ which_branch ] in Hlist.HlistId. [ fp ; challenge ; scalar_challenge ; digest ; bulletproof_challenges - ; index ] + ; index + ] let of_data Hlist.HlistId. 
@@ -581,7 +608,8 @@ module Dlog_based = struct ; scalar_challenge ; digest ; bulletproof_challenges - ; index ] : _ t = + ; index + ] : _ t = let open Vector in let [ combined_inner_product ; b @@ -596,23 +624,24 @@ module Dlog_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 ] = + ; endomul2 + ] = fp in - let [beta; gamma] = challenge in - let [alpha; zeta; xi] = scalar_challenge in - let [sponge_digest_before_evaluations; me_only; pass_through] = + let [ beta; gamma ] = challenge in + let [ alpha; zeta; xi ] = scalar_challenge in + let [ sponge_digest_before_evaluations; me_only; pass_through ] = digest in - let [which_branch] = index in - { proof_state= - { deferred_values= + let [ which_branch ] = index in + { proof_state = + { deferred_values = { xi ; combined_inner_product ; b ; which_branch ; bulletproof_challenges - ; plonk= + ; plonk = { alpha ; beta ; gamma @@ -628,14 +657,18 @@ module Dlog_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 } } + ; endomul2 + } + } ; sponge_digest_before_evaluations - ; me_only } - ; pass_through } + ; me_only + } + ; pass_through + } end let to_minimal (t : _ In_circuit.t) : _ Minimal.t = - {t with proof_state= Proof_state.to_minimal t.proof_state} + { t with proof_state = Proof_state.to_minimal t.proof_state } end end @@ -645,7 +678,8 @@ module Pairing_based = struct module Openings = struct module Evaluations = struct module By_point = struct - type 'fq t = {beta_1: 'fq; beta_2: 'fq; beta_3: 'fq; g_challenge: 'fq} + type 'fq t = + { beta_1 : 'fq; beta_2 : 'fq; beta_3 : 'fq; g_challenge : 'fq } end type 'fq t = ('fq By_point.t, Plonk_polys.n Vector.s) Vector.t @@ -656,20 +690,20 @@ module Pairing_based = struct module Advice = struct (* This is data that can be computed in linear time from the above plus the statement. 
- - It doesn't need to be sent on the wire, but it does need to be provided to the verifier + + It doesn't need to be sent on the wire, but it does need to be provided to the verifier *) - type ('fq, 'g) t = {b: 'fq} [@@deriving hlist] + type ('fq, 'g) t = { b : 'fq } [@@deriving hlist] let typ fq g = let open Snarky_backendless.Typ in - of_hlistable [fq] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist + of_hlistable [ fq ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end end type ('fq, 'g) t = - {evaluations: 'fq Evaluations.t; proof: ('fq, 'g) Bulletproof.t} + { evaluations : 'fq Evaluations.t; proof : ('fq, 'g) Bulletproof.t } end module Proof_state = struct @@ -677,11 +711,12 @@ module Pairing_based = struct module Plonk = Dlog_based.Proof_state.Deferred_values.Plonk type ('plonk, 'scalar_challenge, 'fq, 'bulletproof_challenges) t_ = - { plonk: 'plonk - ; combined_inner_product: 'fq - ; xi: 'scalar_challenge (* 128 bits *) - ; bulletproof_challenges: 'bulletproof_challenges - ; b: 'fq } + { plonk : 'plonk + ; combined_inner_product : 'fq + ; xi : 'scalar_challenge (* 128 bits *) + ; bulletproof_challenges : 'bulletproof_challenges + ; b : 'fq + } [@@deriving sexp, compare, yojson] module Minimal = struct @@ -716,14 +751,15 @@ module Pairing_based = struct , 'digest , 'bool ) t_ = - { deferred_values: + { deferred_values : ( 'plonk , 'scalar_challenge , 'fq , 'bulletproof_challenges ) Deferred_values.t_ - ; should_finalize: 'bool - ; sponge_digest_before_evaluations: 'digest } + ; should_finalize : 'bool + ; sponge_digest_before_evaluations : 'digest + } [@@deriving sexp, compare, yojson] module Minimal = struct @@ -772,15 +808,16 @@ module Pairing_based = struct ; Vector (B Challenge, Nat.N2.n) ; Vector (Scalar Challenge, Nat.N3.n) ; Vector (B Bulletproof_challenge, bp_log2) - ; Vector (B Bool, Nat.N1.n) ] + ; Vector (B Bool, Nat.N1.n) + ] let to_data - ({ deferred_values= + ({ deferred_values = { xi ; 
bulletproof_challenges ; b ; combined_inner_product - ; plonk= + ; plonk = { alpha ; beta ; gamma @@ -796,9 +833,12 @@ module Pairing_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 } } + ; endomul2 + } + } ; should_finalize - ; sponge_digest_before_evaluations } : + ; sponge_digest_before_evaluations + } : _ t) = let open Vector in let fq = @@ -815,19 +855,21 @@ module Pairing_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 ] + ; endomul2 + ] in - let challenge = [beta; gamma] in - let scalar_challenge = [alpha; zeta; xi] in - let digest = [sponge_digest_before_evaluations] in - let bool = [should_finalize] in + let challenge = [ beta; gamma ] in + let scalar_challenge = [ alpha; zeta; xi ] in + let digest = [ sponge_digest_before_evaluations ] in + let bool = [ should_finalize ] in let open Hlist.HlistId in [ fq ; digest ; challenge ; scalar_challenge ; bulletproof_challenges - ; bool ] + ; bool + ] let of_data Hlist.HlistId. @@ -845,18 +887,20 @@ module Pairing_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 ] - ; Vector.[sponge_digest_before_evaluations] - ; Vector.[beta; gamma] - ; Vector.[alpha; zeta; xi] + ; endomul2 + ] + ; Vector.[ sponge_digest_before_evaluations ] + ; Vector.[ beta; gamma ] + ; Vector.[ alpha; zeta; xi ] ; bulletproof_challenges - ; Vector.[should_finalize] ] : _ t = - { deferred_values= + ; Vector.[ should_finalize ] + ] : _ t = + { deferred_values = { xi ; bulletproof_challenges ; b ; combined_inner_product - ; plonk= + ; plonk = { alpha ; beta ; gamma @@ -872,30 +916,36 @@ module Pairing_based = struct ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 } } + ; endomul2 + } + } ; should_finalize - ; sponge_digest_before_evaluations } + ; sponge_digest_before_evaluations + } end end type ('unfinalized_proofs, 'me_only) t = - {unfinalized_proofs: 'unfinalized_proofs; me_only: 'me_only} + { unfinalized_proofs : 'unfinalized_proofs; me_only : 'me_only } [@@deriving sexp, compare, yojson] let spec 
unfinalized_proofs me_only = let open Spec in - Struct [unfinalized_proofs; me_only] + Struct [ unfinalized_proofs; me_only ] include struct open Hlist.HlistId - let to_data {unfinalized_proofs; me_only} = - [Vector.map unfinalized_proofs ~f:Per_proof.In_circuit.to_data; me_only] + let to_data { unfinalized_proofs; me_only } = + [ Vector.map unfinalized_proofs ~f:Per_proof.In_circuit.to_data + ; me_only + ] - let of_data [unfinalized_proofs; me_only] = - { unfinalized_proofs= + let of_data [ unfinalized_proofs; me_only ] = + { unfinalized_proofs = Vector.map unfinalized_proofs ~f:Per_proof.In_circuit.of_data - ; me_only } + ; me_only + } end let typ impl branching fq : @@ -916,30 +966,38 @@ module Pairing_based = struct module Statement = struct type ('unfinalized_proofs, 'me_only, 'pass_through) t = - { proof_state: ('unfinalized_proofs, 'me_only) Proof_state.t - ; pass_through: 'pass_through } + { proof_state : ('unfinalized_proofs, 'me_only) Proof_state.t + ; pass_through : 'pass_through + } [@@deriving sexp, compare, yojson] - let to_data {proof_state= {unfinalized_proofs; me_only}; pass_through} = + let to_data { proof_state = { unfinalized_proofs; me_only }; pass_through } + = let open Hlist.HlistId in [ Vector.map unfinalized_proofs ~f:Proof_state.Per_proof.In_circuit.to_data ; me_only - ; pass_through ] + ; pass_through + ] - let of_data Hlist.HlistId.[unfinalized_proofs; me_only; pass_through] = - { proof_state= - { unfinalized_proofs= + let of_data Hlist.HlistId.[ unfinalized_proofs; me_only; pass_through ] = + { proof_state = + { unfinalized_proofs = Vector.map unfinalized_proofs ~f:Proof_state.Per_proof.In_circuit.of_data - ; me_only } - ; pass_through } + ; me_only + } + ; pass_through + } let spec branching bp_log2 = let open Spec in let per_proof = Proof_state.Per_proof.In_circuit.spec bp_log2 in Struct - [Vector (per_proof, branching); B Digest; Vector (B Digest, branching)] + [ Vector (per_proof, branching) + ; B Digest + ; Vector (B Digest, 
branching) + ] end end diff --git a/src/lib/pickles/composition_types/digest.ml b/src/lib/pickles/composition_types/digest.ml index 468a1bd323d..11eeef3b09e 100644 --- a/src/lib/pickles/composition_types/digest.ml +++ b/src/lib/pickles/composition_types/digest.ml @@ -51,7 +51,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.Run) = struct ~compute:As_prover.(fun () -> Field.Constant.unpack (read_var x)) in Field.Assert.equal x (Field.project res) ; - res ) + res) end let () = assert (Field.size_in_bits < 64 * Nat.to_int Limbs.n) diff --git a/src/lib/pickles/composition_types/index.ml b/src/lib/pickles/composition_types/index.ml index f711461e99f..07fbd904814 100644 --- a/src/lib/pickles/composition_types/index.ml +++ b/src/lib/pickles/composition_types/index.ml @@ -18,7 +18,7 @@ let of_int_exn = Char.of_int_exn let of_bits bits = List.foldi bits ~init:0 ~f:(fun i acc b -> - if b then acc lor (1 lsl i) else acc ) + if b then acc lor (1 lsl i) else acc) |> Char.of_int_exn module Checked (Impl : Snarky_backendless.Snark_intf.Run) = struct @@ -36,7 +36,7 @@ let typ bool : (('bvar, Nat.N8.n) Vector.t, t, 'f) Snarky_backendless.Typ.t = transport (Vector.typ bool Nat.N8.n) ~there:(fun (x : char) -> let x = Char.to_int x in - Vector.init Nat.N8.n ~f:(fun i -> (x lsr i) land 1 = 1) ) + Vector.init Nat.N8.n ~f:(fun i -> (x lsr i) land 1 = 1)) ~back:(fun bits -> of_bits (Vector.to_list bits)) let packed_typ (type f) diff --git a/src/lib/pickles/composition_types/spec.ml b/src/lib/pickles/composition_types/spec.ml index 220ccb25645..3efb6445f57 100644 --- a/src/lib/pickles/composition_types/spec.ml +++ b/src/lib/pickles/composition_types/spec.ml @@ -11,29 +11,29 @@ end open Basic type (_, _, _) Basic.t += - | Field : ('field1, 'field2, < field1: 'field1 ; field2: 'field2 ; .. >) t - | Bool : ('bool1, 'bool2, < bool1: 'bool1 ; bool2: 'bool2 ; .. >) t + | Field : ('field1, 'field2, < field1 : 'field1 ; field2 : 'field2 ; .. 
>) t + | Bool : ('bool1, 'bool2, < bool1 : 'bool1 ; bool2 : 'bool2 ; .. >) t | Digest : - ('digest1, 'digest2, < digest1: 'digest1 ; digest2: 'digest2 ; .. >) t + ('digest1, 'digest2, < digest1 : 'digest1 ; digest2 : 'digest2 ; .. >) t | Challenge : ( 'challenge1 , 'challenge2 - , < challenge1: 'challenge1 ; challenge2: 'challenge2 ; .. > ) + , < challenge1 : 'challenge1 ; challenge2 : 'challenge2 ; .. > ) t | Bulletproof_challenge : ( 'bp_chal1 , 'bp_chal2 - , < bulletproof_challenge1: 'bp_chal1 - ; bulletproof_challenge2: 'bp_chal2 + , < bulletproof_challenge1 : 'bp_chal1 + ; bulletproof_challenge2 : 'bp_chal2 ; .. > ) t - | Index : ('index1, 'index2, < index1: 'index1 ; index2: 'index2 ; .. >) t + | Index : ('index1, 'index2, < index1 : 'index1 ; index2 : 'index2 ; .. >) t module rec T : sig type (_, _, _) t = | B : ('a, 'b, 'env) Basic.t -> ('a, 'b, 'env) t | Scalar : - ('a, 'b, (< challenge1: 'a ; challenge2: 'b ; .. > as 'env)) Basic.t + ('a, 'b, (< challenge1 : 'a ; challenge2 : 'b ; .. > as 'env)) Basic.t -> ('a Sc.t, 'b Sc.t, 'env) t | Vector : ('t1, 't2, 'env) t * 'n Nat.t @@ -48,10 +48,11 @@ end = include T type ('bool, 'env) pack = - {pack: 'a 'b. ('a, 'b, 'env) Basic.t -> 'b -> 'bool list array} + { pack : 'a 'b. ('a, 'b, 'env) Basic.t -> 'b -> 'bool list array } -let rec pack : type t v env. - ('bool, env) pack -> (t, v, env) T.t -> v -> 'bool list array = +let rec pack : + type t v env. ('bool, env) pack -> (t, v, env) T.t -> v -> 'bool list array + = fun p spec t -> match spec with | B spec -> @@ -71,11 +72,13 @@ let rec pack : type t v env. Array.concat_map t ~f:(pack p spec) type ('f, 'env) typ = - { typ: + { typ : 'var 'value. ('value, 'var, 'env) Basic.t - -> ('var, 'value, 'f) Snarky_backendless.Typ.t } + -> ('var, 'value, 'f) Snarky_backendless.Typ.t + } -let rec typ : type f var value env. +let rec typ : + type f var value env. 
(f, env) typ -> (value, var, env) T.t -> (var, value, f) Snarky_backendless.Typ.t = @@ -106,7 +109,7 @@ let rec typ : type f var value env. type 'env exists = T : ('t1, 't2, 'env) T.t -> 'env exists -type generic_spec = {spec: 'env. 'env exists} +type generic_spec = { spec : 'env. 'env exists } module ETyp = struct type ('var, 'value, 'f) t = @@ -116,9 +119,12 @@ module ETyp = struct end type ('f, 'env) etyp = - {etyp: 'var 'value. ('value, 'var, 'env) Basic.t -> ('var, 'value, 'f) ETyp.t} + { etyp : + 'var 'value. ('value, 'var, 'env) Basic.t -> ('var, 'value, 'f) ETyp.t + } -let rec etyp : type f var value env. +let rec etyp : + type f var value env. (f, env) etyp -> (value, var, env) T.t -> (var, value, f) ETyp.t = let open Snarky_backendless.Typ in fun e spec -> @@ -138,9 +144,7 @@ let rec etyp : type f var value env. let open Hlist.HlistId in let there [] = () in let back () = [] in - T - ( transport (unit ()) ~there ~back |> transport_var ~there ~back - , Fn.id ) + T (transport (unit ()) ~there ~back |> transport_var ~there ~back, Fn.id) | Struct (spec :: specs) -> let open Hlist.HlistId in let (T (t1, f1)) = etyp e spec in @@ -159,19 +163,19 @@ module Common (Impl : Snarky_backendless.Snark_intf.Run) = struct module Env = struct type ('other_field, 'other_field_var, 'a) t = - < field1: 'other_field - ; field2: 'other_field_var - ; bool1: bool - ; bool2: Boolean.var - ; digest1: Digest.Constant.t - ; digest2: Digest.t - ; challenge1: Challenge.Constant.t - ; challenge2: Challenge.t - ; bulletproof_challenge1: + < field1 : 'other_field + ; field2 : 'other_field_var + ; bool1 : bool + ; bool2 : Boolean.var + ; digest1 : Digest.Constant.t + ; digest2 : Digest.t + ; challenge1 : Challenge.Constant.t + ; challenge2 : Challenge.t + ; bulletproof_challenge1 : Challenge.Constant.t Sc.t Bulletproof_challenge.t - ; bulletproof_challenge2: Challenge.t Sc.t Bulletproof_challenge.t - ; index1: Index.t - ; index2: (Boolean.var, Nat.N8.n) Vector.t + ; bulletproof_challenge2 
: Challenge.t Sc.t Bulletproof_challenge.t + ; index1 : Index.t + ; index2 : (Boolean.var, Nat.N8.n) Vector.t ; .. > as 'a @@ -184,7 +188,8 @@ let pack_basic (type field other_field other_field_var) let open Impl in let module C = Common (Impl) in let open C in - let pack : type a b. + let pack : + type a b. (a, b, ((other_field, other_field_var, 'e) Env.t as 'e)) Basic.t -> b -> Boolean.var list array = @@ -193,20 +198,20 @@ let pack_basic (type field other_field other_field_var) | Field -> field x | Bool -> - [|[x]|] + [| [ x ] |] | Digest -> - [|Digest.to_bits x|] + [| Digest.to_bits x |] | Challenge -> - [|Challenge.to_bits x|] + [| Challenge.to_bits x |] | Index -> - [|Vector.to_list x|] + [| Vector.to_list x |] | Bulletproof_challenge -> - let {Bulletproof_challenge.prechallenge= Scalar_challenge pre} = x in - [|Challenge.to_bits pre|] + let { Bulletproof_challenge.prechallenge = Scalar_challenge pre } = x in + [| Challenge.to_bits pre |] | _ -> failwith "unknown basic spec" in - {pack} + { pack } let pack_basic_unboolean (type field other_field other_field_var) (module Impl : Snarky_backendless.Snark_intf.Run with type field = field) @@ -214,7 +219,8 @@ let pack_basic_unboolean (type field other_field other_field_var) let open Impl in let module C = Common (Impl) in let open C in - let pack : type a b. + let pack : + type a b. 
(a, b, ((other_field, other_field_var, 'e) Env.t as 'e)) Basic.t -> b -> Boolean.var list array = @@ -223,20 +229,20 @@ let pack_basic_unboolean (type field other_field other_field_var) | Field -> field x | Bool -> - [|[x]|] + [| [ x ] |] | Digest -> - [|Digest.Unsafe.to_bits_unboolean x|] + [| Digest.Unsafe.to_bits_unboolean x |] | Challenge -> - [|Challenge.to_bits x|] + [| Challenge.to_bits x |] | Index -> - [|Vector.to_list x|] + [| Vector.to_list x |] | Bulletproof_challenge -> - let {Bulletproof_challenge.prechallenge= Scalar_challenge pre} = x in - [|Challenge.to_bits pre|] + let { Bulletproof_challenge.prechallenge = Scalar_challenge pre } = x in + [| Challenge.to_bits pre |] | _ -> failwith "unknown basic spec" in - {pack} + { pack } let pack impl field t = pack (pack_basic_unboolean impl field) t @@ -247,7 +253,8 @@ let typ_basic (type field other_field other_field_var) let open Impl in let module C = Common (Impl) in let open C in - let typ : type a b. + let typ : + type a b. 
(a, b, ((other_field, other_field_var, 'e) Env.t as 'e)) Basic.t -> (b, a) Impl.Typ.t = fun basic -> @@ -267,7 +274,7 @@ let typ_basic (type field other_field other_field_var) | _ -> failwith "unknown basic spec" in - {typ} + { typ } let typ ~challenge ~scalar_challenge impl field t = typ (typ_basic ~challenge ~scalar_challenge impl field) t @@ -280,24 +287,25 @@ let packed_typ_basic (type field other_field other_field_var) let module Challenge = Limb_vector.Challenge.Make (Impl) in let module Env = struct type ('other_field, 'other_field_var, 'a) t = - < field1: 'other_field - ; field2: 'other_field_var - ; bool1: bool - ; bool2: Boolean.var - ; digest1: Digest.Constant.t - ; digest2: Field.t - ; challenge1: Challenge.Constant.t - ; challenge2: (* Challenge.t *) Field.t - ; bulletproof_challenge1: + < field1 : 'other_field + ; field2 : 'other_field_var + ; bool1 : bool + ; bool2 : Boolean.var + ; digest1 : Digest.Constant.t + ; digest2 : Field.t + ; challenge1 : Challenge.Constant.t + ; challenge2 : (* Challenge.t *) Field.t + ; bulletproof_challenge1 : Challenge.Constant.t Sc.t Bulletproof_challenge.t - ; bulletproof_challenge2: Field.t Sc.t Bulletproof_challenge.t - ; index1: Index.t - ; index2: Field.t + ; bulletproof_challenge2 : Field.t Sc.t Bulletproof_challenge.t + ; index1 : Index.t + ; index2 : Field.t ; .. > as 'a end in - let etyp : type a b. + let etyp : + type a b. 
(a, b, ((other_field, other_field_var, 'e) Env.t as 'e)) Basic.t -> (b, a, field) ETyp.t = function | Field -> @@ -313,11 +321,11 @@ let packed_typ_basic (type field other_field other_field_var) | Bulletproof_challenge -> let typ = let there - {Bulletproof_challenge.prechallenge= Sc.Scalar_challenge pre} = + { Bulletproof_challenge.prechallenge = Sc.Scalar_challenge pre } = pre in let back pre = - {Bulletproof_challenge.prechallenge= Sc.Scalar_challenge pre} + { Bulletproof_challenge.prechallenge = Sc.Scalar_challenge pre } in Typ.transport Challenge.packed_typ ~there ~back |> Typ.transport_var ~there ~back @@ -326,6 +334,6 @@ let packed_typ_basic (type field other_field other_field_var) | _ -> failwith "etyp: unhandled variant" in - {etyp} + { etyp } let packed_typ impl field t = etyp (packed_typ_basic impl field) t diff --git a/src/lib/pickles/dirty.ml b/src/lib/pickles/dirty.ml index 4bd7d1cb750..00eb0ff3081 100644 --- a/src/lib/pickles/dirty.ml +++ b/src/lib/pickles/dirty.ml @@ -1,4 +1,4 @@ -type t = [`Cache_hit | `Generated_something | `Locally_generated] +type t = [ `Cache_hit | `Generated_something | `Locally_generated ] let ( + ) x y = match (x, y) with diff --git a/src/lib/pickles/dlog_main.ml b/src/lib/pickles/dlog_main.ml index 2b322528846..7843980963d 100644 --- a/src/lib/pickles/dlog_main.ml +++ b/src/lib/pickles/dlog_main.ml @@ -32,12 +32,12 @@ let b_poly ~one ~add ~mul chals = done ; !r in - prod (fun i -> one + (chals.(i) * pow_two_pows.(k - 1 - i))) ) + prod (fun i -> one + (chals.(i) * pow_two_pows.(k - 1 - i)))) module Make (Inputs : Inputs - with type Impl.field = Tock.Field.t - and type Inner_curve.Constant.Scalar.t = Tick.Field.t) = + with type Impl.field = Tock.Field.t + and type Inner_curve.Constant.Scalar.t = Tick.Field.t) = struct open Inputs open Impl @@ -76,9 +76,10 @@ struct Bitstring_checked.lt_value (Msb_first.of_list (List.rev xs_lsb)) (Msb_first.of_list p_msb) - |> Boolean.Assert.is_true ) ] + |> Boolean.Assert.is_true) + ] in - 
{typ with check} + { typ with check } let assert_equal t1 t2 = Field.(Assert.equal (project t1) (project t2)) end @@ -99,9 +100,8 @@ struct if debug then Array.iteri gs ~f:(fun i (fin, g) -> as_prover - As_prover.( - fun () -> Core.printf "fin=%b %!" (read Boolean.typ fin)) ; - ksprintf print_g "%s[%d]" lab i g ) + As_prover.(fun () -> Core.printf "fin=%b %!" (read Boolean.typ fin)) ; + ksprintf print_g "%s[%d]" lab i g) let print_chal lab x = if debug then @@ -116,7 +116,7 @@ struct let print_bool lab x = if debug then as_prover (fun () -> - printf "%s: %b\n%!" lab (As_prover.read Boolean.typ x) ) + printf "%s: %b\n%!" lab (As_prover.read Boolean.typ x)) module Challenge = Challenge.Make (Impl) module Digest = Digest.Make (Impl) @@ -143,13 +143,13 @@ struct let prechallenges = Array.map gammas ~f:(fun gammas_i -> absorb (PC :: PC) gammas_i ; - squeeze_scalar sponge ) + squeeze_scalar sponge) in let term_and_challenge (l, r) (pre, pre_packed) = let left_term = Scalar_challenge.endo_inv l pre in let right_term = Scalar_challenge.endo r pre in ( Ops.add_fast left_term right_term - , {Bulletproof_challenge.prechallenge= pre_packed} ) + , { Bulletproof_challenge.prechallenge = pre_packed } ) in let terms, challenges = Array.map2_exn gammas prechallenges ~f:term_and_challenge |> Array.unzip @@ -188,10 +188,12 @@ struct ; mul2_comm ; emul1_comm ; emul2_comm - ; emul3_comm } + ; emul3_comm + } (* Mask out the given vector of indices with the given one-hot vector *) - let choose_key : type n. + let choose_key : + type n. 
n One_hot_vector.t -> (Inner_curve.t index', n) Vector.t -> Inner_curve.t index' = @@ -208,27 +210,29 @@ struct |> map ~f:(fun g -> Double.map ~f:(Util.seal (module Impl)) g) let lagrange (type n) - ~domain:( (which_branch : n One_hot_vector.t) - , (domains : (Domains.t, n) Vector.t) ) i = + ~domain: + ( (which_branch : n One_hot_vector.t) + , (domains : (Domains.t, n) Vector.t) ) i = Vector.map domains ~f:(fun d -> let d = Precomputed.Lagrange_precomputations.index_of_domain_log2 (Domain.log2_size d.h) in match Precomputed.Lagrange_precomputations.vesta.(d).(i) with - | [|g|] -> + | [| g |] -> let g = Inner_curve.Constant.of_affine g in Inner_curve.constant g | _ -> - assert false ) + assert false) |> Vector.map2 (which_branch :> (Boolean.var, n) Vector.t) ~f:(fun b (x, y) -> Field.((b :> t) * x, (b :> t) * y)) |> Vector.reduce_exn ~f:(Double.map2 ~f:Field.( + )) let lagrange_with_correction (type n) ~input_length - ~domain:( (which_branch : n One_hot_vector.t) - , (domains : (Domains.t, n) Vector.t) ) i = + ~domain: + ( (which_branch : n One_hot_vector.t) + , (domains : (Domains.t, n) Vector.t) ) i = let rec pow2pow x i = if i = 0 then x else pow2pow Inner_curve.Constant.(x + x) (i - 1) in @@ -238,19 +242,18 @@ struct (Domain.log2_size d.h) in match Precomputed.Lagrange_precomputations.vesta.(d).(i) with - | [|g|] -> + | [| g |] -> let g = Inner_curve.Constant.of_affine g in ( Inner_curve.constant g , Inner_curve.constant (Inner_curve.Constant.negate (pow2pow g input_length)) ) | xs -> failwithf "expected commitment to have length 1. 
got %d" - (Array.length xs) () ) + (Array.length xs) ()) |> Vector.map2 (which_branch :> (Boolean.var, n) Vector.t) ~f:(fun b pr -> - Double.map pr ~f:(fun (x, y) -> Field.((b :> t) * x, (b :> t) * y)) - ) + Double.map pr ~f:(fun (x, y) -> Field.((b :> t) * x, (b :> t) * y))) |> Vector.reduce_exn ~f:(Double.map2 ~f:(Double.map2 ~f:Field.( + ))) let h_precomp = @@ -265,7 +268,7 @@ struct let params = Group_map.Bw19.Params.create (module Field.Constant) - {b= Inner_curve.Params.b} + { b = Inner_curve.Params.b } end) in let open M in @@ -276,7 +279,7 @@ struct Field.( (x * x * x) + (constant Inner_curve.Params.a * x) - + constant Inner_curve.Params.b) ) + + constant Inner_curve.Params.b)) |> unstage) in fun x -> Lazy.force f x @@ -304,40 +307,38 @@ struct end module Curve_opt = struct - type t = {point: Inner_curve.t; non_zero: Boolean.var} + type t = { point : Inner_curve.t; non_zero : Boolean.var } end let combine batch ~xi without_bound with_bound = - let {Curve_opt.non_zero; point} = + let { Curve_opt.non_zero; point } = Pcs_batch.combine_split_commitments batch ~scale_and_add:(fun ~(acc : Curve_opt.t) ~xi (keep, (p : Point.t)) -> (* match acc.non_zero, keep with - | false, false -> acc - | true, false -> acc - | false, true -> { point= p; non_zero= true } - | true, true -> { point= p + xi * acc; non_zero= true } + | false, false -> acc + | true, false -> acc + | false, true -> { point= p; non_zero= true } + | true, true -> { point= p + xi * acc; non_zero= true } *) let point = Inner_curve.( if_ keep ~then_: (if_ acc.non_zero - ~then_: - (Point.add p (Scalar_challenge.endo acc.point xi)) + ~then_:(Point.add p (Scalar_challenge.endo acc.point xi)) ~else_: ((* In this branch, the accumulator was zero, so there is no harm in - putting the potentially junk underlying point here. *) + putting the potentially junk underlying point here. 
*) Point.underlying p)) ~else_:acc.point) in - let non_zero = - Boolean.(keep &&& Point.finite p ||| acc.non_zero) - in - {Curve_opt.non_zero; point} ) + let non_zero = Boolean.(keep &&& Point.finite p ||| acc.non_zero) in + { Curve_opt.non_zero; point }) ~xi ~init:(fun (keep, p) -> - { non_zero= Boolean.(keep &&& Point.finite p) - ; point= Point.underlying p } ) + { non_zero = Boolean.(keep &&& Point.finite p) + ; point = Point.underlying p + }) without_bound with_bound in Boolean.Assert.is_true non_zero ; @@ -349,23 +350,23 @@ struct let check_bulletproof ~pcs_batch ~sponge ~xi ~(combined_inner_product : Other_field.Packed.t Shifted_value.t) - ~ - (* Corresponds to y in figure 7 of WTS *) - (* sum_i r^i sum_j xi^j f_j(beta_i) *) + ~(* Corresponds to y in figure 7 of WTS *) + (* sum_i r^i sum_j xi^j f_j(beta_i) *) (advice : _ Types.Pairing_based.Openings.Bulletproof.Advice.t) ~polynomials:(without_degree_bound, with_degree_bound) - ~openings_proof:({lr; delta; z_1; z_2; sg} : - ( Inner_curve.t - , Other_field.Packed.t Shifted_value.t ) - Openings.Bulletproof.t) = + ~openings_proof: + ({ lr; delta; z_1; z_2; sg } : + ( Inner_curve.t + , Other_field.Packed.t Shifted_value.t ) + Openings.Bulletproof.t) = let scale_fast p s = scale_fast p (Shifted_value.map ~f:Other_field.Packed.to_bits_unsafe s) in with_label __LOC__ (fun () -> Other_field.Packed.absorb_shifted sponge combined_inner_product ; (* a_hat should be equal to - sum_i < t, r^i pows(beta_i) > - = sum_i r^i < t, pows(beta_i) > *) + sum_i < t, r^i pows(beta_i) > + = sum_i r^i < t, pows(beta_i) > *) let u = let t = Sponge.squeeze_field sponge in group_map t @@ -396,10 +397,11 @@ struct in z_1_g_plus_b_u + z2_h in - (`Success (equal_g lhs rhs), challenges) ) + (`Success (equal_g lhs rhs), challenges)) module Opt = - S.Bit_sponge.Make (struct + S.Bit_sponge.Make + (struct type t = Boolean.var end) (struct @@ -418,10 +420,10 @@ struct let absorb sponge ty t = Util.absorb ~absorb_field:(Opt.absorb sponge) - 
~g1_to_field_elements:(fun (b, (x, y)) -> [(b, x); (b, y)]) + ~g1_to_field_elements:(fun (b, (x, y)) -> [ (b, x); (b, y) ]) ~absorb_scalar:(fun x -> Opt.absorb sponge (Boolean.true_, x)) ~mask_g1_opt:(fun (keep, ((finite : Boolean.var), (x, y))) -> - (keep, Field.((finite :> t) * x, (finite :> t) * y)) ) + (keep, Field.((finite :> t) * x, (finite :> t) * y))) ty t module Pseudo = Pseudo.Make (Impl) @@ -439,23 +441,32 @@ struct let mask_messages (type n) ~(lengths : (int, n) Vector.t Evals.t) (choice : n One_hot_vector.t) (m : _ Messages.t) = let f lens = Array.zip_exn (mask lens choice) in - let g lg {Poly_comm.With_degree_bound.shifted; unshifted} = - { Poly_comm.With_degree_bound.shifted= (Boolean.true_, shifted) - ; unshifted= f lg unshifted } + let g lg { Poly_comm.With_degree_bound.shifted; unshifted } = + { Poly_comm.With_degree_bound.shifted = (Boolean.true_, shifted) + ; unshifted = f lg unshifted + } in - { Messages.l_comm= f lengths.l m.l_comm - ; r_comm= f lengths.r m.r_comm - ; o_comm= f lengths.o m.o_comm - ; z_comm= f lengths.z m.z_comm - ; t_comm= g lengths.t m.t_comm } + { Messages.l_comm = f lengths.l m.l_comm + ; r_comm = f lengths.r m.r_comm + ; o_comm = f lengths.o m.o_comm + ; z_comm = f lengths.z m.z_comm + ; t_comm = g lengths.t m.t_comm + } module Plonk = Types.Dlog_based.Proof_state.Deferred_values.Plonk (* Just for exhaustiveness over fields *) let iter2 ~chal ~scalar_chal - {Plonk.Minimal.alpha= alpha_0; beta= beta_0; gamma= gamma_0; zeta= zeta_0} - {Plonk.Minimal.alpha= alpha_1; beta= beta_1; gamma= gamma_1; zeta= zeta_1} - = + { Plonk.Minimal.alpha = alpha_0 + ; beta = beta_0 + ; gamma = gamma_0 + ; zeta = zeta_0 + } + { Plonk.Minimal.alpha = alpha_1 + ; beta = beta_1 + ; gamma = gamma_1 + ; zeta = zeta_1 + } = scalar_chal alpha_0 alpha_1 ; chal beta_0 beta_1 ; chal gamma_0 gamma_1 ; @@ -469,12 +480,12 @@ struct ~scalar_chal: (fun (Scalar_challenge t1 : _ Pickles_types.Scalar_challenge.t) (Scalar_challenge t2 : Scalar_challenge.t) -> - 
Field.Assert.equal t1 (Field.project t2) ) + Field.Assert.equal t1 (Field.project t2)) let incrementally_verify_proof (type b) (module Max_branching : Nat.Add.Intf with type n = b) ~step_widths - ~step_domains ~verification_key:(m : _ Plonk_verification_key_evals.t) - ~xi ~sponge ~public_input ~(sg_old : (_, Max_branching.n) Vector.t) + ~step_domains ~verification_key:(m : _ Plonk_verification_key_evals.t) ~xi + ~sponge ~public_input ~(sg_old : (_, Max_branching.n) Vector.t) ~(combined_inner_product : _ Shifted_value.t) ~advice ~(messages : (_, Boolean.var * _) Messages.t) ~which_branch ~openings_proof @@ -489,7 +500,7 @@ struct Commitment_lengths.generic map ~h:(f Domains.h) ~max_degree:Common.Max_degree.step in - mask_messages ~lengths which_branch messages ) + mask_messages ~lengths which_branch messages) in let sg_old = with_label __LOC__ (fun () -> @@ -499,13 +510,13 @@ struct Vector.map2 (ones_vector (module Impl) ~first_zero:actual_width Max_branching.n) sg_old - ~f:(fun keep sg -> [|(keep, sg)|]) ) + ~f:(fun keep sg -> [| (keep, sg) |])) in with_label __LOC__ (fun () -> let receive ty f = with_label __LOC__ (fun () -> let x = f messages in - absorb sponge ty x ; x ) + absorb sponge ty x ; x) in let sample () = let xs = Opt.squeeze sponge ~length:Challenge.length in @@ -523,14 +534,14 @@ struct let terms = Array.mapi public_input ~f:(fun i x -> match Array.of_list x with - | [|(b : Boolean.var)|] -> + | [| (b : Boolean.var) |] -> assert_ (Constraint.boolean (b :> Field.t)) ; `Cond_add (b, lagrange ~domain i) | x -> `Add_with_correction ( x , lagrange_with_correction - ~input_length:(Array.length x) ~domain i ) ) + ~input_length:(Array.length x) ~domain i )) in let correction = with_label __LOC__ (fun () -> @@ -539,16 +550,15 @@ struct | `Cond_add _ -> None | `Add_with_correction (_, (_, corr)) -> - Some corr )) - ~f:Ops.add_fast ) + Some corr)) + ~f:Ops.add_fast) in Array.fold terms ~init:correction ~f:(fun acc term -> match term with | `Cond_add (b, g) -> 
Inner_curve.if_ b ~then_:(Ops.add_fast g acc) ~else_:acc | `Add_with_correction (x, (g, _)) -> - Ops.add_fast acc (Ops.scale_fast g (`Plus_two_to_len x)) - ) ) + Ops.add_fast acc (Ops.scale_fast g (`Plus_two_to_len x)))) |> Inner_curve.negate in let without = Type.Without_degree_bound in @@ -567,27 +577,28 @@ struct (module Impl) (match zeta with Scalar_challenge x -> x) ; (* At this point, we should use the previous "bulletproof_challenges" to - compute to compute f(beta_1) outside the snark - where f is the polynomial corresponding to sg_old - *) + compute to compute f(beta_1) outside the snark + where f is the polynomial corresponding to sg_old + *) let sponge = match S.Bit_sponge.underlying sponge with - | {state; sponge_state; params} -> ( - match sponge_state with - | Squeezed n -> - S.make ~state ~sponge_state:(Squeezed n) ~params - | _ -> - assert false ) + | { state; sponge_state; params } -> ( + match sponge_state with + | Squeezed n -> + S.make ~state ~sponge_state:(Squeezed n) ~params + | _ -> + assert false ) in let sponge_before_evaluations = Sponge.copy sponge in let sponge_digest_before_evaluations = Sponge.squeeze_field sponge in + (* xi, r are sampled here using the other sponge. *) (* No need to expose the polynomial evaluations as deferred values as they're - not needed here for the incremental verification. All we need is a_hat and - "combined_inner_product". + not needed here for the incremental verification. All we need is a_hat and + "combined_inner_product". 
- Then, in the other proof, we can witness the evaluations and check their correctness - against "combined_inner_product" *) + Then, in the other proof, we can witness the evaluations and check their correctness + against "combined_inner_product" *) let f_comm = let ( + ) = Ops.add_fast in let ( * ) = Fn.flip scale_fast in @@ -617,33 +628,35 @@ struct ; plonk.vbmul1 * m.mul2_comm ; plonk.endomul0 * m.emul1_comm ; plonk.endomul1 * m.emul2_comm - ; plonk.endomul2 * m.emul3_comm ] + ; plonk.endomul2 * m.emul3_comm + ] in let res = Array.map z_comm ~f:(fun (b, x) -> (b, plonk.perm0 * x)) in - res.(0) - <- (let b, r = res.(0) in - (Boolean.true_, Inner_curve.if_ b ~then_:(r + g) ~else_:g)) ; + res.(0) <- + (let b, r = res.(0) in + (Boolean.true_, Inner_curve.if_ b ~then_:(r + g) ~else_:g)) ; res in let bulletproof_challenges = (* This sponge needs to be initialized with (some derivative of) - 1. The polynomial commitments - 2. The combined inner product - 3. The challenge points. + 1. The polynomial commitments + 2. The combined inner product + 3. The challenge points. - It should be sufficient to fork the sponge after squeezing beta_3 and then to absorb - the combined inner product. - *) + It should be sufficient to fork the sponge after squeezing beta_3 and then to absorb + the combined inner product. 
+ *) let without_degree_bound = Vector.append sg_old - [ [|(Boolean.true_, x_hat)|] + [ [| (Boolean.true_, x_hat) |] ; l_comm ; r_comm ; o_comm ; z_comm ; f_comm - ; [|(Boolean.true_, m.sigma_comm_0)|] - ; [|(Boolean.true_, m.sigma_comm_1)|] ] + ; [| (Boolean.true_, m.sigma_comm_0) |] + ; [| (Boolean.true_, m.sigma_comm_1) |] + ] (snd (Max_branching.add Nat.N8.n)) in check_bulletproof @@ -658,22 +671,24 @@ struct ~f:(Array.map ~f:(fun (keep, x) -> (keep, `Finite x))) , [ t_comm |> Dlog_plonk_types.Poly_comm.With_degree_bound.map - ~f:(fun (keep, x) -> (keep, `Maybe_finite x)) ] ) + ~f:(fun (keep, x) -> (keep, `Maybe_finite x)) + ] ) in assert_eq_marlin - { alpha= plonk.alpha - ; beta= plonk.beta - ; gamma= plonk.gamma - ; zeta= plonk.zeta } - {alpha; beta; gamma; zeta} ; - (sponge_digest_before_evaluations, bulletproof_challenges) ) + { alpha = plonk.alpha + ; beta = plonk.beta + ; gamma = plonk.gamma + ; zeta = plonk.zeta + } + { alpha; beta; gamma; zeta } ; + (sponge_digest_before_evaluations, bulletproof_challenges)) module Split_evaluations = struct let combine_split_evaluations' s = Pcs_batch.combine_split_evaluations s ~mul:(fun (keep, x) (y : Field.t) -> (keep, Field.(y * x))) ~mul_and_add:(fun ~acc ~xi (keep, fx) -> - Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc ) + Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc) ~init:(fun (_, fx) -> fx) ~shifted_pow: (Pseudo.Degree_bound.shifted_pow @@ -684,7 +699,7 @@ struct (choice : n One_hot_vector.t) (e : Field.t array Evals.t) : (Boolean.var * Field.t) array Evals.t = Evals.map2 lengths e ~f:(fun lengths e -> - Array.zip_exn (mask lengths choice) e ) + Array.zip_exn (mask lengths choice) e) let combined_evaluation (type b b_plus_8) b_plus_8 ~xi ~evaluation_point ((without_degree_bound : (_, b_plus_8) Vector.t), with_degree_bound) @@ -699,8 +714,8 @@ struct without_degree_bound with_degree_bound let compute_challenges ~scalar chals = - Vector.map chals ~f:(fun 
{Bulletproof_challenge.prechallenge} -> - scalar prechallenge ) + Vector.map chals ~f:(fun { Bulletproof_challenge.prechallenge } -> + scalar prechallenge) let b_poly = Field.(b_poly ~add ~mul ~one) @@ -715,7 +730,7 @@ struct let rec go acc i = if i = 0 then acc else go (Field.square acc) (i - 1) in - go pt max_degree_log2 ) + go pt max_degree_log2) in with_label __LOC__ (fun () -> match List.rev (Array.to_list e) with @@ -723,7 +738,7 @@ struct List.fold ~init:e es ~f:(fun acc y -> let acc' = exists Field.typ ~compute:(fun () -> - As_prover.read_var Field.(y + (pt_n * acc)) ) + As_prover.read_var Field.(y + (pt_n * acc))) in (* acc' = y + pt_n * acc *) let pt_n_acc = Field.(pt_n * acc) in @@ -732,40 +747,42 @@ struct (* 0 = - acc' + y + pt_n_acc *) let open Field.Constant in assert_ - [ { annotation= None - ; basic= + [ { annotation = None + ; basic = T (Basic - { l= (one, y) - ; r= (one, pt_n_acc) - ; o= (negate one, acc') - ; m= zero - ; c= zero }) } ] ; - acc' ) + { l = (one, y) + ; r = (one, pt_n_acc) + ; o = (negate one, acc') + ; m = zero + ; c = zero + }) + } + ] ; + acc') | [] -> - failwith "empty list" ) + failwith "empty list") let shift = - Shifted_value.Shift.( - map ~f:Field.constant (create (module Field.Constant))) + Shifted_value.Shift.(map ~f:Field.constant (create (module Field.Constant))) let%test_unit "endo scalar" = SC.test (module Impl) ~endo:Endo.Step_inner_curve.scalar (* This finalizes the "deferred values" coming from a previous proof over the same field. - It - 1. Checks that [xi] and [r] where sampled correctly. I.e., by absorbing all the - evaluation openings and then squeezing. - 2. Checks that the "combined inner product" value used in the elliptic curve part of - the opening proof was computed correctly, in terms of the evaluation openings and the - evaluation points. - 3. Check that the "b" value was computed correctly. - 4. Perform the arithmetic checks from marlin. *) + It + 1. Checks that [xi] and [r] where sampled correctly. 
I.e., by absorbing all the + evaluation openings and then squeezing. + 2. Checks that the "combined inner product" value used in the elliptic curve part of + the opening proof was computed correctly, in terms of the evaluation openings and the + evaluation points. + 3. Check that the "b" value was computed correctly. + 4. Perform the arithmetic checks from marlin. *) let finalize_other_proof (type b) (module Branching : Nat.Add.Intf with type n = b) ?actual_branching ~domain ~max_quot_size ~sponge ~(old_bulletproof_challenges : (_, b) Vector.t) - ({xi; combined_inner_product; bulletproof_challenges; b; plonk} : + ({ xi; combined_inner_product; bulletproof_challenges; b; plonk } : ( _ , _ , _ Shifted_value.t @@ -780,8 +797,8 @@ struct with_label __LOC__ (fun () -> let xs, ys = Evals.to_vectors e in List.iter - Vector.([|x_hat|] :: (to_list xs @ to_list ys)) - ~f:(Array.iter ~f:(Sponge.absorb sponge)) ) + Vector.([| x_hat |] :: (to_list xs @ to_list ys)) + ~f:(Array.iter ~f:(Sponge.absorb sponge))) in (* A lot of hashing. *) absorb_evals x_hat1 evals1 ; @@ -790,14 +807,14 @@ struct with_label __LOC__ (fun () -> let x, x_packed = Sponge.squeeze sponge ~length:Challenge.length in Util.boolean_constrain (module Impl) x ; - (x, x_packed) ) + (x, x_packed)) in let _, xi_actual = squeeze () in let r_actual, _ = squeeze () in let xi_correct = with_label __LOC__ (fun () -> (* Sample new sg challenge point here *) - Field.equal xi_actual (pack_scalar_challenge xi) ) + Field.equal xi_actual (pack_scalar_challenge xi)) in let scalar = SC.to_field_checked (module Impl) ~endo:Endo.Step_inner_curve.scalar @@ -810,7 +827,7 @@ struct ~scalar plonk |> Types.Pairing_based.Proof_state.Deferred_values.Plonk.In_circuit .map_fields - ~f:(Shifted_value.map ~f:(Util.seal (module Impl))) ) + ~f:(Shifted_value.map ~f:(Util.seal (module Impl)))) in let xi = scalar xi in (* TODO: r actually does not need to be a scalar challenge. 
*) @@ -822,7 +839,7 @@ struct let actual_combined_inner_product = let sg_olds = Vector.map old_bulletproof_challenges ~f:(fun chals -> - unstage (b_poly (Vector.to_array chals)) ) + unstage (b_poly (Vector.to_array chals))) in let combine pt x_hat e = let pi = Branching.add Nat.N8.n in @@ -830,7 +847,7 @@ struct let sg_evals = match actual_branching with | None -> - Vector.map sg_olds ~f:(fun f -> [|f pt|]) + Vector.map sg_olds ~f:(fun f -> [| f pt |]) | Some branching -> let mask = ones_vector @@ -838,9 +855,9 @@ struct ~first_zero:branching (Vector.length sg_olds) in Vector.map2 mask sg_olds ~f:(fun b f -> - [|Field.((b :> t) * f pt)|] ) + [| Field.((b :> t) * f pt) |]) in - let v = Vector.append sg_evals ([|x_hat|] :: a) (snd pi) in + let v = Vector.append sg_evals ([| x_hat |] :: a) (snd pi) in combined_evaluation pi ~xi ~evaluation_point:pt (v, b) ~max_quot_size in @@ -850,11 +867,11 @@ struct (Shifted_value.to_field (module Field) ~shift combined_inner_product) - actual_combined_inner_product ) + actual_combined_inner_product) in let bulletproof_challenges = with_label __LOC__ (fun () -> - compute_challenges ~scalar bulletproof_challenges ) + compute_challenges ~scalar bulletproof_challenges) in let b_correct = with_label __LOC__ (fun () -> @@ -862,7 +879,7 @@ struct unstage (b_poly (Vector.to_array bulletproof_challenges)) in let b_actual = b_poly plonk.zeta + (r * b_poly zetaw) in - equal (Shifted_value.to_field (module Field) ~shift b) b_actual ) + equal (Shifted_value.to_field (module Field) ~shift b) b_actual) in let plonk_checks_passed = with_label __LOC__ (fun () -> @@ -873,7 +890,7 @@ struct ~domain ~shift plonk ~mds:sponge_params.mds ( Dlog_plonk_types.Evals.map ~f:(e plonk.zeta) evals1 , Dlog_plonk_types.Evals.map ~f:(e zetaw) evals2 ) - x_hat1 ) + x_hat1) in print_bool "xi_correct" xi_correct ; print_bool "combined_inner_product_correct" combined_inner_product_correct ; @@ -883,7 +900,8 @@ struct [ xi_correct ; b_correct ; 
combined_inner_product_correct - ; plonk_checks_passed ] + ; plonk_checks_passed + ] , bulletproof_challenges ) let map_challenges @@ -891,22 +909,24 @@ struct ; combined_inner_product ; xi ; bulletproof_challenges - ; b } ~f ~scalar = - { Types.Pairing_based.Proof_state.Deferred_values.plonk= + ; b + } ~f ~scalar = + { Types.Pairing_based.Proof_state.Deferred_values.plonk = Types.Pairing_based.Proof_state.Deferred_values.Plonk.In_circuit .map_challenges plonk ~f ~scalar ; combined_inner_product - ; bulletproof_challenges= + ; bulletproof_challenges = Vector.map bulletproof_challenges ~f:(fun (r : _ Bulletproof_challenge.t) -> - {Bulletproof_challenge.prechallenge= scalar r.prechallenge} ) - ; xi= scalar xi - ; b } + { Bulletproof_challenge.prechallenge = scalar r.prechallenge }) + ; xi = scalar xi + ; b + } (* TODO: No need to hash the entire bulletproof challenges. Could - just hash the segment of the public input LDE corresponding to them - that we compute when verifying the previous proof. That is a commitment - to them. *) + just hash the segment of the public input LDE corresponding to them + that we compute when verifying the previous proof. That is a commitment + to them. 
*) let hash_me_only (type n) (_max_branching : n Nat.t) (t : (_, (_, n) Vector.t) Types.Dlog_based.Proof_state.Me_only.t) = diff --git a/src/lib/pickles/dummy.ml b/src/lib/pickles/dummy.ml index 4996cf671aa..d97b84d0652 100644 --- a/src/lib/pickles/dummy.ml +++ b/src/lib/pickles/dummy.ml @@ -18,18 +18,18 @@ let evals = let evals_combined = Tuple_lib.Double.map evals ~f:(fun (e, _x) -> Dlog_plonk_types.Evals.map e - ~f:(Array.reduce_exn ~f:Backend.Tock.Field.( + )) ) + ~f:(Array.reduce_exn ~f:Backend.Tock.Field.( + ))) module Ipa = struct module Wrap = struct let challenges = Vector.init Tock.Rounds.n ~f:(fun _ -> let prechallenge = Ro.scalar_chal () in - {Bulletproof_challenge.prechallenge} ) + { Bulletproof_challenge.prechallenge }) let challenges_computed = - Vector.map challenges ~f:(fun {prechallenge} -> - (Ipa.Wrap.compute_challenge prechallenge : Tock.Field.t) ) + Vector.map challenges ~f:(fun { prechallenge } : Tock.Field.t -> + Ipa.Wrap.compute_challenge prechallenge) let sg = lazy @@ -40,11 +40,11 @@ module Ipa = struct let challenges = Vector.init Tick.Rounds.n ~f:(fun _ -> let prechallenge = Ro.scalar_chal () in - {Bulletproof_challenge.prechallenge} ) + { Bulletproof_challenge.prechallenge }) let challenges_computed = - Vector.map challenges ~f:(fun {prechallenge} -> - (Ipa.Step.compute_challenge prechallenge : Tick.Field.t) ) + Vector.map challenges ~f:(fun { prechallenge } : Tick.Field.t -> + Ipa.Step.compute_challenge prechallenge) let sg = lazy diff --git a/src/lib/pickles/fix_domains.ml b/src/lib/pickles/fix_domains.ml index 029d0fdb4eb..aefe43e2d50 100644 --- a/src/lib/pickles/fix_domains.ml +++ b/src/lib/pickles/fix_domains.ml @@ -9,12 +9,13 @@ let domains (sys : _ Zexe_backend_common.Plonk_constraint_system.t) : Domains.t Zexe_backend_common.Plonk_constraint_system.zk_rows + public_input_size + List.length sys.rows_rev in - { h= Pow_2_roots_of_unity Int.(ceil_log2 rows) - ; x= Pow_2_roots_of_unity (Int.ceil_log2 public_input_size) } + { h = 
Pow_2_roots_of_unity Int.(ceil_log2 rows) + ; x = Pow_2_roots_of_unity (Int.ceil_log2 public_input_size) + } let rough_domains : Domains.t = let d = Domain.Pow_2_roots_of_unity 20 in - {h= d; x= Pow_2_roots_of_unity 6} + { h = d; x = Pow_2_roots_of_unity 6 } let domains (type field a) (module Impl : Snarky_backendless.Snark_intf.Run @@ -25,4 +26,4 @@ let domains (type field a) .Plonk_constraint_system .t) (Spec.ETyp.T (typ, conv)) main = let main x () : unit = main (conv x) in - domains (Impl.constraint_system ~exposing:[typ] main) + domains (Impl.constraint_system ~exposing:[ typ ] main) diff --git a/src/lib/pickles/full_signature.ml b/src/lib/pickles/full_signature.ml index a879f65abbf..9eea229033b 100644 --- a/src/lib/pickles/full_signature.ml +++ b/src/lib/pickles/full_signature.ml @@ -1,7 +1,7 @@ open Pickles_types type ('max_width, 'branches, 'maxes) t = - { padded: ((int, 'branches) Vector.t, 'max_width) Vector.t - ; maxes: + { padded : ((int, 'branches) Vector.t, 'max_width) Vector.t + ; maxes : (module Hlist.Maxes.S with type length = 'max_width and type ns = 'maxes) } diff --git a/src/lib/pickles/impls.ml b/src/lib/pickles/impls.ml index 88ea2710dda..344cbe9428d 100644 --- a/src/lib/pickles/impls.ml +++ b/src/lib/pickles/impls.ml @@ -18,7 +18,7 @@ let forbidden_shifted_values ~modulus:r ~size_in_bits ~f = |> take_while ~f:fits_in_n_bits |> to_list in - List.concat_map [neg_two_to_n; B.(neg_two_to_n - one)] ~f:representatives + List.concat_map [ neg_two_to_n; B.(neg_two_to_n - one) ] ~f:representatives |> List.dedup_and_sort ~compare:B.compare |> List.map ~f @@ -40,7 +40,7 @@ module Step = struct ~modulus:(Wrap_impl.Bigint.to_bignum_bigint Constant.size) ~f:(fun x -> let hi = test_bit x (Field.size_in_bits - 1) in let lo = B.shift_right x 1 in - (Impl.Bigint.(to_field (of_bignum_bigint lo)), hi) ) + (Impl.Bigint.(to_field (of_bignum_bigint lo)), hi)) let (typ_unchecked : (t, Constant.t) Typ.t), check = let t0 = @@ -48,10 +48,10 @@ module Step = struct 
(Typ.tuple2 Field.typ Boolean.typ) ~there:(fun x -> let low, high = Util.split_last (Tock.Field.to_bits x) in - (Field.Constant.project low, high) ) + (Field.Constant.project low, high)) ~back:(fun (low, high) -> let low, _ = Util.split_last (Field.Constant.unpack low) in - Tock.Field.of_bits (low @ [high]) ) + Tock.Field.of_bits (low @ [ high ])) in let check t = let open Internal_Basic in @@ -67,9 +67,9 @@ module Step = struct in (t0, check) - let typ = {typ_unchecked with check} + let typ = { typ_unchecked with check } - let to_bits (x, b) = Field.unpack x ~length:(Field.size_in_bits - 1) @ [b] + let to_bits (x, b) = Field.unpack x ~length:(Field.size_in_bits - 1) @ [ b ] end module Digest = Digest.Make (Impl) @@ -109,7 +109,7 @@ module Wrap = struct let forbidden_shifted_values = forbidden_shifted_values ~size_in_bits:Constant.size_in_bits ~modulus:(Step.Impl.Bigint.to_bignum_bigint Constant.size) ~f:(fun x -> - Impl.Bigint.(to_field (of_bignum_bigint x)) ) + Impl.Bigint.(to_field (of_bignum_bigint x))) let typ_unchecked, check = let t0 = @@ -127,7 +127,7 @@ module Wrap = struct in (t0, check) - let typ = {typ_unchecked with check} + let typ = { typ_unchecked with check } let to_bits x = Field.unpack x ~length:Field.size_in_bits end diff --git a/src/lib/pickles/inductive_rule.ml b/src/lib/pickles/inductive_rule.ml index b1c0688e35b..e52f8e78632 100644 --- a/src/lib/pickles/inductive_rule.ml +++ b/src/lib/pickles/inductive_rule.ml @@ -13,11 +13,12 @@ end allow predecessor proofs to conditionally fail to verify *) type ('prev_vars, 'prev_values, 'widths, 'heights, 'a_var, 'a_value) t = - { identifier: string - ; prevs: ('prev_vars, 'prev_values, 'widths, 'heights) H4.T(Tag).t - ; main: 'prev_vars H1.T(Id).t -> 'a_var -> 'prev_vars H1.T(E01(B)).t - ; main_value: - 'prev_values H1.T(Id).t -> 'a_value -> 'prev_vars H1.T(E01(Bool)).t } + { identifier : string + ; prevs : ('prev_vars, 'prev_values, 'widths, 'heights) H4.T(Tag).t + ; main : 'prev_vars H1.T(Id).t -> 
'a_var -> 'prev_vars H1.T(E01(B)).t + ; main_value : + 'prev_values H1.T(Id).t -> 'a_value -> 'prev_vars H1.T(E01(Bool)).t + } module T (Statement : T0) (Statement_value : T0) = struct type nonrec ('prev_vars, 'prev_values, 'widths, 'heights) t = diff --git a/src/lib/pickles/intf.ml b/src/lib/pickles/intf.ml index 1c8c0b2fbb5..40dbc8b0a17 100644 --- a/src/lib/pickles/intf.ml +++ b/src/lib/pickles/intf.ml @@ -202,11 +202,11 @@ module Sponge (Impl : Snarky_backendless.Snark_intf.Run) = struct module type S = Sponge.Intf.Sponge - with module Field := Field - and module State := Sponge.State - and type input := Field.t - and type digest := length:int -> Boolean.var list * Field.t - and type t = Field.t Sponge.t + with module Field := Field + and module State := Sponge.State + and type input := Field.t + and type digest := length:int -> Boolean.var list * Field.t + and type t = Field.t Sponge.t end module type Inputs_base = sig @@ -274,12 +274,12 @@ module Pairing_main_inputs = struct module Sponge : sig include Sponge_lib.Intf.Sponge - with module Field := Impl.Field - and module State := Sponge_lib.State - and type input := - [`Field of Impl.Field.t | `Bits of Impl.Boolean.var list] - and type digest := length:int -> Impl.Boolean.var list * Impl.Field.t - and type t = Impl.Field.t Sponge_lib.t + with module Field := Impl.Field + and module State := Sponge_lib.State + and type input := + [ `Field of Impl.Field.t | `Bits of Impl.Boolean.var list ] + and type digest := length:int -> Impl.Boolean.var list * Impl.Field.t + and type t = Impl.Field.t Sponge_lib.t val squeeze_field : t -> Impl.Field.t end diff --git a/src/lib/pickles/limb_vector/challenge.ml b/src/lib/pickles/limb_vector/challenge.ml index 0e62efbca24..d0266017865 100644 --- a/src/lib/pickles/limb_vector/challenge.ml +++ b/src/lib/pickles/limb_vector/challenge.ml @@ -21,7 +21,7 @@ module type S = sig val dummy : t end - val typ' : [`Constrained | `Unconstrained] -> (t, Constant.t) Typ.t + val typ' : [ 
`Constrained | `Unconstrained ] -> (t, Constant.t) Typ.t val typ_unchecked : (t, Constant.t) Typ.t diff --git a/src/lib/pickles/limb_vector/constant.ml b/src/lib/pickles/limb_vector/constant.ml index 13728ebf5f6..1b02f6cb5fc 100644 --- a/src/lib/pickles/limb_vector/constant.ml +++ b/src/lib/pickles/limb_vector/constant.ml @@ -7,7 +7,7 @@ let to_bits t = Vector.to_list t |> List.concat_map ~f:(fun n -> let test_bit i = Int64.(shift_right n i land one = one) in - List.init 64 ~f:test_bit ) + List.init 64 ~f:test_bit) module Hex64 = struct module T = struct @@ -23,8 +23,8 @@ module Hex64 = struct let of_yojson yojson = match yojson with | `String x -> ( - try Result.Ok (Int64.of_string x) - with _ -> Result.Error "Constant.Make.Hex64.t" ) + try Result.Ok (Int64.of_string x) + with _ -> Result.Error "Constant.Make.Hex64.t" ) | _ -> of_yojson yojson @@ -44,7 +44,7 @@ module Hex64 = struct let%test_unit "int64 hex" = Quickcheck.test (Int64.gen_incl zero max_value) ~f:(fun x -> - assert (equal x (of_hex (to_hex x))) ) + assert (equal x (of_hex (to_hex x)))) let sexp_of_t = Fn.compose String.sexp_of_t to_hex @@ -67,7 +67,6 @@ module Hex64 = struct (* TODO: Add serialization tests here to make sure that Core doesn't change it out from under us between versions. 
*) - end end] end @@ -84,7 +83,7 @@ module Make (N : Vector.Nat_intf) = struct let of_bits bits = let pack = List.foldi ~init:Int64.zero ~f:(fun i acc b -> - if b then Int64.(acc lor shift_left one i) else acc ) + if b then Int64.(acc lor shift_left one i) else acc) in let bits = List.groupi ~break:(fun i _ _ -> i mod 64 = 0) bits |> List.map ~f:pack diff --git a/src/lib/pickles/limb_vector/make.ml b/src/lib/pickles/limb_vector/make.ml index f2c9b197dcb..86f6479cd2f 100644 --- a/src/lib/pickles/limb_vector/make.ml +++ b/src/lib/pickles/limb_vector/make.ml @@ -28,5 +28,5 @@ struct |> Typ.transport ~there:(fun x -> Field.Constant.project (Constant.to_bits x)) ~back:(fun x -> - Constant.of_bits (List.take (Field.Constant.unpack x) length) ) + Constant.of_bits (List.take (Field.Constant.unpack x) length)) end diff --git a/src/lib/pickles/make_sponge.ml b/src/lib/pickles/make_sponge.ml index 7a51be15599..e98a56e19f0 100644 --- a/src/lib/pickles/make_sponge.ml +++ b/src/lib/pickles/make_sponge.ml @@ -66,15 +66,15 @@ module T (M : Sponge.Intf.T) = M module Test (Impl : Snarky_backendless.Snark_intf.Run with type prover_state = unit) (S_constant : Sponge.Intf.Sponge - with module Field := T(Impl.Field.Constant) - and module State := Sponge.State - and type input := Impl.field - and type digest := Impl.field) + with module Field := T(Impl.Field.Constant) + and module State := Sponge.State + and type input := Impl.field + and type digest := Impl.field) (S_checked : Sponge.Intf.Sponge - with module Field := Impl.Field - and module State := Sponge.State - and type input := Impl.Field.t - and type digest := Impl.Field.t) = + with module Field := Impl.Field + and module State := Sponge.State + and type input := Impl.Field.t + and type digest := Impl.Field.t) = struct open Impl @@ -91,10 +91,10 @@ struct S_checked.create (Sponge.Params.map ~f:Field.constant params) in Array.iter a ~f:(S_checked.absorb s) ; - S_checked.squeeze s ) ) + S_checked.squeeze s)) (fun a -> let s = 
S_constant.create params in Array.iter a ~f:(S_constant.absorb s) ; - S_constant.squeeze s ) + S_constant.squeeze s) a end diff --git a/src/lib/pickles/one_hot_vector/one_hot_vector.ml b/src/lib/pickles/one_hot_vector/one_hot_vector.ml index 67267692695..6e8fed58a99 100644 --- a/src/lib/pickles/one_hot_vector/one_hot_vector.ml +++ b/src/lib/pickles/one_hot_vector/one_hot_vector.ml @@ -7,7 +7,7 @@ end module T (Impl : Snarky_backendless.Snark_intf.Run) = struct (* TODO: Optimization. Have this have length n - 1 since the last one is - determined by the remaining ones. *) + determined by the remaining ones. *) type 'n t = (Impl.Boolean.var, 'n) Vector.t end @@ -27,11 +27,12 @@ struct let typ = Vector.typ Boolean.typ n in let typ = { typ with - check= + check = (fun x -> Snarky_backendless.Checked.bind (typ.check x) ~f:(fun () -> make_checked (fun () -> - Boolean.Assert.exactly_one (Vector.to_list x) ) ) ) } + Boolean.Assert.exactly_one (Vector.to_list x)))) + } in Typ.transport typ ~there:(fun i -> Vector.init n ~f:(( = ) i)) @@ -39,5 +40,5 @@ struct let i, _ = List.findi (Vector.to_list v) ~f:(fun _ b -> b) |> Option.value_exn in - i ) + i) end diff --git a/src/lib/pickles/opt_sponge.ml b/src/lib/pickles/opt_sponge.ml index 1118eaef855..cd9d59f034e 100644 --- a/src/lib/pickles/opt_sponge.ml +++ b/src/lib/pickles/opt_sponge.ml @@ -13,14 +13,16 @@ let rate = m - capacity type 'f sponge_state = | Absorbing of - { next_index: 'f Snarky_backendless.Boolean.t - ; xs: ('f Snarky_backendless.Boolean.t * 'f) list } + { next_index : 'f Snarky_backendless.Boolean.t + ; xs : ('f Snarky_backendless.Boolean.t * 'f) list + } | Squeezed of int type 'f t = - { mutable state: 'f array - ; params: 'f Sponge.Params.t - ; mutable sponge_state: 'f sponge_state } + { mutable state : 'f array + ; params : 'f Sponge.Params.t + ; mutable sponge_state : 'f sponge_state + } module Make (Impl : Snarky_backendless.Snark_intf.Run with type prover_state = unit) @@ -31,14 +33,14 @@ struct type 
nonrec t = Field.t t - let state {state; _} = Array.copy state + let state { state; _ } = Array.copy state - let copy {state; params; sponge_state} = - {state= Array.copy state; params; sponge_state} + let copy { state; params; sponge_state } = + { state = Array.copy state; params; sponge_state } let initial_state = Array.init m ~f:(fun _ -> Field.zero) - let of_sponge {Sponge.state; params; sponge_state} = + let of_sponge { Sponge.state; params; sponge_state } = let sponge_state = match sponge_state with | Squeezed n -> @@ -53,14 +55,15 @@ struct | _ -> assert false in - Absorbing {next_index; xs= []} + Absorbing { next_index; xs = [] } in - {sponge_state; state= Array.copy state; params} + { sponge_state; state = Array.copy state; params } let create ?(init = initial_state) params = { params - ; state= Array.copy init - ; sponge_state= Absorbing {next_index= Boolean.false_; xs= []} } + ; state = Array.copy init + ; sponge_state = Absorbing { next_index = Boolean.false_; xs = [] } + } let () = assert (rate = 2) @@ -70,7 +73,7 @@ struct (* a.(0) <- a.(0) + i_equals_0 * x a.(1) <- a.(1) + i_equals_1 * x *) - List.iteri [i_equals_0; i_equals_1] ~f:(fun j i_equals_j -> + List.iteri [ i_equals_0; i_equals_1 ] ~f:(fun j i_equals_j -> let a_j' = exists Field.typ ~compute: @@ -82,7 +85,7 @@ struct else a_j) in assert_r1cs x (i_equals_j :> Field.t) Field.(a_j' - a.(j)) ; - a.(j) <- a_j' ) + a.(j) <- a_j') let consume ~params ~start_pos input state = assert (Array.length state = m) ; @@ -122,41 +125,41 @@ struct let y = Field.(y * (b' :> t)) in let add_in_y_after_perm = (* post - add in - (1, 1, 1) + add in + (1, 1, 1) - do not add in - (1, 1, 0) - (0, 1, 0) - (0, 1, 1) + do not add in + (1, 1, 0) + (0, 1, 0) + (0, 1, 1) - (1, 0, 0) - (1, 0, 1) - (0, 0, 0) - (0, 0, 1) + (1, 0, 0) + (1, 0, 1) + (0, 0, 0) + (0, 0, 1) *) (* Only one case where we add in y after the permutation is applied *) - Boolean.all [b; b'; p] + Boolean.all [ b; b'; p ] in let add_in_y_before_perm = 
Boolean.not add_in_y_after_perm in add_in state p Field.(x * (b :> t)) ; add_in state p' Field.(y * (add_in_y_before_perm :> t)) ; let permute = (* (b, b', p) - true: - (0, 1, 1) - (1, 0, 1) - (1, 1, 0) - (1, 1, 1) + true: + (0, 1, 1) + (1, 0, 1) + (1, 1, 0) + (1, 1, 1) - false: - (0, 0, 0) - (0, 0, 1) - (0, 1, 0) - (1, 0, 0) + false: + (0, 0, 0) + (0, 0, 1) + (0, 1, 0) + (1, 0, 0) *) (* (b && b') || (p && (b || b')) *) - Boolean.(any [all [b; b']; all [p; b ||| b']]) + Boolean.(any [ all [ b; b' ]; all [ p; b ||| b' ] ]) in cond_permute permute ; add_in state p' Field.(y * (add_in_y_after_perm :> t)) @@ -173,7 +176,7 @@ struct let p = !pos in pos := Boolean.( lxor ) p b ; add_in state p Field.(x * (b :> t)) ; - Boolean.any [p; b; empty_imput] + Boolean.any [ p; b; empty_imput ] | _ -> assert false in @@ -181,10 +184,10 @@ struct let absorb (t : t) x = match t.sponge_state with - | Absorbing {next_index; xs} -> - t.sponge_state <- Absorbing {next_index; xs= x :: xs} + | Absorbing { next_index; xs } -> + t.sponge_state <- Absorbing { next_index; xs = x :: xs } | Squeezed _ -> - t.sponge_state <- Absorbing {next_index= Boolean.false_; xs= [x]} + t.sponge_state <- Absorbing { next_index = Boolean.false_; xs = [ x ] } let squeeze (t : t) = match t.sponge_state with @@ -196,7 +199,7 @@ struct else ( t.sponge_state <- Squeezed (n + 1) ; t.state.(n) ) - | Absorbing {next_index; xs} -> + | Absorbing { next_index; xs } -> consume ~start_pos:next_index ~params:t.params (Array.of_list_rev xs) t.state ; t.sponge_state <- Squeezed 1 ; @@ -222,8 +225,9 @@ struct let a () = Array.init 3 ~f:(fun _ -> Field.(constant (Constant.random ()))) in - { mds= Array.init 3 ~f:(fun _ -> a ()) - ; round_constants= Array.init 40 ~f:(fun _ -> a ()) } + { mds = Array.init 3 ~f:(fun _ -> a ()) + ; round_constants = Array.init 40 ~f:(fun _ -> a ()) + } in let filtered_res = let n = List.length filtered in @@ -234,7 +238,7 @@ struct make_checked (fun () -> let s = S.create params in List.iter xs 
~f:(S.absorb s) ; - S.squeeze s ) ) + S.squeeze s)) filtered in let opt_res = @@ -246,15 +250,14 @@ struct make_checked (fun () -> let s = create params in List.iter xs ~f:(absorb s) ; - squeeze s ) ) + squeeze s)) ps in if not (Field.Constant.equal filtered_res opt_res) then failwithf - !"hash(%{sexp:Field.Constant.t list}) = \ - %{sexp:Field.Constant.t}\n\ + !"hash(%{sexp:Field.Constant.t list}) = %{sexp:Field.Constant.t}\n\ hash(%{sexp:(bool * Field.Constant.t) list}) = \ %{sexp:Field.Constant.t}" - filtered filtered_res ps opt_res () ) + filtered filtered_res ps opt_res ()) end ) end diff --git a/src/lib/pickles/pairing_main.ml b/src/lib/pickles/pairing_main.ml index c7d672ea2bf..70d95b1d854 100644 --- a/src/lib/pickles/pairing_main.ml +++ b/src/lib/pickles/pairing_main.ml @@ -11,9 +11,9 @@ module S = Sponge module Make (Inputs : Intf.Pairing_main_inputs.S - with type Impl.field = Backend.Tick.Field.t - and type Impl.prover_state = unit - and type Inner_curve.Constant.Scalar.t = Backend.Tock.Field.t) = + with type Impl.field = Backend.Tick.Field.t + and type Impl.prover_state = unit + and type Inner_curve.Constant.Scalar.t = Backend.Tock.Field.t) = struct open Inputs open Impl @@ -33,9 +33,8 @@ struct let typ = Impls.Step.Other_field.typ let to_bits_unsafe ((x, b) : t) = - Step_main_inputs.Unsafe.unpack_unboolean x - ~length:(Field.size_in_bits - 1) - @ [b] + Step_main_inputs.Unsafe.unpack_unboolean x ~length:(Field.size_in_bits - 1) + @ [ b ] end let print_g lab g = @@ -44,7 +43,7 @@ struct As_prover.( fun () -> match Inner_curve.to_field_elements g with - | [x; y] -> + | [ x; y ] -> printf !"%s: %!" lab ; Field.Constant.print (read_var x) ; printf ", %!" ; @@ -68,12 +67,12 @@ struct as_prover (fun () -> printf "%s: %!" lab ; Field.Constant.print (As_prover.read Field.typ x) ; - printf "\n%!" ) + printf "\n%!") let print_bool lab x = if debug then as_prover (fun () -> - printf "%s: %b\n%!" lab (As_prover.read Boolean.typ x) ) + printf "%s: %b\n%!" 
lab (As_prover.read Boolean.typ x)) let equal_g g1 g2 = List.map2_exn ~f:Field.equal @@ -87,9 +86,9 @@ struct ~g1_to_field_elements:Inner_curve.to_field_elements ~absorb_scalar:(fun (x, b) -> Sponge.absorb sponge (`Field x) ; - Sponge.absorb sponge (`Bits [b]) ) + Sponge.absorb sponge (`Bits [ b ])) ~mask_g1_opt:(fun ((b : Boolean.var), (x, y)) -> - Field.((b :> t) * x, (b :> t) * y) ) + Field.((b :> t) * x, (b :> t) * y)) ty t module Scalar_challenge = @@ -110,15 +109,15 @@ struct let correction = Array.mapi ts ~f:(fun i (s, x) -> let n = Array.length s in - pow2pow x n ) + pow2pow x n) |> Array.reduce_exn ~f:Inner_curve.Constant.( + ) in let acc = Array.mapi ts ~f:(fun i (s, x) -> - Ops.scale_fast (Inner_curve.constant x) (`Plus_two_to_len s) ) + Ops.scale_fast (Inner_curve.constant x) (`Plus_two_to_len s)) |> Array.reduce_exn ~f:Inner_curve.( + ) in - Inner_curve.(acc + constant (Constant.negate correction)) ) + Inner_curve.(acc + constant (Constant.negate correction))) let squeeze_scalar sponge : Scalar_challenge.t * Field.t SC.SC.t = let bits, packed = Sponge.squeeze sponge ~length:Challenge.length in @@ -131,19 +130,19 @@ struct let prechallenges = Array.mapi gammas ~f:(fun i gammas_i -> absorb (PC :: PC) gammas_i ; - squeeze_scalar sponge ) + squeeze_scalar sponge) in let term_and_challenge (l, r) (pre, pre_packed) = let left_term = Scalar_challenge.endo_inv l pre in let right_term = Scalar_challenge.endo r pre in ( Inner_curve.(left_term + right_term) - , {Bulletproof_challenge.prechallenge= pre_packed} ) + , { Bulletproof_challenge.prechallenge = pre_packed } ) in let terms, challenges = Array.map2_exn gammas prechallenges ~f:term_and_challenge |> Array.unzip in - (Array.reduce_exn terms ~f:Inner_curve.( + ), challenges) ) + (Array.reduce_exn terms ~f:Inner_curve.( + ), challenges)) let group_map = let f = @@ -154,7 +153,7 @@ struct let params = Group_map.Bw19.Params.create (module Field.Constant) - {b= Inner_curve.Params.b} + { b = Inner_curve.Params.b } 
end) in let open M in @@ -165,7 +164,7 @@ struct Field.( (x * x * x) + (constant Inner_curve.Params.a * x) - + constant Inner_curve.Params.b) ) + + constant Inner_curve.Params.b)) |> unstage) in fun x -> Lazy.force f x @@ -174,15 +173,14 @@ struct Ops.scale_fast p (`Plus_two_to_len (Array.of_list bits)) let check_bulletproof ~pcs_batch ~sponge ~xi ~combined_inner_product - ~ - (* Corresponds to y in figure 7 of WTS *) - (* sum_i r^i sum_j xi^j f_j(beta_i) *) + ~(* Corresponds to y in figure 7 of WTS *) + (* sum_i r^i sum_j xi^j f_j(beta_i) *) (advice : _ Openings.Bulletproof.Advice.t) ~polynomials:(without_degree_bound, with_degree_bound) - ~openings_proof:({lr; delta; z_1; z_2; sg} : - ( Inner_curve.t - , Other_field.t Shifted_value.t ) - Openings.Bulletproof.t) = + ~openings_proof: + ({ lr; delta; z_1; z_2; sg } : + (Inner_curve.t, Other_field.t Shifted_value.t) Openings.Bulletproof.t) + = let scale_fast p s = scale_fast p (Shifted_value.map ~f:Other_field.to_bits_unsafe s) in @@ -192,8 +190,8 @@ struct | Shifted_value.Shifted_value x -> x ) ; (* a_hat should be equal to - sum_i < t, r^i pows(beta_i) > - = sum_i r^i < t, pows(beta_i) > *) + sum_i < t, r^i pows(beta_i) > + = sum_i r^i < t, pows(beta_i) > *) let u = let t = Sponge.squeeze_field sponge in group_map t @@ -208,19 +206,19 @@ struct | `Finite of Inner_curve.t ]) ~xi p -> match acc with | `Maybe_finite (acc_is_finite, (acc : Inner_curve.t)) -> ( - match p with - | `Maybe_finite (p_is_finite, p) -> - let is_finite = - Boolean.(p_is_finite ||| acc_is_finite) - in - let xi_acc = Scalar_challenge.endo acc xi in - `Maybe_finite - ( is_finite - , if_ acc_is_finite ~then_:(p + xi_acc) ~else_:p ) - | `Finite p -> - let xi_acc = Scalar_challenge.endo acc xi in - `Finite - (if_ acc_is_finite ~then_:(p + xi_acc) ~else_:p) ) + match p with + | `Maybe_finite (p_is_finite, p) -> + let is_finite = + Boolean.(p_is_finite ||| acc_is_finite) + in + let xi_acc = Scalar_challenge.endo acc xi in + `Maybe_finite + ( is_finite 
+ , if_ acc_is_finite ~then_:(p + xi_acc) ~else_:p ) + | `Finite p -> + let xi_acc = Scalar_challenge.endo acc xi in + `Finite + (if_ acc_is_finite ~then_:(p + xi_acc) ~else_:p) ) | `Finite acc -> let xi_acc = Scalar_challenge.endo acc xi in `Finite @@ -228,21 +226,20 @@ struct | `Finite p -> p + xi_acc | `Maybe_finite (p_is_finite, p) -> - if_ p_is_finite ~then_:(p + xi_acc) ~else_:xi_acc - ) ) + if_ p_is_finite ~then_:(p + xi_acc) ~else_:xi_acc )) ~xi ~init:(function - | `Finite x -> `Finite x | `Maybe_finite x -> `Maybe_finite x - ) + | `Finite x -> `Finite x | `Maybe_finite x -> `Maybe_finite x) (Vector.map without_degree_bound ~f:(Array.map ~f:(fun x -> `Finite x))) (Vector.map with_degree_bound ~f: (let open Dlog_plonk_types.Poly_comm.With_degree_bound in - fun {shifted; unshifted} -> + fun { shifted; unshifted } -> let f x = `Maybe_finite x in - {unshifted= Array.map ~f unshifted; shifted= f shifted})) - ) + { unshifted = Array.map ~f unshifted + ; shifted = f shifted + }))) |> function `Finite x -> x | `Maybe_finite _ -> assert false in let lr_prod, challenges = bullet_reduce sponge lr in @@ -265,9 +262,9 @@ struct let z2_h = scale_fast (Inner_curve.constant (Lazy.force Generators.h)) z_2 in - z_1_g_plus_b_u + z2_h ) + z_1_g_plus_b_u + z2_h) in - (`Success (equal_g lhs rhs), challenges) ) + (`Success (equal_g lhs rhs), challenges)) let assert_eq_marlin (m1 : @@ -295,15 +292,15 @@ struct .index_of_domain_log2 (Domain.log2_size domain) in match Precomputed.Lagrange_precomputations.pallas.(d).(i) with - | [|g|] -> + | [| g |] -> Inner_curve.Constant.of_affine g | _ -> assert false let incrementally_verify_proof (type b) (module Branching : Nat.Add.Intf with type n = b) ~domain - ~verification_key:(m : _ array Plonk_verification_key_evals.t) ~xi - ~sponge ~public_input ~(sg_old : (_, Branching.n) Vector.t) + ~verification_key:(m : _ array Plonk_verification_key_evals.t) ~xi ~sponge + ~public_input ~(sg_old : (_, Branching.n) Vector.t) ~combined_inner_product 
~advice ~(messages : (_, Boolean.var * _) Dlog_plonk_types.Messages.t) ~openings_proof @@ -314,16 +311,16 @@ struct Types.Dlog_based.Proof_state.Deferred_values.Plonk.In_circuit.t) = let m = Plonk_verification_key_evals.map m ~f:(function - | [|g|] -> + | [| g |] -> g | _ -> - assert false ) + assert false) in with_label __LOC__ (fun () -> let receive ty f = with_label __LOC__ (fun () -> let x = f messages in - absorb sponge ty x ; x ) + absorb sponge ty x ; x) in let sample () = let bits, packed = Sponge.squeeze sponge ~length:Challenge.length in @@ -336,8 +333,8 @@ struct with_label __LOC__ (fun () -> multiscale_known (Array.mapi public_input ~f:(fun i x -> - (Array.of_list x, lagrange_commitment ~domain i) )) - |> Inner_curve.negate ) + (Array.of_list x, lagrange_commitment ~domain i))) + |> Inner_curve.negate) in let without = Type.Without_degree_bound in let with_ = Type.With_degree_bound in @@ -355,18 +352,19 @@ struct (module Impl) (match zeta with Scalar_challenge x -> x) ; (* At this point, we should use the previous "bulletproof_challenges" to - compute to compute f(beta_1) outside the snark - where f is the polynomial corresponding to sg_old - *) + compute to compute f(beta_1) outside the snark + where f is the polynomial corresponding to sg_old + *) let sponge_before_evaluations = Sponge.copy sponge in let sponge_digest_before_evaluations = Sponge.squeeze_field sponge in + (* xi, r are sampled here using the other sponge. *) (* No need to expose the polynomial evaluations as deferred values as they're - not needed here for the incremental verification. All we need is a_hat and - "combined_inner_product". + not needed here for the incremental verification. All we need is a_hat and + "combined_inner_product". 
- Then, in the other proof, we can witness the evaluations and check their correctness - against "combined_inner_product" *) + Then, in the other proof, we can witness the evaluations and check their correctness + against "combined_inner_product" *) let f_comm = let ( + ) = Inner_curve.( + ) in let ( * ) = Fn.flip scale_fast in @@ -382,7 +380,7 @@ struct (* alpha^2 rcm_comm[0] + alpha^3 rcm_comm[1] + alpha^4 rcm_comm[2] = alpha^2 (rcm_comm[0] + alpha (rcm_comm[1] + alpha rcm_comm[2])) - *) + *) let a = alpha in let ( * ) = Fn.flip Scalar_challenge.endo in m.rcm_comm_0 + (a * (m.rcm_comm_1 + (a * m.rcm_comm_2))) @@ -399,7 +397,8 @@ struct ; with_label __LOC__ (fun () -> plonk.vbmul1 * m.mul2_comm) ; with_label __LOC__ (fun () -> plonk.endomul0 * m.emul1_comm) ; with_label __LOC__ (fun () -> plonk.endomul1 * m.emul2_comm) - ; with_label __LOC__ (fun () -> plonk.endomul2 * m.emul3_comm) ] + ; with_label __LOC__ (fun () -> plonk.endomul2 * m.emul3_comm) + ] in let res = Array.map z_comm ~f:(( * ) plonk.perm0) in res.(0) <- res.(0) + g ; @@ -407,25 +406,26 @@ struct in let bulletproof_challenges = (* This sponge needs to be initialized with (some derivative of) - 1. The polynomial commitments - 2. The combined inner product - 3. The challenge points. + 1. The polynomial commitments + 2. The combined inner product + 3. The challenge points. - It should be sufficient to fork the sponge after squeezing beta_3 and then to absorb - the combined inner product. - *) + It should be sufficient to fork the sponge after squeezing beta_3 and then to absorb + the combined inner product. 
+ *) let without_degree_bound = let T = Branching.eq in Vector.append - (Vector.map sg_old ~f:(fun g -> [|g|])) - [ [|x_hat|] + (Vector.map sg_old ~f:(fun g -> [| g |])) + [ [| x_hat |] ; l_comm ; r_comm ; o_comm ; z_comm ; f_comm - ; [|m.sigma_comm_0|] - ; [|m.sigma_comm_1|] ] + ; [| m.sigma_comm_0 |] + ; [| m.sigma_comm_1 |] + ] (snd (Branching.add Nat.N8.n)) in with_label __LOC__ (fun () -> @@ -436,34 +436,37 @@ struct (Common.max_quot_size_int (Domain.size domain))) ~sponge:sponge_before_evaluations ~xi ~combined_inner_product ~advice ~openings_proof - ~polynomials:(without_degree_bound, [t_comm]) ) + ~polynomials:(without_degree_bound, [ t_comm ])) in assert_eq_marlin - { alpha= plonk.alpha - ; beta= plonk.beta - ; gamma= plonk.gamma - ; zeta= plonk.zeta } - { alpha= alpha_packed - ; beta= beta_packed - ; gamma= gamma_packed - ; zeta= zeta_packed } ; - (sponge_digest_before_evaluations, bulletproof_challenges) ) + { alpha = plonk.alpha + ; beta = plonk.beta + ; gamma = plonk.gamma + ; zeta = plonk.zeta + } + { alpha = alpha_packed + ; beta = beta_packed + ; gamma = gamma_packed + ; zeta = zeta_packed + } ; + (sponge_digest_before_evaluations, bulletproof_challenges)) let compute_challenges ~scalar chals = with_label __LOC__ (fun () -> - Vector.map chals ~f:(fun {Bulletproof_challenge.prechallenge} -> - scalar prechallenge ) ) + Vector.map chals ~f:(fun { Bulletproof_challenge.prechallenge } -> + scalar prechallenge)) let b_poly = Field.(Dlog_main.b_poly ~add ~mul ~one) module Pseudo = Pseudo.Make (Impl) module Bounded = struct - type t = {max: int; actual: Field.t} + type t = { max : int; actual : Field.t } let of_pseudo ((_, ns) as p : _ Pseudo.t) = - { max= Vector.reduce_exn ~f:Int.max ns - ; actual= Pseudo.choose p ~f:Field.of_int } + { max = Vector.reduce_exn ~f:Int.max ns + ; actual = Pseudo.choose p ~f:Field.of_int + } end let vanishing_polynomial mask = @@ -521,7 +524,8 @@ struct method generator = Lazy.force generator method size = - Pseudo.choose 
(mask, domain_log2s) ~f:(fun x -> Field.of_int (1 lsl x)) + Pseudo.choose (mask, domain_log2s) ~f:(fun x -> + Field.of_int (1 lsl x)) method vanishing_polynomial = vp end @@ -544,7 +548,7 @@ struct Vector.map mask ~f:(fun b -> (* 0 -> 1 1 -> 2 *) - Field.((b :> t) + one) ) + Field.((b :> t) + one)) |> Vector.reduce_exn ~f:Field.( * ) in object @@ -559,10 +563,11 @@ struct method generator = generator end in - { Domains.h= + { Domains.h = domain - (Vector.map domains ~f:(fun {h; _} -> h)) - ~max:(Domain.log2_size max_domains.h) } + (Vector.map domains ~f:(fun { h; _ } -> h)) + ~max:(Domain.log2_size max_domains.h) + } let%test_module "side loaded domains" = ( module struct @@ -571,7 +576,7 @@ struct run_and_check (fun () -> let y = k () in - fun () -> As_prover.read_var y ) + fun () -> As_prover.read_var y) () |> Or_error.ok_exn in @@ -597,14 +602,13 @@ struct [%test_eq: Field.Constant.t] (d_unchecked#vanishing_polynomial pt) (run (fun () -> - (checked_domain ())#vanishing_polynomial (Field.constant pt) - )) ) + (checked_domain ())#vanishing_polynomial (Field.constant pt)))) let%test_unit "side loaded domains" = let module O = One_hot_vector.Make (Impl) in let open Side_loaded_verification_key in let branches = Nat.N2.n in - let domains = Vector.[{Domains.h= 10}; {h= 15}] in + let domains = Vector.[ { Domains.h = 10 }; { h = 15 } ] in let pt = Field.Constant.random () in List.iteri (Vector.to_list domains) ~f:(fun i ds -> let check field1 field2 = @@ -620,7 +624,7 @@ struct (Vector.map domains ~f: (Domains.map ~f:(fun x -> - Domain.Pow_2_roots_of_unity (Field.of_int x) ))) + Domain.Pow_2_roots_of_unity (Field.of_int x)))) (O.of_index (Field.of_int i) ~length:branches) |> field2 in @@ -630,15 +634,15 @@ struct (d_unchecked#vanishing_polynomial pt) (run (fun () -> (checked_domain ())#vanishing_polynomial - (Field.constant pt) )) + (Field.constant pt))) in - check Domains.h Domains.h ) + check Domains.h Domains.h) end ) module Split_evaluations = struct open 
Dlog_plonk_types - let mask' {Bounded.max; actual} : Boolean.var array = + let mask' { Bounded.max; actual } : Boolean.var array = let (T max) = Nat.of_int max in Vector.to_array (ones_vector (module Impl) ~first_zero:actual max) @@ -649,11 +653,11 @@ struct (List.max_elt ~compare:Int.compare (Vector.to_list lengths)) in let actual = Pseudo.choose (choice, lengths) ~f:Field.of_int in - mask' {max; actual} + mask' { max; actual } let last = Array.reduce_exn ~f:(fun (b_acc, x_acc) (b, x) -> - (Boolean.(b_acc ||| b), Field.if_ b ~then_:x ~else_:x_acc) ) + (Boolean.(b_acc ||| b), Field.if_ b ~then_:x ~else_:x_acc)) let rec pow x bits_lsb = let rec go acc bs = @@ -677,7 +681,7 @@ struct Pcs_batch.combine_split_evaluations ~last ~mul:(fun (keep, x) (y : Field.t) -> (keep, Field.(y * x))) ~mul_and_add:(fun ~acc ~xi (keep, fx) -> - Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc ) + Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc) ~init:(fun (_, fx) -> fx) (Common.dlog_pcs_batch b_plus_8 ~max_quot_size) ~shifted_pow: @@ -687,7 +691,7 @@ struct Pcs_batch.combine_split_evaluations ~last ~mul:(fun (keep, x) (y : Field.t) -> (keep, Field.(y * x))) ~mul_and_add:(fun ~acc ~xi (keep, fx) -> - Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc ) + Field.if_ keep ~then_:Field.(fx + (xi * acc)) ~else_:acc) ~init:(fun (_, fx) -> fx) (Common.dlog_pcs_batch b_plus_8 ~max_quot_size) ~shifted_pow:(fun deg x -> pow x deg) @@ -696,7 +700,7 @@ struct (choice : n One_hot_vector.T(Impl).t) (e : Field.t array Evals.t) : (Boolean.var * Field.t) array Evals.t = Evals.map2 lengths e ~f:(fun lengths e -> - Array.zip_exn (mask ~lengths choice) e ) + Array.zip_exn (mask ~lengths choice) e) end let combined_evaluation (type b b_plus_8) b_plus_8 ~xi ~evaluation_point @@ -721,9 +725,7 @@ struct Field.t = let pt_n = let max_degree_log2 = Int.ceil_log2 Max_degree.step in - let rec go acc i = - if i = 0 then acc else go (Field.square acc) (i - 1) - in + let rec go acc i = if i = 0 
then acc else go (Field.square acc) (i - 1) in go pt max_degree_log2 in match List.rev (Array.to_list e) with @@ -732,7 +734,7 @@ struct ~init:Field.((b :> t) * e) es ~f:(fun acc (keep, fx) -> - Field.if_ keep ~then_:Field.(fx + (pt_n * acc)) ~else_:acc ) + Field.if_ keep ~then_:Field.(fx + (pt_n * acc)) ~else_:acc) | [] -> failwith "empty list" @@ -742,7 +744,8 @@ struct module Underlying = Opt_sponge.Make (Impl) (Step_main_inputs.Sponge.Permutation) - include S.Bit_sponge.Make (struct + include S.Bit_sponge.Make + (struct type t = Boolean.var end) (struct @@ -763,10 +766,11 @@ struct let side_loaded_commitment_lengths ~h = let max_lengths = Commitment_lengths.of_domains ~max_degree:Max_degree.step - { h= + { h = Pow_2_roots_of_unity Side_loaded_verification_key.(Domain.log2_size max_domains.h) - ; x= Pow_2_roots_of_unity 0 } + ; x = Pow_2_roots_of_unity 0 + } in Commitment_lengths.generic' ~h ~add:Field.add ~mul:Field.mul ~sub:Field.sub ~of_int:Field.of_int @@ -778,26 +782,25 @@ struct let d = of_bits (Field.unpack ~length:max_log2_degree d) in to_var (Number.ceil_div_pow_2 d (`Two_to_the k))) |> Evals.map2 max_lengths ~f:(fun max actual -> - Split_evaluations.mask' {actual; max} ) + Split_evaluations.mask' { actual; max }) let shift = - Shifted_value.Shift.( - map ~f:Field.constant (create (module Field.Constant))) + Shifted_value.Shift.(map ~f:Field.constant (create (module Field.Constant))) let%test_unit "endo scalar" = SC.test (module Impl) ~endo:Endo.Wrap_inner_curve.scalar (* This finalizes the "deferred values" coming from a previous proof over the same field. - It - 1. Checks that [xi] and [r] where sampled correctly. I.e., by absorbing all the - evaluation openings and then squeezing. - 2. Checks that the "combined inner product" value used in the elliptic curve part of - the opening proof was computed correctly, in terms of the evaluation openings and the - evaluation points. - 3. Check that the "b" value was computed correctly. - 4. 
Perform the arithmetic checks from marlin. *) + It + 1. Checks that [xi] and [r] where sampled correctly. I.e., by absorbing all the + evaluation openings and then squeezing. + 2. Checks that the "combined inner product" value used in the elliptic curve part of + the opening proof was computed correctly, in terms of the evaluation openings and the + evaluation points. + 3. Check that the "b" value was computed correctly. + 4. Perform the arithmetic checks from marlin. *) (* TODO: This needs to handle the fact of variable length evaluations. - Meaning it needs opt sponge. *) + Meaning it needs opt sponge. *) let finalize_other_proof (type b branches) (module Branching : Nat.Add.Intf with type n = b) ~max_width ~(step_domains : @@ -808,14 +811,15 @@ struct , branches ) Vector.t ]) ~step_widths ~(* TODO: Add "actual branching" so that proofs don't - carry around dummy "old bulletproof challenges" *) + carry around dummy "old bulletproof challenges" *) sponge ~(old_bulletproof_challenges : (_, b) Vector.t) ({ xi ; combined_inner_product ; bulletproof_challenges ; which_branch ; b - ; plonk } : + ; plonk + } : ( _ , _ , Field.t Shifted_value.t @@ -837,7 +841,7 @@ struct side_loaded_input_domain ~width: (Side_loaded_verification_key.Width.Checked.to_field - (Option.value_exn max_width)) ) ) + (Option.value_exn max_width)) )) in let actual_width = Pseudo.choose (which_branch, step_widths) ~f:Fn.id in let (evals1, x_hat1), (evals2, x_hat2) = @@ -845,17 +849,17 @@ struct let lengths = match step_domains with | `Known domains -> - let hs = map domains ~f:(fun {Domains.h; _} -> h) in + let hs = map domains ~f:(fun { Domains.h; _ } -> h) in Commitment_lengths.generic map ~h:(map hs ~f:Domain.size) ~max_degree:Max_degree.step |> Evals.map ~f:(fun lengths -> - Bounded.of_pseudo (which_branch, lengths) ) + Bounded.of_pseudo (which_branch, lengths)) |> Evals.map ~f:Split_evaluations.mask' - | `Side_loaded {h} -> + | `Side_loaded { h } -> side_loaded_commitment_lengths ~h in 
Tuple_lib.Double.map es ~f:(fun (e, x) -> - (Evals.map2 lengths e ~f:Array.zip_exn, x) ) ) + (Evals.map2 lengths e ~f:Array.zip_exn, x))) in let T = Branching.eq in (* You use the NEW bulletproof challenges to check b. Not the old ones. *) @@ -864,9 +868,8 @@ struct with_label __LOC__ (fun () -> let xs, ys = Evals.to_vectors e in List.iter - Vector.([|(Boolean.true_, x_hat)|] :: (to_list xs @ to_list ys)) - ~f:(Array.iter ~f:(fun (b, x) -> Opt_sponge.absorb sponge (b, x))) - ) + Vector.([| (Boolean.true_, x_hat) |] :: (to_list xs @ to_list ys)) + ~f:(Array.iter ~f:(fun (b, x) -> Opt_sponge.absorb sponge (b, x)))) in (* A lot of hashing. *) absorb_evals x_hat1 evals1 ; @@ -889,9 +892,9 @@ struct let domain = match step_domains with | `Known ds -> - let hs = map ds ~f:(fun {Domains.h; _} -> h) in + let hs = map ds ~f:(fun { Domains.h; _ } -> h) in Pseudo.Domain.to_domain (which_branch, hs) ~shifts ~domain_generator - | `Side_loaded {h} -> + | `Side_loaded { h } -> (h :> _ Plonk_checks.plonk_domain) in let zetaw = Field.mul domain#generator plonk.zeta in @@ -902,7 +905,7 @@ struct let actual_combined_inner_product = let sg_olds = Vector.map old_bulletproof_challenges ~f:(fun chals -> - unstage (b_poly (Vector.to_array chals)) ) + unstage (b_poly (Vector.to_array chals))) in let max_quot_size = match step_domains with @@ -911,12 +914,11 @@ struct `Known ( which_branch , Vector.map step_domains ~f:(fun x -> - Common.max_quot_size_int (Domain.size x.Domains.h) ) ) + Common.max_quot_size_int (Domain.size x.Domains.h)) ) | `Side_loaded domains -> let conv domain = let deg = - Common.max_quot_size ~of_int ~mul:( * ) ~sub:( - ) - domain#size + Common.max_quot_size ~of_int ~mul:( * ) ~sub:( - ) domain#size in let d = Split_evaluations.mod_max_degree deg in Number.( @@ -933,10 +935,10 @@ struct Vector.map2 (ones_vector (module Impl) ~first_zero:actual_width Branching.n) sg_olds - ~f:(fun keep f -> [|(keep, f pt)|]) + ~f:(fun keep f -> [| (keep, f pt) |]) in let v = - 
Vector.append sg_evals ([|(Boolean.true_, x_hat)|] :: a) (snd pi) + Vector.append sg_evals ([| (Boolean.true_, x_hat) |] :: a) (snd pi) in match max_quot_size with | `Known max_quot_size -> @@ -979,7 +981,8 @@ struct [ xi_correct ; b_correct ; combined_inner_product_correct - ; plonk_checks_passed ] + ; plonk_checks_passed + ] , bulletproof_challenges ) let hash_me_only (type s) ~index @@ -994,7 +997,7 @@ struct Inputs.Inner_curve.t Dlog_plonk_types.Poly_comm.Without_degree_bound.t) -> Array.concat_map z - ~f:(Fn.compose List.to_array Inner_curve.to_field_elements) ) + ~f:(Fn.compose List.to_array Inner_curve.to_field_elements)) index) ~f:(fun x -> Sponge.absorb sponge (`Field x)) ; sponge @@ -1005,7 +1008,7 @@ struct ~f:(fun x -> Sponge.absorb sponge (`Field x)) (to_field_elements_without_index t ~app_state:state_to_field_elements ~g:Inner_curve.to_field_elements) ; - Sponge.squeeze_field sponge ) + Sponge.squeeze_field sponge) let hash_me_only_opt (type s) ~index (state_to_field_elements : s -> Field.t array) = @@ -1019,7 +1022,7 @@ struct Inputs.Inner_curve.t Dlog_plonk_types.Poly_comm.Without_degree_bound.t) -> Array.concat_map z - ~f:(Fn.compose List.to_array Inner_curve.to_field_elements) ) + ~f:(Fn.compose List.to_array Inner_curve.to_field_elements)) index) ~f:(fun x -> Sponge.absorb sponge (`Field x)) ; sponge @@ -1034,10 +1037,11 @@ struct let sponge = Sponge.copy after_index in let t = { t with - old_bulletproof_challenges= + old_bulletproof_challenges = Vector.map2 mask t.old_bulletproof_challenges ~f:(fun b v -> - Vector.map v ~f:(fun x -> `Opt (b, x)) ) - ; sg= Vector.map2 mask t.sg ~f:(fun b g -> (b, g)) } + Vector.map v ~f:(fun x -> `Opt (b, x))) + ; sg = Vector.map2 mask t.sg ~f:(fun b g -> (b, g)) + } in let not_opt x = `Not_opt x in let hash_inputs = @@ -1047,7 +1051,7 @@ struct ~g:(fun (b, g) -> List.map ~f:(fun x -> `Opt (b, x)) - (Inner_curve.to_field_elements g) ) + (Inner_curve.to_field_elements g)) in match Array.fold hash_inputs 
~init:(`Not_opt sponge) ~f:(fun acc t -> @@ -1063,13 +1067,13 @@ struct | `Opt sponge, `Opt t -> Opt_sponge.absorb sponge t ; acc | `Opt _, `Not_opt _ -> - assert false ) + assert false) with | `Not_opt sponge -> (* This means there were no optional inputs. *) Sponge.squeeze_field sponge | `Opt sponge -> - Opt_sponge.squeeze_field sponge ) + Opt_sponge.squeeze_field sponge) let pack_scalar_challenge (Scalar_challenge c : Scalar_challenge.t) = Field.pack (Challenge.to_bits c) @@ -1087,18 +1091,19 @@ struct Types.Pairing_based.Proof_state.Per_proof.In_circuit.t) = let public_input = let fp (Shifted_value.Shifted_value x) = - [|Step_main_inputs.Unsafe.unpack_unboolean x|] + [| Step_main_inputs.Unsafe.unpack_unboolean x |] in with_label __LOC__ (fun () -> Spec.pack (module Impl) fp Types.Dlog_based.Statement.In_circuit.spec - (Types.Dlog_based.Statement.In_circuit.to_data statement) ) + (Types.Dlog_based.Statement.In_circuit.to_data statement)) in let sponge = Sponge.create sponge_params in let { Types.Pairing_based.Proof_state.Deferred_values.xi ; combined_inner_product - ; b } = + ; b + } = unfinalized.deferred_values in let ( sponge_digest_before_evaluations_actual @@ -1109,7 +1114,7 @@ struct in incrementally_verify_proof branching ~domain:wrap_domain ~xi ~verification_key:wrap_verification_key ~sponge ~public_input ~sg_old - ~combined_inner_product ~advice:{b} ~messages ~openings_proof:opening + ~combined_inner_product ~advice:{ b } ~messages ~openings_proof:opening ~plonk: ((* Actually no need to boolean constrain here (i.e. in Other_field.to_bits_unsafe) when unpacking because the scaling functions boolean constrain the bits. 
@@ -1132,7 +1137,7 @@ struct Field.if_ is_base_case ~then_:c1 ~else_:(match c2.prechallenge with Scalar_challenge c2 -> c2) in - Field.Assert.equal c1 c2 ) ) ; + Field.Assert.equal c1 c2)) ; bulletproof_success end diff --git a/src/lib/pickles/pickles.ml b/src/lib/pickles/pickles.ml index 76d385d31ad..63af2f2e85a 100644 --- a/src/lib/pickles/pickles.ml +++ b/src/lib/pickles/pickles.ml @@ -167,7 +167,8 @@ let pad_pass_throughs and type length = max_branching) (pass_throughs : local_max_branchings H1.T(Proof_.Me_only.Dlog_based).t) = let dummy_chals = Dummy.Ipa.Wrap.challenges in - let rec go : type len ms ns. + let rec go : + type len ms ns. ms H1.T(Nat).t -> ns H1.T(Proof_.Me_only.Dlog_based).t -> ms H1.T(Proof_.Me_only.Dlog_based).t = @@ -178,16 +179,16 @@ let pad_pass_throughs | [], [] -> [] | m :: maxes, [] -> - { sg= Lazy.force Dummy.Ipa.Step.sg - ; old_bulletproof_challenges= Vector.init m ~f:(fun _ -> dummy_chals) + { sg = Lazy.force Dummy.Ipa.Step.sg + ; old_bulletproof_challenges = Vector.init m ~f:(fun _ -> dummy_chals) } :: go maxes [] | m :: maxes, me_only :: me_onlys -> let me_only = { me_only with - old_bulletproof_challenges= - Vector.extend_exn me_only.old_bulletproof_challenges m - dummy_chals } + old_bulletproof_challenges = + Vector.extend_exn me_only.old_bulletproof_challenges m dummy_chals + } in me_only :: go maxes me_onlys in @@ -203,24 +204,26 @@ module Verification_key = struct let dummy : unit -> t = let header = - { Snark_keys_header.header_version= Snark_keys_header.header_version - ; kind= {type_= "verification key"; identifier= "dummy"} - ; constraint_constants= - { sub_windows_per_window= 0 - ; ledger_depth= 0 - ; work_delay= 0 - ; block_window_duration_ms= 0 - ; transaction_capacity= Log_2 0 - ; pending_coinbase_depth= 0 - ; coinbase_amount= Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor= 0 - ; account_creation_fee= Unsigned.UInt64.of_int 0 - ; fork= None } - ; commits= {mina= ""; marlin= ""} - ; length= 0 - ; commit_date= 
"" - ; constraint_system_hash= "" - ; identifying_hash= "" } + { Snark_keys_header.header_version = Snark_keys_header.header_version + ; kind = { type_ = "verification key"; identifier = "dummy" } + ; constraint_constants = + { sub_windows_per_window = 0 + ; ledger_depth = 0 + ; work_delay = 0 + ; block_window_duration_ms = 0 + ; transaction_capacity = Log_2 0 + ; pending_coinbase_depth = 0 + ; coinbase_amount = Unsigned.UInt64.of_int 0 + ; supercharged_coinbase_factor = 0 + ; account_creation_fee = Unsigned.UInt64.of_int 0 + ; fork = None + } + ; commits = { mina = ""; marlin = "" } + ; length = 0 + ; commit_date = "" + ; constraint_system_hash = "" + ; identifying_hash = "" + } in let t = lazy (dummy_id, header, Md5.digest_string "") in fun () -> Lazy.force t @@ -249,8 +252,9 @@ end module Prover = struct type ('prev_values, 'local_widths, 'local_heights, 'a_value, 'proof) t = - ?handler:( Snarky_backendless.Request.request - -> Snarky_backendless.Request.response) + ?handler: + ( Snarky_backendless.Request.request + -> Snarky_backendless.Request.response) -> ( 'prev_values , 'local_widths , 'local_heights ) @@ -270,8 +274,7 @@ module Proof_system = struct t = | T : ('a_var, 'a_value, 'max_branching, 'branches) Tag.t - * (module Proof_intf with type t = 'proof - and type statement = 'a_value) + * (module Proof_intf with type t = 'proof and type statement = 'a_value) * ( 'prev_valuess , 'widthss , 'heightss @@ -347,18 +350,17 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let sys = Backend.Tick.R1CS_constraint_system.create () in fun (c : Impls.Step.Constraint.t) -> let prev = sys.next_row in - List.iter c ~f:(fun {annotation; basic} -> + List.iter c ~f:(fun { annotation; basic } -> Backend.Tick.R1CS_constraint_system.add_constraint sys - ?label:annotation basic ) ; + ?label:annotation basic) ; let next = sys.next_row in next - prev in Constraints.log ~weight Impls.Step.( - make_checked (fun () -> - ( let x = with_label __LOC__ 
(fun () -> exists typ) in - main x () - : unit ) )) + make_checked (fun () : unit -> + let x = with_label __LOC__ (fun () -> exists typ) in + main x ())) in Snarky_log.to_file (sprintf "step-snark-%s-%d.json" name (Index.to_int index)) @@ -370,19 +372,18 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let sys = Backend.Tock.R1CS_constraint_system.create () in let weight (c : Impls.Wrap.Constraint.t) = let prev = sys.next_row in - List.iter c ~f:(fun {annotation; basic} -> + List.iter c ~f:(fun { annotation; basic } -> Backend.Tock.R1CS_constraint_system.add_constraint sys - ?label:annotation basic ) ; + ?label:annotation basic) ; let next = sys.next_row in next - prev in let log = Constraints.log ~weight Impls.Wrap.( - make_checked (fun () -> - ( let x = with_label __LOC__ (fun () -> exists typ) in - main x () - : unit ) )) + make_checked (fun () : unit -> + let x = with_label __LOC__ (fun () -> exists typ) in + main x ())) in log in @@ -392,19 +393,21 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct name (Type_equal.Id.uid id)) log - let compile - : type prev_varss prev_valuess widthss heightss max_branching branches. + let compile : + type prev_varss prev_valuess widthss heightss max_branching branches. 
self:(A.t, A_value.t, max_branching, branches) Tag.t -> cache:Key_cache.Spec.t list - -> ?disk_keys:(Cache.Step.Key.Verification.t, branches) Vector.t - * Cache.Wrap.Key.Verification.t + -> ?disk_keys: + (Cache.Step.Key.Verification.t, branches) Vector.t + * Cache.Wrap.Key.Verification.t -> branches:(module Nat.Intf with type n = branches) -> max_branching:(module Nat.Add.Intf with type n = max_branching) -> name:string -> constraint_constants:Snark_keys_header.Constraint_constants.t -> typ:(A.t, A_value.t) Impls.Step.Typ.t - -> choices:( self:(A.t, A_value.t, max_branching, branches) Tag.t - -> (prev_varss, prev_valuess, widthss, heightss) H4.T(IR).t) + -> choices: + ( self:(A.t, A_value.t, max_branching, branches) Tag.t + -> (prev_varss, prev_valuess, widthss, heightss) H4.T(IR).t) -> ( prev_valuess , widthss , heightss @@ -418,17 +421,20 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct ~max_branching:(module Max_branching) ~name ~constraint_constants ~typ ~choices -> let snark_keys_header kind constraint_system_hash = - { Snark_keys_header.header_version= Snark_keys_header.header_version + { Snark_keys_header.header_version = Snark_keys_header.header_version ; kind ; constraint_constants - ; commits= - {mina= Mina_version.commit_id; marlin= Mina_version.marlin_commit_id} - ; length= (* This is a dummy, it gets filled in on read/write. *) 0 - ; commit_date= Mina_version.commit_date + ; commits = + { mina = Mina_version.commit_id + ; marlin = Mina_version.marlin_commit_id + } + ; length = (* This is a dummy, it gets filled in on read/write. *) 0 + ; commit_date = Mina_version.commit_date ; constraint_system_hash - ; identifying_hash= + ; identifying_hash = (* TODO: Proper identifying hash. 
*) - constraint_system_hash } + constraint_system_hash + } in Timer.start __LOC__ ; let T = Max_branching.eq in @@ -440,12 +446,13 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct (module Max_branching) prev_varss_length choices ~self:self.id in - let full_signature = {Full_signature.padded; maxes= (module Maxes)} in + let full_signature = { Full_signature.padded; maxes = (module Maxes) } in Timer.clock __LOC__ ; let wrap_domains = let module M = Wrap_domains.Make (A) (A_value) in - let rec f : type a b c d. - (a, b, c, d) H4.T(IR).t -> (a, b, c, d) H4.T(M.I).t = function + let rec f : + type a b c d. (a, b, c, d) H4.T(IR).t -> (a, b, c, d) H4.T(M.I).t = + function | [] -> [] | x :: xs -> @@ -491,8 +498,8 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let module M = H4.Map (IR) (Branch_data) (struct - let f : type a b c d. - (a, b, c, d) IR.t -> (a, b, c, d) Branch_data.t = + let f : + type a b c d. (a, b, c, d) IR.t -> (a, b, c, d) Branch_data.t = fun rule -> Timer.clock __LOC__ ; let res = @@ -500,7 +507,7 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct Step_branch_data.create ~index:(Index.of_int_exn !i) ~max_branching:Max_branching.n ~branches:Branches.n ~self ~typ A.to_field_elements A_value.to_field_elements rule - ~wrap_domains ~branchings:step_widths ) + ~wrap_domains ~branchings:step_widths) in Timer.clock __LOC__ ; incr i ; res end) @@ -547,14 +554,15 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let open Impls.Step in let k_p = lazy - (let cs = constraint_system ~exposing:[typ] main in + (let cs = constraint_system ~exposing:[ typ ] main in let cs_hash = Md5.to_hex (R1CS_constraint_system.digest cs) in ( Type_equal.Id.uid self.id , snark_keys_header - { type_= "step-proving-key" - ; identifier= name ^ "-" ^ b.rule.identifier } + { type_ = "step-proving-key" + ; identifier = name ^ "-" ^ b.rule.identifier + } cs_hash , 
Index.to_int b.index , cs )) @@ -569,15 +577,16 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let digest = R1CS_constraint_system.digest cs in ( id , snark_keys_header - { type_= "step-verification-key" - ; identifier= name ^ "-" ^ b.rule.identifier } + { type_ = "step-verification-key" + ; identifier = name ^ "-" ^ b.rule.identifier + } (Md5.to_hex digest) , index , digest )) in let ((pk, vk) as res) = Common.time "step read or generate" (fun () -> - Cache.Step.read_or_generate cache k_p k_v typ main ) + Cache.Step.read_or_generate cache k_p k_v typ main) in accum_dirty (Lazy.map pk ~f:snd) ; accum_dirty (Lazy.map vk ~f:snd) ; @@ -591,7 +600,7 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let module V = H4.To_vector (Lazy_keys) in lazy (Vector.map (V.f prev_varss_length step_keypairs) ~f:(fun (_, vk) -> - Tick.Keypair.vk_commitments (fst (Lazy.force vk)) )) + Tick.Keypair.vk_commitments (fst (Lazy.force vk)))) in Timer.clock __LOC__ ; let wrap_requests, wrap_main = @@ -603,7 +612,8 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct (H4.T (E04 (Domains))) (struct - let f : type a b c d. + let f : + type a b c d. 
(a, b, c, d) IR.t -> (a, b, c, d) H4.T(E04(Domains)).t = fun rule -> let module M = @@ -618,7 +628,7 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct | `Compiled d -> d.wrap_domains | `Side_loaded _ -> - Common.wrap_domains ) + Common.wrap_domains) end) in M.f rule.Inductive_rule.prevs @@ -640,11 +650,11 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let self_id = Type_equal.Id.uid self.id in let disk_key_prover = lazy - (let cs = constraint_system ~exposing:[typ] main in + (let cs = constraint_system ~exposing:[ typ ] main in let cs_hash = Md5.to_hex (R1CS_constraint_system.digest cs) in ( self_id , snark_keys_header - {type_= "wrap-proving-key"; identifier= name} + { type_ = "wrap-proving-key"; identifier = name } cs_hash , cs )) in @@ -656,7 +666,7 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let digest = R1CS_constraint_system.digest cs in ( id , snark_keys_header - {type_= "wrap-verification-key"; identifier= name} + { type_ = "wrap-verification-key"; identifier = name } (Md5.to_hex digest) , digest )) | Some (_, (_id, header, digest)) -> @@ -666,7 +676,7 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct Common.time "wrap read or generate " (fun () -> Cache.Wrap.read_or_generate (Vector.to_array step_domains) - cache disk_key_prover disk_key_verifier typ main ) + cache disk_key_prover disk_key_verifier typ main) in (r, disk_key_verifier) in @@ -677,11 +687,13 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let module S = Step.Make (A) (A_value) (Max_branching) in let provers = let module Z = H4.Zip (Branch_data) (E04 (Impls.Step.Keypair)) in - let f : type prev_vars prev_values local_widths local_heights. + let f : + type prev_vars prev_values local_widths local_heights. 
(prev_vars, prev_values, local_widths, local_heights) Branch_data.t -> Lazy_keys.t - -> ?handler:( Snarky_backendless.Request.request - -> Snarky_backendless.Request.response) + -> ?handler: + ( Snarky_backendless.Request.request + -> Snarky_backendless.Request.response) -> ( prev_values , local_widths , local_heights ) @@ -708,10 +720,11 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct let f ((app_state, T proof) : _ Statement_with_proof.t) = P.T { proof with - statement= + statement = { proof.statement with - pass_through= - {proof.statement.pass_through with app_state} } + pass_through = + { proof.statement.pass_through with app_state } + } } end) in @@ -722,12 +735,14 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct in let proof = { proof with - statement= + statement = { proof.statement with - pass_through= + pass_through = pad_pass_throughs (module Maxes) - proof.statement.pass_through } } + proof.statement.pass_through + } + } in let%map.Async.Deferred proof = Wrap.wrap ~max_branching:Max_branching.n full_signature.maxes @@ -739,14 +754,17 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct in Proof.T { proof with - statement= + statement = { proof.statement with - pass_through= - {proof.statement.pass_through with app_state= ()} } } + pass_through = + { proof.statement.pass_through with app_state = () } + } + } in wrap in - let rec go : type xs1 xs2 xs3 xs4. + let rec go : + type xs1 xs2 xs3 xs4. 
(xs1, xs2, xs3, xs4) H4.T(Branch_data).t -> (xs1, xs2, xs3, xs4) H4.T(E04(Lazy_keys)).t -> ( xs2 @@ -766,16 +784,17 @@ module Make (A : Statement_var_intf) (A_value : Statement_value_intf) = struct in Timer.clock __LOC__ ; let data : _ Types_map.Compiled.t = - { branches= Branches.n - ; branchings= step_widths - ; max_branching= (module Max_branching) + { branches = Branches.n + ; branchings = step_widths + ; max_branching = (module Max_branching) ; typ - ; value_to_field_elements= A_value.to_field_elements - ; var_to_field_elements= A.to_field_elements - ; wrap_key= Lazy.map wrap_vk ~f:Verification_key.commitments - ; wrap_vk= Lazy.map wrap_vk ~f:Verification_key.index + ; value_to_field_elements = A_value.to_field_elements + ; var_to_field_elements = A.to_field_elements + ; wrap_key = Lazy.map wrap_vk ~f:Verification_key.commitments + ; wrap_vk = Lazy.map wrap_vk ~f:Verification_key.index ; wrap_domains - ; step_domains } + ; step_domains + } in Timer.clock __LOC__ ; Types_map.add_exn self data ; @@ -790,23 +809,24 @@ module Side_loaded = struct let of_compiled tag : t = let d = Types_map.lookup_compiled tag.Tag.id in - { wrap_vk= Some (Lazy.force d.wrap_vk) - ; wrap_index= + { wrap_vk = Some (Lazy.force d.wrap_vk) + ; wrap_index = Lazy.force d.wrap_key |> Plonk_verification_key_evals.map ~f:Array.to_list - ; max_width= Width.of_int_exn (Nat.to_int (Nat.Add.n d.max_branching)) - ; step_data= + ; max_width = Width.of_int_exn (Nat.to_int (Nat.Add.n d.max_branching)) + ; step_data = At_most.of_vector (Vector.map2 d.branchings d.step_domains ~f:(fun width ds -> - ({Domains.h= ds.h}, Width.of_int_exn width) )) - (Nat.lte_exn (Vector.length d.step_domains) Max_branches.n) } + ({ Domains.h = ds.h }, Width.of_int_exn width))) + (Nat.lte_exn (Vector.length d.step_domains) Max_branches.n) + } module Max_width = Width.Max end - let in_circuit tag vk = Types_map.set_ephemeral tag {index= `In_circuit vk} + let in_circuit tag vk = Types_map.set_ephemeral tag { index = 
`In_circuit vk } - let in_prover tag vk = Types_map.set_ephemeral tag {index= `In_prover vk} + let in_prover tag vk = Types_map.set_ephemeral tag { index = `In_prover vk } let create ~name ~max_branching ~value_to_field_elements ~var_to_field_elements ~typ = @@ -815,7 +835,8 @@ module Side_loaded = struct ; value_to_field_elements ; var_to_field_elements ; typ - ; branches= Verification_key.Max_branches.n } + ; branches = Verification_key.Max_branches.n + } module Proof = Proof.Branching_max @@ -826,52 +847,53 @@ module Side_loaded = struct type nonrec t = t let to_field_elements = value_to_field_elements - end - : Intf.Statement_value + end : Intf.Statement_value with type t = t ) in (* TODO: This should be the actual max width on a per proof basis *) let max_branching = - (module Verification_key.Max_width - : Nat.Intf + (module Verification_key.Max_width : Nat.Intf with type n = Verification_key.Max_width.n ) in - with_return (fun {return} -> + with_return (fun { return } -> List.map ts ~f:(fun (vk, x, p) -> let vk : V.t = - { commitments= + { commitments = Plonk_verification_key_evals.map ~f:Array.of_list vk.wrap_index - ; step_domains= + ; step_domains = Array.map (At_most.to_array vk.step_data) ~f:(fun (d, w) -> let input_size = Side_loaded_verification_key.( input_size ~of_int:Fn.id ~add:( + ) ~mul:( * ) (Width.to_int vk.max_width)) in - { Domains.x= + { Domains.x = Pow_2_roots_of_unity (Int.ceil_log2 input_size) - ; h= d.h } ) - ; index= + ; h = d.h + }) + ; index = ( match vk.wrap_vk with | None -> return (Async.return false) | Some x -> x ) - ; data= + ; data = (* This isn't used in verify_heterogeneous, so we can leave this dummy *) - {constraints= 0} } + { constraints = 0 } + } in - Verify.Instance.T (max_branching, m, vk, x, p) ) - |> Verify.verify_heterogenous ) + Verify.Instance.T (max_branching, m, vk, x, p)) + |> Verify.verify_heterogenous) end -let compile - : type a_var a_value prev_varss prev_valuess widthss heightss max_branching branches. 
+let compile : + type a_var a_value prev_varss prev_valuess widthss heightss max_branching branches. ?self:(a_var, a_value, max_branching, branches) Tag.t -> ?cache:Key_cache.Spec.t list - -> ?disk_keys:(Cache.Step.Key.Verification.t, branches) Vector.t - * Cache.Wrap.Key.Verification.t + -> ?disk_keys: + (Cache.Step.Key.Verification.t, branches) Vector.t + * Cache.Wrap.Key.Verification.t -> (module Statement_var_intf with type t = a_var) -> (module Statement_value_intf with type t = a_value) -> typ:(a_var, a_value) Impls.Step.Typ.t @@ -879,14 +901,15 @@ let compile -> max_branching:(module Nat.Add.Intf with type n = max_branching) -> name:string -> constraint_constants:Snark_keys_header.Constraint_constants.t - -> choices:( self:(a_var, a_value, max_branching, branches) Tag.t - -> ( prev_varss - , prev_valuess - , widthss - , heightss - , a_var - , a_value ) - H4_2.T(Inductive_rule).t) + -> choices: + ( self:(a_var, a_value, max_branching, branches) Tag.t + -> ( prev_varss + , prev_valuess + , widthss + , heightss + , a_var + , a_value ) + H4_2.T(Inductive_rule).t) -> (a_var, a_value, max_branching, branches) Tag.t * Cache_handle.t * (module Proof_intf @@ -903,12 +926,13 @@ let compile let self = match self with | None -> - {Tag.id= Type_equal.Id.create ~name sexp_of_opaque; kind= Compiled} + { Tag.id = Type_equal.Id.create ~name sexp_of_opaque; kind = Compiled } | Some self -> self in let module M = Make (A_var) (A_value) in - let rec conv_irs : type v1ss v2ss wss hss. + let rec conv_irs : + type v1ss v2ss wss hss. 
(v1ss, v2ss, wss, hss, a_var, a_value) H4_2.T(Inductive_rule).t -> (v1ss, v2ss, wss, hss) H4.T(M.IR).t = function | [] -> @@ -951,11 +975,11 @@ let%test_module "test no side-loaded" = ( module struct let () = Tock.Keypair.set_urs_info - [On_disk {directory= "/tmp/"; should_write= true}] + [ On_disk { directory = "/tmp/"; should_write = true } ] let () = Tick.Keypair.set_urs_info - [On_disk {directory= "/tmp/"; should_write= true}] + [ On_disk { directory = "/tmp/"; should_write = true } ] open Impls.Step @@ -964,19 +988,19 @@ let%test_module "test no side-loaded" = module Statement = struct type t = Field.t - let to_field_elements x = [|x|] + let to_field_elements x = [| x |] module Constant = struct type t = Field.Constant.t [@@deriving bin_io] - let to_field_elements x = [|x|] + let to_field_elements x = [| x |] end end module Blockchain_snark = struct module Statement = Statement - let tag, _, p, Provers.[step] = + let tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile (module Statement) @@ -987,31 +1011,34 @@ let%test_module "test no side-loaded" = ~name:"blockchain-snark" ~constraint_constants: (* Dummy values *) - { sub_windows_per_window= 0 - ; ledger_depth= 0 - ; work_delay= 0 - ; block_window_duration_ms= 0 - ; transaction_capacity= Log_2 0 - ; pending_coinbase_depth= 0 - ; coinbase_amount= Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor= 0 - ; account_creation_fee= Unsigned.UInt64.of_int 0 - ; fork= None } + { sub_windows_per_window = 0 + ; ledger_depth = 0 + ; work_delay = 0 + ; block_window_duration_ms = 0 + ; transaction_capacity = Log_2 0 + ; pending_coinbase_depth = 0 + ; coinbase_amount = Unsigned.UInt64.of_int 0 + ; supercharged_coinbase_factor = 0 + ; account_creation_fee = Unsigned.UInt64.of_int 0 + ; fork = None + } ~choices:(fun ~self -> - [ { identifier= "main" - ; prevs= [self; self] - ; main= - (fun [prev; _] self -> + [ { identifier = "main" + ; prevs = [ self; self ] + ; main = + (fun [ prev; _ ] self -> let 
is_base_case = Field.equal Field.zero self in let proof_must_verify = Boolean.not is_base_case in let self_correct = Field.(equal (one + prev) self) in - Boolean.Assert.any [self_correct; is_base_case] ; - [proof_must_verify; Boolean.false_] ) - ; main_value= + Boolean.Assert.any [ self_correct; is_base_case ] ; + [ proof_must_verify; Boolean.false_ ]) + ; main_value = (fun _ self -> let is_base_case = Field.Constant.(equal zero self) in let proof_must_verify = not is_base_case in - [proof_must_verify; false] ) } ] ) ) + [ proof_must_verify; false ]) + } + ])) module Proof = (val p) end @@ -1025,22 +1052,22 @@ let%test_module "test no side-loaded" = Common.time "b0" (fun () -> Async.Thread_safe.block_on_async_exn (fun () -> Blockchain_snark.step - [(s_neg_one, b_neg_one); (s_neg_one, b_neg_one)] - Field.Constant.zero ) ) + [ (s_neg_one, b_neg_one); (s_neg_one, b_neg_one) ] + Field.Constant.zero)) in let b1 = Common.time "b1" (fun () -> Async.Thread_safe.block_on_async_exn (fun () -> Blockchain_snark.step - [(Field.Constant.zero, b0); (Field.Constant.zero, b0)] - Field.Constant.one ) ) + [ (Field.Constant.zero, b0); (Field.Constant.zero, b0) ] + Field.Constant.one)) in - [(Field.Constant.zero, b0); (Field.Constant.one, b1)] + [ (Field.Constant.zero, b0); (Field.Constant.one, b1) ] let%test_unit "verify" = assert ( Async.Thread_safe.block_on_async_exn (fun () -> - Blockchain_snark.Proof.verify xs ) ) + Blockchain_snark.Proof.verify xs) ) end ) (* diff --git a/src/lib/pickles/pickles.mli b/src/lib/pickles/pickles.mli index b68e507ac35..669b14a927d 100644 --- a/src/lib/pickles/pickles.mli +++ b/src/lib/pickles/pickles.mli @@ -46,7 +46,7 @@ module Verification_key : sig val load : cache:Key_cache.Spec.t list -> Id.t - -> (t * [`Cache_hit | `Locally_generated]) Async.Deferred.Or_error.t + -> (t * [ `Cache_hit | `Locally_generated ]) Async.Deferred.Or_error.t end module type Proof_intf = sig @@ -94,8 +94,9 @@ val verify : module Prover : sig type ('prev_values, 
'local_widths, 'local_heights, 'a_value, 'proof) t = - ?handler:( Snarky_backendless.Request.request - -> Snarky_backendless.Request.response) + ?handler: + ( Snarky_backendless.Request.request + -> Snarky_backendless.Request.response) -> ( 'prev_values , 'local_widths , 'local_heights ) @@ -107,7 +108,7 @@ end module Provers : module type of H3_2.T (Prover) module Dirty : sig - type t = [`Cache_hit | `Generated_something | `Locally_generated] + type t = [ `Cache_hit | `Generated_something | `Locally_generated ] val ( + ) : t -> t -> t end @@ -172,12 +173,12 @@ module Side_loaded : sig -> bool Async.Deferred.t (* Must be called in the inductive rule snarky function defining a - rule for which this tag is used as a predecessor. *) + rule for which this tag is used as a predecessor. *) val in_circuit : ('var, 'value, 'n1, 'n2) Tag.t -> Verification_key.Checked.t -> unit (* Must be called immediately before calling the prover for the inductive rule - for which this tag is used as a predecessor. *) + for which this tag is used as a predecessor. 
*) val in_prover : ('var, 'value, 'n1, 'n2) Tag.t -> Verification_key.t -> unit end @@ -187,8 +188,9 @@ end val compile : ?self:('a_var, 'a_value, 'max_branching, 'branches) Tag.t -> ?cache:Key_cache.Spec.t list - -> ?disk_keys:(Cache.Step.Key.Verification.t, 'branches) Vector.t - * Cache.Wrap.Key.Verification.t + -> ?disk_keys: + (Cache.Step.Key.Verification.t, 'branches) Vector.t + * Cache.Wrap.Key.Verification.t -> (module Statement_var_intf with type t = 'a_var) -> (module Statement_value_intf with type t = 'a_value) -> typ:('a_var, 'a_value) Impls.Step.Typ.t @@ -196,14 +198,15 @@ val compile : -> max_branching:(module Nat.Add.Intf with type n = 'max_branching) -> name:string -> constraint_constants:Snark_keys_header.Constraint_constants.t - -> choices:( self:('a_var, 'a_value, 'max_branching, 'branches) Tag.t - -> ( 'prev_varss - , 'prev_valuess - , 'widthss - , 'heightss - , 'a_var - , 'a_value ) - H4_2.T(Inductive_rule).t) + -> choices: + ( self:('a_var, 'a_value, 'max_branching, 'branches) Tag.t + -> ( 'prev_varss + , 'prev_valuess + , 'widthss + , 'heightss + , 'a_var + , 'a_value ) + H4_2.T(Inductive_rule).t) -> ('a_var, 'a_value, 'max_branching, 'branches) Tag.t * Cache_handle.t * (module Proof_intf diff --git a/src/lib/pickles/plonk_checks/plonk_checks.ml b/src/lib/pickles/plonk_checks/plonk_checks.ml index 3dc7a3030f3..d9ceb74a16a 100644 --- a/src/lib/pickles/plonk_checks/plonk_checks.ml +++ b/src/lib/pickles/plonk_checks/plonk_checks.ml @@ -5,15 +5,15 @@ open Tuple_lib module Domain = Domain type 'field vanishing_polynomial_domain = - < vanishing_polynomial: 'field -> 'field > + < vanishing_polynomial : 'field -> 'field > type 'field plonk_domain = - < vanishing_polynomial: 'field -> 'field - ; shifts: 'field Marlin_plonk_bindings.Types.Plonk_verification_shifts.t - ; generator: 'field - ; size: 'field > + < vanishing_polynomial : 'field -> 'field + ; shifts : 'field Marlin_plonk_bindings.Types.Plonk_verification_shifts.t + ; generator : 'field + ; 
size : 'field > -type 'field domain = < size: 'field ; vanishing_polynomial: 'field -> 'field > +type 'field domain = < size : 'field ; vanishing_polynomial : 'field -> 'field > let debug = false @@ -108,10 +108,10 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) (* x^5 *) square (square x) * x in - let {Marlin_plonk_bindings.Types.Plonk_verification_shifts.r; o} = + let { Marlin_plonk_bindings.Types.Plonk_verification_shifts.r; o } = domain#shifts in - fun ({alpha; beta; gamma; zeta} : _ Minimal.t) + fun ({ alpha; beta; gamma; zeta } : _ Minimal.t) ((e0, e1) : _ Dlog_plonk_types.Evals.t Double.t) p_eval0 -> let alphas = let arr = @@ -151,7 +151,7 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) * (e0.o + (bz * o) + gamma) * alpha * zkp + (alphas Range.perm 0 * vp_zeta / (zeta - one)) - + (alphas Range.perm 1 * vp_zeta / (zeta - w3)) ) + + (alphas Range.perm 1 * vp_zeta / (zeta - w3))) in let perm1 = let beta_sigma1 = with_label __LOC__ (fun () -> beta * e0.sigma1) in @@ -160,7 +160,7 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) with_label __LOC__ (fun () -> negate (e0.l + beta_sigma1 + gamma) * (e0.r + beta_sigma2 + gamma) - * (e1.z * beta_alpha * zkp) ) + * (e1.z * beta_alpha * zkp)) in (perm0, perm1) in @@ -169,14 +169,14 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) let gnrc_o = e0.o in let psdn0 = let lro = - let s = [|sbox e0.l; sbox e0.r; sbox e0.o|] in + let s = [| sbox e0.l; sbox e0.r; sbox e0.o |] in Array.map mds ~f:(fun m -> - Array.reduce_exn ~f:F.( + ) (Array.map2_exn s m ~f:F.( * )) ) + Array.reduce_exn ~f:F.( + ) (Array.map2_exn s m ~f:F.( * ))) in with_label __LOC__ (fun () -> - Array.mapi [|e1.l; e1.r; e1.o|] ~f:(fun i e -> - (lro.(i) - e) * alphas Range.psdn i ) - |> Array.reduce_exn ~f:( + ) ) + Array.mapi [| e1.l; e1.r; e1.o |] ~f:(fun i e -> + (lro.(i) - e) * alphas Range.psdn i) + |> Array.reduce_exn ~f:( + )) in let ecad0 = 
with_label __LOC__ (fun () -> @@ -184,21 +184,21 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) * alphas Range.add 0 + ( ((e1.l + e1.r + e1.o) * (e1.l - e1.o) * (e1.l - e1.o)) - ((e0.o + e0.l) * (e0.o + e0.l)) ) - * alphas Range.add 1 ) + * alphas Range.add 1) in let vbmul0, vbmul1 = let tmp = double e0.l - square e0.r + e1.r in ( with_label __LOC__ (fun () -> ((square e0.r - e0.r) * alphas Range.mul 0) + (((e1.l - e0.l) * e1.r) - e1.o + (e0.o * (double e0.r - one))) - * alphas Range.mul 1 ) + * alphas Range.mul 1) , with_label __LOC__ (fun () -> ( square (double e0.o - (tmp * e0.r)) - ((square e0.r - e1.r + e1.l) * square tmp) ) * alphas Range.mul 2 + ( ((e0.l - e1.l) * (double e0.o - (tmp * e0.r))) - ((e1.o + e0.o) * tmp) ) - * alphas Range.mul 3 ) ) + * alphas Range.mul 3) ) in let endomul0, endomul1, endomul2 = let xr = square e0.r - e0.l - e1.r in @@ -208,15 +208,14 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) ((square e0.l - e0.l) * alphas Range.endml 0) + ((square e1.l - e1.l) * alphas Range.endml 1) + (e1.r - ((one + (e0.l * (endo - one))) * e0.r)) - * alphas Range.endml 2 ) + * alphas Range.endml 2) , with_label __LOC__ (fun () -> (((e1.l - e0.r) * e1.r) - e1.o + (e0.o * (double e0.l - one))) - * alphas Range.endml 3 ) + * alphas Range.endml 3) , with_label __LOC__ (fun () -> - (square u - (square t * (xr + e0.l + e1.l))) - * alphas Range.endml 4 + ((square u - (square t * (xr + e0.l + e1.l))) * alphas Range.endml 4) + (((e0.l - e1.l) * u) - (t * (e0.o + e1.o))) - * alphas Range.endml 5 ) ) + * alphas Range.endml 5) ) in let linearization_check = let w = w3 in @@ -247,7 +246,8 @@ let derive_plonk (type t) ?(with_label = fun _ (f : unit -> t) -> f ()) ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 } + ; endomul2 + } , linearization_check ) let checked (type t) @@ -257,10 +257,11 @@ let checked (type t) derive_plonk ~with_label:Impl.with_label (module Impl.Field) ~endo ~mds ~domain ~shift - { alpha= 
plonk.alpha - ; beta= plonk.beta - ; gamma= plonk.gamma - ; zeta= plonk.zeta } + { alpha = plonk.alpha + ; beta = plonk.beta + ; gamma = plonk.gamma + ; zeta = plonk.zeta + } evals p0 in let open Impl in @@ -280,5 +281,6 @@ let checked (type t) ; vbmul1 ; endomul0 ; endomul1 - ; endomul2 ] - |> Boolean.all ) + ; endomul2 + ] + |> Boolean.all) diff --git a/src/lib/pickles/plonk_curve_ops.ml b/src/lib/pickles/plonk_curve_ops.ml index 05172464f71..d5776a8fd1e 100644 --- a/src/lib/pickles/plonk_curve_ops.ml +++ b/src/lib/pickles/plonk_curve_ops.ml @@ -5,6 +5,7 @@ module Make (G : Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) = struct open Zexe_backend_common.Plonk_constraint_system.Plonk_constraint + open Impl let seal = Tuple_lib.Double.map ~f:(Util.seal (module Impl)) @@ -15,20 +16,21 @@ struct let p3 = exists G.typ_unchecked ~compute: - As_prover.( - fun () -> G.Constant.( + ) (read G.typ p1) (read G.typ p2)) + As_prover.(fun () -> G.Constant.( + ) (read G.typ p1) (read G.typ p2)) in assert_ - [ { annotation= Some __LOC__ - ; basic= + [ { annotation = Some __LOC__ + ; basic = Zexe_backend_common.Plonk_constraint_system.Plonk_constraint.T - (EC_add {p1; p2; p3}) } ] ; + (EC_add { p1; p2; p3 }) + } + ] ; p3 let scale_fast t (`Plus_two_to_len scalar) = let ((xt, yt) as t) = with_label __LOC__ (fun () -> - Tuple_lib.Double.map ~f:(Util.seal (module Impl)) t ) + Tuple_lib.Double.map ~f:(Util.seal (module Impl)) t) in let module S = Zexe_backend_common.Scale_round in let rec go rows p i = @@ -58,7 +60,7 @@ struct G.Constant.(p + (p + tl'))) in let open Field.Constant in - let row = {S.xt; yt; b= (b :> Field.t); xp; yp; l1; xs; ys} in + let row = { S.xt; yt; b = (b :> Field.t); xp; yp; l1; xs; ys } in go (row :: rows) s Int.(i - 1) in let n = Array.length scalar in @@ -70,11 +72,12 @@ struct else let state = go [] p Int.(n - 2) in assert_ - [ { annotation= Some __LOC__ - ; basic= - Zexe_backend_common.Plonk_constraint_system.Plonk_constraint - .T - 
(EC_scale {state}) } ] ; + [ { annotation = Some __LOC__ + ; basic = + Zexe_backend_common.Plonk_constraint_system.Plonk_constraint.T + (EC_scale { state }) + } + ] ; let fin = state.(Int.(n - 2)) in (fin.xs, fin.ys) in @@ -106,14 +109,16 @@ struct G.typ (fun (g, s) -> make_checked (fun () -> - scale_fast g (`Plus_two_to_len (Array.of_list s)) ) ) + scale_fast g (`Plus_two_to_len (Array.of_list s)))) (fun (g, s) -> let open G.Constant.Scalar in - let shift = project (List.init n ~f:(fun _ -> false) @ [true]) in + let shift = + project (List.init n ~f:(fun _ -> false) @ [ true ]) + in let x = project s + shift in - G.Constant.scale g x ) + G.Constant.scale g x) (random_point, xs) with e -> Core.eprintf !"Input %{sexp: bool list}\n%!" xs ; - raise e ) + raise e) end diff --git a/src/lib/pickles/precomputed/gen_values/gen_values.ml b/src/lib/pickles/precomputed/gen_values/gen_values.ml index 88acbcd845b..690b522fedd 100644 --- a/src/lib/pickles/precomputed/gen_values/gen_values.ml +++ b/src/lib/pickles/precomputed/gen_values/gen_values.ml @@ -36,9 +36,9 @@ let vesta = ksprintf time "vesta %d" i (fun () -> Marlin_plonk_bindings.Pasta_fp_urs.lagrange_commitment (Vesta_based_plonk.Keypair.load_urs ()) - ~domain_size i ) + ~domain_size i) |> Zexe_backend.Pasta.Fp_poly_comm.of_backend_without_degree_bound - |> unwrap ) ) + |> unwrap)) let pallas = let max_domain_log2 = Nat.to_int Pallas_based_plonk.Rounds.n in @@ -51,9 +51,9 @@ let pallas = ksprintf time "pallas %d" i (fun () -> Marlin_plonk_bindings.Pasta_fq_urs.lagrange_commitment (Pallas_based_plonk.Keypair.load_urs ()) - ~domain_size i ) + ~domain_size i) |> Zexe_backend.Pasta.Fq_poly_comm.of_backend_without_degree_bound - |> unwrap ) ) + |> unwrap)) let mk xss ~f = let module E = Ppxlib.Ast_builder.Make (struct @@ -62,7 +62,7 @@ let mk xss ~f = let open E in pexp_array (List.map xss ~f:(fun xs -> - pexp_array (List.map xs ~f:(fun g -> pexp_array (List.map g ~f))) )) + pexp_array (List.map xs ~f:(fun g -> pexp_array 
(List.map g ~f))))) let structure = let loc = Ppxlib.Location.none in @@ -86,11 +86,11 @@ let structure = let vesta = let f s = Fq.of_bigint (Bigint256.of_hex_string s) in - [%e mk vesta ~f:(fun (x, y) -> pexp_tuple [fq x; fq y])] + [%e mk vesta ~f:(fun (x, y) -> pexp_tuple [ fq x; fq y ])] let pallas = let f s = Fp.of_bigint (Bigint256.of_hex_string s) in - [%e mk pallas ~f:(fun (x, y) -> pexp_tuple [fp x; fp y])] + [%e mk pallas ~f:(fun (x, y) -> pexp_tuple [ fp x; fp y ])] end] let () = diff --git a/src/lib/pickles/proof.ml b/src/lib/pickles/proof.ml index ffd8eae1c1a..fc8427b489c 100644 --- a/src/lib/pickles/proof.ml +++ b/src/lib/pickles/proof.ml @@ -16,14 +16,15 @@ module Base = struct , 'dlog_me_onlys , 'prev_evals ) t = - { statement: + { statement : ( 'unfinalized_proofs , ('s, 'sgs, 'bp_chals) Me_only.Pairing_based.t , 'dlog_me_onlys ) Types.Pairing_based.Statement.t - ; index: Index.t - ; prev_evals: 'prev_evals - ; proof: Tick.Proof.t } + ; index : Index.t + ; prev_evals : 'prev_evals + ; proof : Tick.Proof.t + } end module Double = struct @@ -42,7 +43,7 @@ module Base = struct module V1 = struct type ('dlog_me_only, 'pairing_me_only) t = - { statement: + { statement : ( Limb_vector.Constant.Hex64.Stable.V1.t Vector.Vector_2.Stable.V1.t , Limb_vector.Constant.Hex64.Stable.V1.t @@ -60,19 +61,20 @@ module Base = struct Step_bp_vec.Stable.V1.t , Index.Stable.V1.t ) Types.Dlog_based.Statement.Minimal.Stable.V1.t - ; prev_evals: + ; prev_evals : Tick.Field.Stable.V1.t Dlog_plonk_types.Pc_array.Stable.V1.t Dlog_plonk_types.Evals.Stable.V1.t Double.Stable.V1.t - ; prev_x_hat: Tick.Field.Stable.V1.t Double.Stable.V1.t - ; proof: Tock.Proof.Stable.V1.t } + ; prev_x_hat : Tick.Field.Stable.V1.t Double.Stable.V1.t + ; proof : Tock.Proof.Stable.V1.t + } [@@deriving compare, sexp, yojson, hash, equal] end end] type ('dlog_me_only, 'pairing_me_only) t = ('dlog_me_only, 'pairing_me_only) Stable.Latest.t = - { statement: + { statement : ( Challenge.Constant.t , 
Challenge.Constant.t Scalar_challenge.t , Tick.Field.t Shifted_value.t @@ -84,11 +86,12 @@ module Base = struct Step_bp_vec.t , Index.t ) Types.Dlog_based.Statement.Minimal.t - ; prev_evals: + ; prev_evals : Tick.Field.t Dlog_plonk_types.Pc_array.t Dlog_plonk_types.Evals.t Double.t - ; prev_x_hat: Tick.Field.t Double.t - ; proof: Tock.Proof.t } + ; prev_x_hat : Tick.Field.t Double.t + ; proof : Tock.Proof.t + } [@@deriving compare, sexp, yojson, hash, equal] end end @@ -124,60 +127,72 @@ let dummy (type w h r) (_w : w Nat.t) (h : h Nat.t) Commitment_lengths.of_domains wrap_domains ~max_degree:Max_degree.wrap in T - { statement= - { proof_state= - { deferred_values= - { xi= scalar_chal () - ; combined_inner_product= Shifted_value (tick ()) - ; b= Shifted_value (tick ()) - ; which_branch= Option.value_exn (Index.of_int 0) - ; bulletproof_challenges= Dummy.Ipa.Step.challenges - ; plonk= - { alpha= scalar_chal () - ; beta= chal () - ; gamma= chal () - ; zeta= scalar_chal () } } - ; sponge_digest_before_evaluations= + { statement = + { proof_state = + { deferred_values = + { xi = scalar_chal () + ; combined_inner_product = Shifted_value (tick ()) + ; b = Shifted_value (tick ()) + ; which_branch = Option.value_exn (Index.of_int 0) + ; bulletproof_challenges = Dummy.Ipa.Step.challenges + ; plonk = + { alpha = scalar_chal () + ; beta = chal () + ; gamma = chal () + ; zeta = scalar_chal () + } + } + ; sponge_digest_before_evaluations = Digest.Constant.of_tock_field Tock.Field.zero - ; me_only= - { sg= Lazy.force Dummy.Ipa.Step.sg - ; old_bulletproof_challenges= - Vector.init h ~f:(fun _ -> Dummy.Ipa.Wrap.challenges) } } - ; pass_through= - { app_state= () - ; old_bulletproof_challenges= + ; me_only = + { sg = Lazy.force Dummy.Ipa.Step.sg + ; old_bulletproof_challenges = + Vector.init h ~f:(fun _ -> Dummy.Ipa.Wrap.challenges) + } + } + ; pass_through = + { app_state = () + ; old_bulletproof_challenges = (* Not sure if this should be w or h honestly ...*) Vector.init 
most_recent_width ~f:(fun _ -> - Dummy.Ipa.Step.challenges ) + Dummy.Ipa.Step.challenges) (* TODO: Should this be wrap? *) - ; sg= + ; sg = Vector.init most_recent_width ~f:(fun _ -> - Lazy.force Dummy.Ipa.Wrap.sg ) } } - ; proof= - { messages= - { l_comm= g lengths.l - ; r_comm= g lengths.r - ; o_comm= g lengths.o - ; z_comm= g lengths.z - ; t_comm= - { unshifted= + Lazy.force Dummy.Ipa.Wrap.sg) + } + } + ; proof = + { messages = + { l_comm = g lengths.l + ; r_comm = g lengths.r + ; o_comm = g lengths.o + ; z_comm = g lengths.z + ; t_comm = + { unshifted = Array.map (g lengths.t) ~f:(fun x -> Or_infinity.Finite x) - ; shifted= Finite g0 } } - ; openings= - { proof= - { lr= + ; shifted = Finite g0 + } + } + ; openings = + { proof = + { lr = Array.init (Nat.to_int Tock.Rounds.n) ~f:(fun _ -> (g0, g0)) - ; z_1= Ro.tock () - ; z_2= Ro.tock () - ; delta= g0 - ; sg= g0 } - ; evals= + ; z_1 = Ro.tock () + ; z_2 = Ro.tock () + ; delta = g0 + ; sg = g0 + } + ; evals = (let e () = Dlog_plonk_types.Evals.map lengths ~f:tock in - (e (), e ())) } } - ; prev_evals= + (e (), e ())) + } + } + ; prev_evals = (let e () = Dlog_plonk_types.Evals.map lengths ~f:tick_arr in (e (), e ())) - ; prev_x_hat= (tick (), tick ()) } + ; prev_x_hat = (tick (), tick ()) + } module Make (W : Nat.Intf) (MLMB : Nat.Intf) = struct module Max_branching_at_most = At_most.With_length (W) @@ -203,14 +218,16 @@ module Make (W : Nat.Intf) (MLMB : Nat.Intf) = struct let to_repr (T t) : Repr.t = let lte = Nat.lte_exn (Vector.length t.statement.pass_through.sg) W.n in { t with - statement= + statement = { t.statement with - pass_through= + pass_through = { t.statement.pass_through with - sg= At_most.of_vector t.statement.pass_through.sg lte - ; old_bulletproof_challenges= + sg = At_most.of_vector t.statement.pass_through.sg lte + ; old_bulletproof_challenges = At_most.of_vector - t.statement.pass_through.old_bulletproof_challenges lte } } + t.statement.pass_through.old_bulletproof_challenges lte + } + } } let 
of_repr (r : Repr.t) : t = @@ -223,10 +240,11 @@ module Make (W : Nat.Intf) (MLMB : Nat.Intf) = struct in T { r with - statement= + statement = { r.statement with - pass_through= - {r.statement.pass_through with sg; old_bulletproof_challenges} } + pass_through = + { r.statement.pass_through with sg; old_bulletproof_challenges } + } } let compare t1 t2 = Repr.compare (to_repr t1) (to_repr t2) @@ -268,8 +286,7 @@ module Branching_2 = struct Dlog_based.Proof_state.Me_only.Stable.V1.t , ( unit , Tock.Curve.Affine.t At_most.At_most_2.Stable.V1.t - , Limb_vector.Constant.Hex64.Stable.V1.t - Vector.Vector_2.Stable.V1.t + , Limb_vector.Constant.Hex64.Stable.V1.t Vector.Vector_2.Stable.V1.t Scalar_challenge.Stable.V1.t Bulletproof_challenge.Stable.V1.t Step_bp_vec.Stable.V1.t @@ -338,8 +355,7 @@ module Branching_max = struct , ( unit , Tock.Curve.Affine.t Side_loaded_verification_key.Width.Max_at_most.Stable.V1.t - , Limb_vector.Constant.Hex64.Stable.V1.t - Vector.Vector_2.Stable.V1.t + , Limb_vector.Constant.Hex64.Stable.V1.t Vector.Vector_2.Stable.V1.t Scalar_challenge.Stable.V1.t Bulletproof_challenge.Stable.V1.t Step_bp_vec.Stable.V1.t diff --git a/src/lib/pickles/pseudo/pseudo.ml b/src/lib/pickles/pseudo/pseudo.ml index 23aa694911f..3ec060a5da6 100644 --- a/src/lib/pickles/pseudo/pseudo.ml +++ b/src/lib/pickles/pseudo/pseudo.ml @@ -32,7 +32,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.Run) = struct let pow = Field.(Pcs_batch.pow ~one ~mul) in choose t ~f:(fun deg -> let d = deg mod crs_max_degree in - pow x (crs_max_degree - d) ) + pow x (crs_max_degree - d)) end module Domain = struct @@ -40,7 +40,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.Run) = struct let shifts = Vector.map log2s ~f:(fun d -> shifts ~log2_size:d) in let open Marlin_plonk_bindings.Types.Plonk_verification_shifts in let mk f = mask which (Vector.map shifts ~f) in - {r= mk (fun {r; _} -> r); o= mk (fun {o; _} -> o)} + { r = mk (fun { r; _ } -> r); o = mk (fun { o; _ } -> o) } let 
generator (type n) ((which, log2s) : (int, n) t) ~domain_generator = mask which (Vector.map log2s ~f:(fun d -> domain_generator ~log2_size:d)) @@ -57,7 +57,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.Run) = struct let max_log2 = let _, ds = t in List.fold (Vector.to_list ds) ~init:0 ~f:(fun acc d -> - Int.max acc (Domain.log2_size d) ) + Int.max acc (Domain.log2_size d)) in object method size = size diff --git a/src/lib/pickles/reduced_me_only.ml b/src/lib/pickles/reduced_me_only.ml index 405a51d3f2b..511b9c87171 100644 --- a/src/lib/pickles/reduced_me_only.ml +++ b/src/lib/pickles/reduced_me_only.ml @@ -14,17 +14,18 @@ module Pairing_based = struct module Stable = struct module V1 = struct type ('s, 'sgs, 'bpcs) t = - {app_state: 's; sg: 'sgs; old_bulletproof_challenges: 'bpcs} + { app_state : 's; sg : 'sgs; old_bulletproof_challenges : 'bpcs } [@@deriving sexp, yojson, sexp, compare, hash, equal] end end] - let prepare ~dlog_plonk_index {app_state; sg; old_bulletproof_challenges} = + let prepare ~dlog_plonk_index { app_state; sg; old_bulletproof_challenges } = { Pairing_based.Proof_state.Me_only.app_state ; sg ; dlog_plonk_index - ; old_bulletproof_challenges= - Vector.map ~f:Ipa.Step.compute_challenges old_bulletproof_challenges } + ; old_bulletproof_challenges = + Vector.map ~f:Ipa.Step.compute_challenges old_bulletproof_challenges + } end module Dlog_based = struct @@ -73,8 +74,9 @@ module Dlog_based = struct Dlog_based.Proof_state.Me_only.t end - let prepare ({sg; old_bulletproof_challenges} : _ t) = + let prepare ({ sg; old_bulletproof_challenges } : _ t) = { Dlog_based.Proof_state.Me_only.sg - ; old_bulletproof_challenges= - Vector.map ~f:Ipa.Wrap.compute_challenges old_bulletproof_challenges } + ; old_bulletproof_challenges = + Vector.map ~f:Ipa.Wrap.compute_challenges old_bulletproof_challenges + } end diff --git a/src/lib/pickles/requests.ml b/src/lib/pickles/requests.ml index a0c218b91e2..761b460260e 100644 --- a/src/lib/pickles/requests.ml 
+++ b/src/lib/pickles/requests.ml @@ -31,8 +31,7 @@ module Wrap = struct ( ( ( Challenge.Constant.t , Challenge.Constant.t Scalar_challenge.t , Field.Constant.t Shifted_value.t - , ( Challenge.Constant.t Scalar_challenge.t - Bulletproof_challenge.t + , ( Challenge.Constant.t Scalar_challenge.t Bulletproof_challenge.t , Tock.Rounds.n ) Vector.t , Digest.Constant.t @@ -136,8 +135,8 @@ module Step = struct | App_state : statement t end - let create - : type local_signature local_branches statement prev_values max_branching. + let create : + type local_signature local_branches statement prev_values max_branching. unit -> (module S with type local_signature = local_signature diff --git a/src/lib/pickles/scalar_challenge.ml b/src/lib/pickles/scalar_challenge.ml index d478819f8c7..03ba7d6e320 100644 --- a/src/lib/pickles/scalar_challenge.ml +++ b/src/lib/pickles/scalar_challenge.ml @@ -24,8 +24,8 @@ let to_field_checked (type f) let zero = F.Constant.zero in for i = (128 / 2) - 1 downto 0 do (* s = -1 + 2 * r_2i - a_next = - if r_2i1 + a_next = + if r_2i1 then 2 a_prev + s else 2 a_prev = @@ -58,9 +58,9 @@ let to_field_checked (type f) <-> two_b_prev_minus_b_next = 2 b_prev - b_next && - 0 + 0 = two_b_prev_minus_b_next - 1 + 2 r_2i + r_2i1 - 2 r_2i1 r_2i - = 2 r_2i + r_2i1 - 2 r_2i1 r_2i + two_b_prev_minus_b_next + -1 + = 2 r_2i + r_2i1 - 2 r_2i1 r_2i + two_b_prev_minus_b_next + -1 *) let open Impl in let a_next = @@ -102,8 +102,10 @@ let to_field_checked (type f) in let open Zexe_backend_common.Plonk_constraint_system.Plonk_constraint in let p l r o m c = - [ { Snarky_backendless.Constraint.annotation= None - ; basic= T (Basic {l; r; o; m; c}) } ] + [ { Snarky_backendless.Constraint.annotation = None + ; basic = T (Basic { l; r; o; m; c }) + } + ] in let two = F.Constant.of_int 2 in let r_2i = (bits.(2 * i) :> F.t) in @@ -122,15 +124,16 @@ let to_field_checked (type f) (* 2 r_2i + r_2i1 - 2 r_2i1 r_2i + two_b_prev_minus_b_next + -1 *) ; p (two, r_2i) (one, r_2i1) (one, 
two_b_prev_minus_b_next) - (F.Constant.negate two) neg_one ] ; + (F.Constant.negate two) neg_one + ] ; a := a_next ; b := b_next done ; F.(scale !a endo + !b) let to_field_constant (type f) ~endo - (module F : Plonk_checks.Field_intf with type t = f) - (SC.Scalar_challenge c) = + (module F : Plonk_checks.Field_intf with type t = f) (SC.Scalar_challenge c) + = let bits = Array.of_list (Challenge.Constant.to_bits c) in let a = ref (F.of_int 2) in let b = ref (F.of_int 2) in @@ -162,25 +165,24 @@ let test (type f) Field.typ (fun s -> make_checked (fun () -> - to_field_checked (module Impl) ~endo (SC.Scalar_challenge s) ) - ) + to_field_checked (module Impl) ~endo (SC.Scalar_challenge s))) (fun s -> to_field_constant (module Field.Constant) ~endo - (Scalar_challenge (Challenge.Constant.of_bits s)) ) + (Scalar_challenge (Challenge.Constant.of_bits s))) xs with e -> Core.eprintf !"Input %{sexp: bool list}\n%!" xs ; - raise e ) + raise e) module Make (Impl : Snarky_backendless.Snark_intf.Run with type prover_state = unit) (G : Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) (Challenge : Challenge.S with module Impl := Impl) (Endo : sig - val base : Impl.Field.Constant.t + val base : Impl.Field.Constant.t - val scalar : G.Constant.Scalar.t + val scalar : G.Constant.Scalar.t end) = struct open Impl @@ -250,26 +252,29 @@ struct (!yp - ((!b2il + !b2il - one) * !yt)) / (!xp - !xq)) in let row = - { Zexe_backend_common.Endoscale_round.b2i1= b2i1l + { Zexe_backend_common.Endoscale_round.b2i1 = b2i1l ; xt - ; b2i= b2il + ; b2i = b2il ; xq ; yt ; xp ; l1 ; yp ; xs - ; ys } + ; ys + } in go (row :: rows) s (i - 1) in let p = G.double (G.( + ) (Field.scale xt endo, yt) (xt, yt)) in let state = go [] p Int.(n - 1) in assert_ - [ { basic= + [ { basic = Zexe_backend_common.Plonk_constraint_system.Plonk_constraint.T - (EC_endoscale {state}) - ; annotation= None } ] ; + (EC_endoscale { state }) + ; annotation = None + } + ] ; let finish = state.(Int.(n - 1)) in (finish.xs, 
finish.ys) @@ -293,17 +298,17 @@ struct (Typ.tuple2 G.typ (Typ.list ~length:n Boolean.typ)) G.typ (fun (g, s) -> - make_checked (fun () -> endo g (SC.Scalar_challenge s)) ) + make_checked (fun () -> endo g (SC.Scalar_challenge s))) (fun (g, s) -> let x = Constant.to_field (Scalar_challenge (Challenge.Constant.of_bits s)) in - G.Constant.scale g x ) + G.Constant.scale g x) (random_point, xs) with e -> Core.eprintf !"Input %{sexp: bool list}\n%!" xs ; - raise e ) + raise e) let endo_inv ((gx, gy) as g) chal = let res = diff --git a/src/lib/pickles/side_loaded_verification_key.ml b/src/lib/pickles/side_loaded_verification_key.ml index 29aa64692f5..910819e2646 100644 --- a/src/lib/pickles/side_loaded_verification_key.ml +++ b/src/lib/pickles/side_loaded_verification_key.ml @@ -7,8 +7,8 @@ module V = Pickles_base.Side_loaded_verification_key include ( V : module type of V - with module Width := V.Width - and module Domains := V.Domains ) + with module Width := V.Width + and module Domains := V.Domains ) let bits = V.bits @@ -19,7 +19,7 @@ let input_size ~of_int ~add ~mul w = let (T (typ, conv)) = Impls.Step.input ~branching:a ~wrap_rounds:Backend.Tock.Rounds.n in - Impls.Step.Data_spec.size [typ] + Impls.Step.Data_spec.size [ typ ] in let f0 = size Nat.N0.n in let slope = size Nat.N1.n - f0 in @@ -87,11 +87,11 @@ end = struct (Vector.typ Boolean.typ Length.n) ~there:(fun x -> let x = to_int x in - Vector.init Length.n ~f:(fun i -> (x lsr i) land 1 = 1) ) + Vector.init Length.n ~f:(fun i -> (x lsr i) land 1 = 1)) ~back:(fun v -> Vector.foldi v ~init:0 ~f:(fun i acc b -> - if b then acc lor (1 lsl i) else acc ) - |> of_int_exn ) + if b then acc lor (1 lsl i) else acc) + |> of_int_exn) end module Domain = struct @@ -108,18 +108,18 @@ module Domains = struct let dom = Typ.transport Typ.field ~there:(fun (Plonk_checks.Domain.Pow_2_roots_of_unity n) -> - Field.Constant.of_int n ) + Field.Constant.of_int n) ~back:(fun _ -> assert false) |> Typ.transport_var ~there:(fun 
(Domain.Pow_2_roots_of_unity n) -> n) ~back:(fun n -> Domain.Pow_2_roots_of_unity n) in - Typ.of_hlistable [dom] ~var_to_hlist:to_hlist ~value_to_hlist:to_hlist + Typ.of_hlistable [ dom ] ~var_to_hlist:to_hlist ~value_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_of_hlist:of_hlist end (* TODO: Probably better to have these match the step rounds. *) -let max_domains = {Domains.h= Domain.Pow_2_roots_of_unity 20} +let max_domains = { Domains.h = Domain.Pow_2_roots_of_unity 20 } let max_domains_with_x = let conv (Domain.Pow_2_roots_of_unity n) = @@ -131,10 +131,10 @@ let max_domains_with_x = (input_size ~of_int:Fn.id ~add:( + ) ~mul:( * ) (Nat.to_int Width.Max.n))) in - {Ds.h= conv max_domains.h; x} + { Ds.h = conv max_domains.h; x } module Vk = struct - type t = Impls.Wrap.Verification_key.t [@sexp.opaque] [@@deriving sexp] + type t = (Impls.Wrap.Verification_key.t[@sexp.opaque]) [@@deriving sexp] let to_yojson _ = `String "opaque" @@ -177,99 +177,107 @@ module Stable = struct include Vk let of_repr - {Repr.Stable.V1.step_data; max_width; wrap_index= c} : + { Repr.Stable.V1.step_data; max_width; wrap_index = c } : Impls.Wrap.Verification_key.t = let d = Common.wrap_domains.h in let log2_size = Import.Domain.log2_size d in let max_quot_size = Common.max_quot_size_int (Import.Domain.size d) in - { domain= - { log_size_of_group= log2_size - ; group_gen= - Backend.Tock.Field.domain_generator log2_size } - ; max_poly_size= 1 lsl Nat.to_int Backend.Tock.Rounds.n + { domain = + { log_size_of_group = log2_size + ; group_gen = + Backend.Tock.Field.domain_generator log2_size + } + ; max_poly_size = 1 lsl Nat.to_int Backend.Tock.Rounds.n ; max_quot_size - ; urs= Backend.Tock.Keypair.load_urs () - ; evals= + ; urs = Backend.Tock.Keypair.load_urs () + ; evals = Plonk_verification_key_evals.map c ~f:(fun unshifted -> - { Marlin_plonk_bindings.Types.Poly_comm.shifted= + { Marlin_plonk_bindings.Types.Poly_comm.shifted = None - ; unshifted= + ; unshifted = Array.of_list_map 
unshifted ~f:(fun x -> - Or_infinity.Finite x ) } ) - ; shifts= Common.tock_shifts ~log2_size } + Or_infinity.Finite x) + }) + ; shifts = Common.tock_shifts ~log2_size + } end let to_binable - {Poly.step_data; max_width; wrap_index; wrap_vk= _} = - {Repr.Stable.V1.step_data; max_width; wrap_index} + { Poly.step_data; max_width; wrap_index; wrap_vk = _ } = + { Repr.Stable.V1.step_data; max_width; wrap_index } let of_binable - ({Repr.Stable.V1.step_data; max_width; wrap_index= c} as t) - = + ( { Repr.Stable.V1.step_data; max_width; wrap_index = c } as + t ) = { Poly.step_data ; max_width - ; wrap_index= c - ; wrap_vk= Some (Repr_conv.of_repr t) } + ; wrap_index = c + ; wrap_vk = Some (Repr_conv.of_repr t) + } end) end end] let dummy : t = - { step_data= At_most.[] - ; max_width= Width.zero - ; wrap_index= - (let g = [Backend.Tock.Curve.(to_affine_exn one)] in - { sigma_comm_0= g - ; sigma_comm_1= g - ; sigma_comm_2= g - ; ql_comm= g - ; qr_comm= g - ; qo_comm= g - ; qm_comm= g - ; qc_comm= g - ; rcm_comm_0= g - ; rcm_comm_1= g - ; rcm_comm_2= g - ; psm_comm= g - ; add_comm= g - ; mul1_comm= g - ; mul2_comm= g - ; emul1_comm= g - ; emul2_comm= g - ; emul3_comm= g }) - ; wrap_vk= None } + { step_data = At_most.[] + ; max_width = Width.zero + ; wrap_index = + (let g = [ Backend.Tock.Curve.(to_affine_exn one) ] in + { sigma_comm_0 = g + ; sigma_comm_1 = g + ; sigma_comm_2 = g + ; ql_comm = g + ; qr_comm = g + ; qo_comm = g + ; qm_comm = g + ; qc_comm = g + ; rcm_comm_0 = g + ; rcm_comm_1 = g + ; rcm_comm_2 = g + ; psm_comm = g + ; add_comm = g + ; mul1_comm = g + ; mul2_comm = g + ; emul1_comm = g + ; emul2_comm = g + ; emul3_comm = g + }) + ; wrap_vk = None + } module Checked = struct open Step_main_inputs open Impl type t = - { step_domains: (Field.t Domain.t Domains.t, Max_branches.n) Vector.t - ; step_widths: (Width.Checked.t, Max_branches.n) Vector.t - ; max_width: Width.Checked.t - ; wrap_index: Inner_curve.t array Plonk_verification_key_evals.t - ; num_branches: 
(Boolean.var, Max_branches.Log2.n) Vector.t } + { step_domains : (Field.t Domain.t Domains.t, Max_branches.n) Vector.t + ; step_widths : (Width.Checked.t, Max_branches.n) Vector.t + ; max_width : Width.Checked.t + ; wrap_index : Inner_curve.t array Plonk_verification_key_evals.t + ; num_branches : (Boolean.var, Max_branches.Log2.n) Vector.t + } [@@deriving hlist, fields] let to_input = let open Random_oracle_input in let map_reduce t ~f = Array.map t ~f |> Array.reduce_exn ~f:append in - fun {step_domains; step_widths; max_width; wrap_index; num_branches} -> - ( List.reduce_exn ~f:append - [ map_reduce (Vector.to_array step_domains) ~f:(fun {Domains.h} -> - map_reduce [|h|] ~f:(fun (Domain.Pow_2_roots_of_unity x) -> - bitstring (Field.unpack x ~length:max_log2_degree) ) ) - ; Array.map (Vector.to_array step_widths) ~f:Width.Checked.to_bits - |> bitstrings - ; bitstring (Width.Checked.to_bits max_width) - ; wrap_index_to_input - (Array.concat_map - ~f:(Fn.compose Array.of_list Inner_curve.to_field_elements)) - wrap_index - ; bitstring (Vector.to_list num_branches) ] - : _ Random_oracle_input.t ) + fun { step_domains; step_widths; max_width; wrap_index; num_branches } : + _ Random_oracle_input.t -> + List.reduce_exn ~f:append + [ map_reduce (Vector.to_array step_domains) ~f:(fun { Domains.h } -> + map_reduce [| h |] ~f:(fun (Domain.Pow_2_roots_of_unity x) -> + bitstring (Field.unpack x ~length:max_log2_degree))) + ; Array.map (Vector.to_array step_widths) ~f:Width.Checked.to_bits + |> bitstrings + ; bitstring (Width.Checked.to_bits max_width) + ; wrap_index_to_input + (Array.concat_map + ~f:(Fn.compose Array.of_list Inner_curve.to_field_elements)) + wrap_index + ; bitstring (Vector.to_list num_branches) + ] end let%test_unit "input_size" = @@ -282,7 +290,7 @@ let%test_unit "input_size" = let (T (typ, conv)) = Impls.Step.input ~branching:a ~wrap_rounds:Backend.Tock.Rounds.n in - Impls.Step.Data_spec.size [typ]) ) + Impls.Step.Data_spec.size [ typ ])) let typ : 
(Checked.t, t) Impls.Step.Typ.t = let open Step_main_inputs in @@ -296,11 +304,12 @@ let typ : (Checked.t, t) Impls.Step.Typ.t = ~length: (index_commitment_length ~max_degree:Max_degree.wrap Common.wrap_domains.h)) - ; Vector.typ Boolean.typ Max_branches.Log2.n ] + ; Vector.typ Boolean.typ Max_branches.Log2.n + ] ~var_to_hlist:Checked.to_hlist ~var_of_hlist:Checked.of_hlist ~value_of_hlist:(fun _ -> - failwith "Side_loaded_verification_key: value_of_hlist" ) - ~value_to_hlist:(fun {Poly.step_data; wrap_index; max_width; _} -> + failwith "Side_loaded_verification_key: value_of_hlist") + ~value_to_hlist:(fun { Poly.step_data; wrap_index; max_width; _ } -> [ At_most.extend_to_vector (At_most.map step_data ~f:fst) dummy_domains Max_branches.n @@ -310,5 +319,5 @@ let typ : (Checked.t, t) Impls.Step.Typ.t = ; max_width ; Plonk_verification_key_evals.map ~f:Array.of_list wrap_index ; (let n = At_most.length step_data in - Vector.init Max_branches.Log2.n ~f:(fun i -> (n lsr i) land 1 = 1)) ] - ) + Vector.init Max_branches.Log2.n ~f:(fun i -> (n lsr i) land 1 = 1)) + ]) diff --git a/src/lib/pickles/sponge_inputs.ml b/src/lib/pickles/sponge_inputs.ml index 0cd22fa84ab..11c8010331e 100644 --- a/src/lib/pickles/sponge_inputs.ml +++ b/src/lib/pickles/sponge_inputs.ml @@ -8,31 +8,31 @@ end module Make (Impl : Snarky_backendless.Snark_intf.Run) (B : sig - open Impl + open Impl - val params : field Sponge.Params.t + val params : field Sponge.Params.t - val to_the_alpha : field -> field + val to_the_alpha : field -> field - module Operations : sig - val apply_affine_map : - field array array * field array -> field array -> field array - end + module Operations : sig + val apply_affine_map : + field array array * field array -> field array -> field array + end end) = struct include Make_sponge.Rounds (* TODO: This is wronge. A round should be - ark -> sbox -> mds + ark -> sbox -> mds - instead of + instead of - sbox -> mds -> ark + sbox -> mds -> ark - which is what's implemented. 
+ which is what's implemented. *) let round_table start = - let ({round_constants; mds} : _ Sponge.Params.t) = B.params in + let ({ round_constants; mds } : _ Sponge.Params.t) = B.params in (* sbox -> mds -> ark *) let apply_round i s = let s' = Array.map s ~f:B.to_the_alpha in @@ -40,7 +40,7 @@ struct in let res = Array.init (rounds_full + 1) ~f:(fun _ -> - Array.create ~len:3 Impl.Field.Constant.zero ) + Array.create ~len:3 Impl.Field.Constant.zero) in res.(0) <- start ; for i = 1 to rounds_full do @@ -71,9 +71,11 @@ struct (let open Zexe_backend_common.Plonk_constraint_system.Plonk_constraint in with_label __LOC__ (fun () -> Impl.assert_ - [ { basic= T (Poseidon {state= t}) - ; annotation= Some "plonk-poseidon" } ] )) ; - t.(Int.(Array.length t - 1)) ) + [ { basic = T (Poseidon { state = t }) + ; annotation = Some "plonk-poseidon" + } + ])) ; + t.(Int.(Array.length t - 1))) (* TODO: experiment with sealing version of this *) let add_assign ~state i x = diff --git a/src/lib/pickles/step.ml b/src/lib/pickles/step.ml index 9b1ac6ddd98..12c70a4970e 100644 --- a/src/lib/pickles/step.ml +++ b/src/lib/pickles/step.ml @@ -14,9 +14,9 @@ open Common module Make (A : T0) (A_value : sig - type t + type t - val to_field_elements : t -> Tick.Field.t array + val to_field_elements : t -> Tick.Field.t array end) (Max_branching : Nat.Add.Intf_transparent) = struct @@ -40,9 +40,10 @@ struct , local_widths , local_heights ) Step_branch_data.t) (next_state : A_value.t) - ~maxes:(module Maxes : Pickles_types.Hlist.Maxes.S - with type length = Max_branching.n - and type ns = max_local_max_branchings) + ~maxes: + (module Maxes : Pickles_types.Hlist.Maxes.S + with type length = Max_branching.n + and type ns = max_local_max_branchings) ~(prevs_length : (prev_vars, prevs_length) Length.t) ~self ~step_domains ~self_dlog_plonk_index pk self_dlog_vk (prev_with_proofs : @@ -96,14 +97,15 @@ struct end in let b_poly = Tock.Field.(Dlog_main.b_poly ~add ~mul ~one) in let sgs, unfinalized_proofs, 
statements_with_hashes, x_hats, witnesses = - let f : type var value max local_max_branching m. + let f : + type var value max local_max_branching m. max Nat.t -> Impls.Wrap.Verification_key.t -> 'a -> (value, local_max_branching, m) P.With_data.t -> (var, value, local_max_branching, m) Tag.t -> must_verify:bool - -> [`Sg of Tock.Curve.Affine.t] + -> [ `Sg of Tock.Curve.Affine.t ] * Unfinalized.Constant.t * Statement_with_hashes.t * X_hat.t @@ -113,8 +115,7 @@ struct let plonk = let domain = (Vector.to_array (Types_map.lookup_step_domains tag)).(Index.to_int - t - .statement + t.statement .proof_state .deferred_values .which_branch) @@ -142,13 +143,14 @@ struct ~domain_generator:Backend.Tick.Field.domain_generator) { zeta ; alpha - ; beta= Challenge.Constant.to_tick_field plonk0.beta - ; gamma= Challenge.Constant.to_tick_field plonk0.gamma } + ; beta = Challenge.Constant.to_tick_field plonk0.beta + ; gamma = Challenge.Constant.to_tick_field plonk0.gamma + } (Plonk_checks.evals_of_split_evals (module Tick.Field) t.prev_evals ~rounds:(Nat.to_int Tick.Rounds.n) ~zeta ~zetaw) (fst t.prev_x_hat) - |> fst ) + |> fst) in let data = Types_map.lookup_basic tag in let (module Local_max_branching) = data.max_branching in @@ -160,28 +162,33 @@ struct statement.proof_state.me_only.old_bulletproof_challenges in let prev_statement_with_hashes : _ Dlog_based.Statement.In_circuit.t = - { pass_through= + { pass_through = Common.hash_pairing_me_only (Reduced_me_only.Pairing_based.prepare ~dlog_plonk_index:dlog_index statement.pass_through) ~app_state:data.value_to_field_elements - ; proof_state= + ; proof_state = { statement.proof_state with - deferred_values= + deferred_values = { statement.proof_state.deferred_values with - plonk= + plonk = { plonk with - zeta= plonk0.zeta - ; alpha= plonk0.alpha - ; beta= plonk0.beta - ; gamma= plonk0.gamma } } - ; me_only= + zeta = plonk0.zeta + ; alpha = plonk0.alpha + ; beta = plonk0.beta + ; gamma = plonk0.gamma + } + } + ; me_only = 
Common.hash_dlog_me_only Max_branching.n - { old_bulletproof_challenges= + { old_bulletproof_challenges = (* TODO: Get rid of this padding *) Vector.extend_exn prev_challenges Max_branching.n Dummy.Ipa.Wrap.challenges_computed - ; sg= statement.proof_state.me_only.sg } } } + ; sg = statement.proof_state.me_only.sg + } + } + } in let module O = Tock.Oracles in let o = @@ -198,7 +205,8 @@ struct prev_challenges ~f:(fun commitment chals -> { Tock.Proof.Challenge_polynomial.commitment - ; challenges= Vector.to_array chals } ) + ; challenges = Vector.to_array chals + }) |> to_list) public_input t.proof in @@ -207,11 +215,12 @@ struct Scalar_challenge.map ~f:Challenge.Constant.of_tock_field (f o) in let plonk0 = - { Types.Dlog_based.Proof_state.Deferred_values.Plonk.Minimal.alpha= + { Types.Dlog_based.Proof_state.Deferred_values.Plonk.Minimal.alpha = scalar_chal O.alpha - ; beta= O.beta o - ; gamma= O.gamma o - ; zeta= scalar_chal O.zeta } + ; beta = O.beta o + ; gamma = O.gamma o + ; zeta = scalar_chal O.zeta + } in let xi = scalar_chal O.v in let r = scalar_chal O.u in @@ -238,7 +247,7 @@ struct let new_bulletproof_challenges, b = let prechals = Array.map (O.opening_prechallenges o) ~f:(fun x -> - Scalar_challenge.map ~f:Challenge.Constant.of_tock_field x ) + Scalar_challenge.map ~f:Challenge.Constant.of_tock_field x) in let chals = Array.map prechals ~f:(fun x -> Ipa.Wrap.compute_challenge x) @@ -252,20 +261,19 @@ struct let prechals = Vector.of_list_and_length_exn ( Array.map prechals ~f:(fun x -> - {Bulletproof_challenge.prechallenge= x} ) + { Bulletproof_challenge.prechallenge = x }) |> Array.to_list ) Tock.Rounds.n in (prechals, b) in let sg = - if not must_verify then - Ipa.Wrap.compute_sg new_bulletproof_challenges + if not must_verify then Ipa.Wrap.compute_sg new_bulletproof_challenges else t.proof.openings.proof.sg in let witness : _ Per_proof_witness.Constant.t = ( t.P.Base.Dlog_based.statement.pass_through.app_state - , 
{prev_statement_with_hashes.proof_state with me_only= ()} + , { prev_statement_with_hashes.proof_state with me_only = () } , Double.map2 t.prev_evals t.prev_x_hat ~f:Tuple.T2.create , Vector.extend_exn t.statement.pass_through.sg Local_max_branching.n (Lazy.force Dummy.Ipa.Wrap.sg) @@ -274,7 +282,7 @@ struct (Vector.map t.statement.pass_through.old_bulletproof_challenges ~f:Ipa.Step.compute_challenges) Local_max_branching.n Dummy.Ipa.Step.challenges_computed - , ({t.proof.openings.proof with sg}, t.proof.messages) ) + , ({ t.proof.openings.proof with sg }, t.proof.messages) ) in let combined_inner_product = let e1, e2 = t.proof.openings.evals in @@ -288,8 +296,8 @@ struct let a, b = Dlog_plonk_types.Evals.(to_vectors (e : _ array t)) in let v : (Tock.Field.t array, _) Vector.t = Vector.append - (Vector.map b_polys ~f:(fun f -> [|f pt|])) - ([|x_hat|] :: a) + (Vector.map b_polys ~f:(fun f -> [| f pt |])) + ([| x_hat |] :: a) (snd (Local_max_branching.add Nat.N8.n)) in let open Tock.Field in @@ -304,7 +312,7 @@ struct ~evaluation_point:pt ~shifted_pow:(fun deg x -> Pcs_batch.pow ~one ~mul x - Int.(Max_degree.wrap - (deg mod Max_degree.wrap)) ) + Int.(Max_degree.wrap - (deg mod Max_degree.wrap))) v b in let open Tock.Field in @@ -321,7 +329,7 @@ struct (module Tock.Field) data.wrap_domains.h ~shifts:Common.tock_shifts ~domain_generator:Backend.Tock.Field.domain_generator) - {plonk0 with zeta= As_field.zeta; alpha= As_field.alpha} + { plonk0 with zeta = As_field.zeta; alpha = As_field.alpha } (Plonk_checks.evals_of_split_evals (module Tock.Field) t.proof.openings.evals ~rounds:(Nat.to_int Tock.Rounds.n) @@ -333,25 +341,29 @@ struct Shifted_value.of_field (module Tock.Field) ~shift:Shifts.tock in ( `Sg sg - , { Types.Pairing_based.Proof_state.Per_proof.deferred_values= - { plonk= + , { Types.Pairing_based.Proof_state.Per_proof.deferred_values = + { plonk = { plonk with - zeta= plonk0.zeta - ; alpha= plonk0.alpha - ; beta= chal plonk0.beta - ; gamma= chal plonk0.gamma } 
- ; combined_inner_product= shifted_value combined_inner_product + zeta = plonk0.zeta + ; alpha = plonk0.alpha + ; beta = chal plonk0.beta + ; gamma = chal plonk0.gamma + } + ; combined_inner_product = shifted_value combined_inner_product ; xi - ; bulletproof_challenges= new_bulletproof_challenges - ; b= shifted_value b } - ; should_finalize= must_verify - ; sponge_digest_before_evaluations= - Digest.Constant.of_tock_field sponge_digest_before_evaluations } + ; bulletproof_challenges = new_bulletproof_challenges + ; b = shifted_value b + } + ; should_finalize = must_verify + ; sponge_digest_before_evaluations = + Digest.Constant.of_tock_field sponge_digest_before_evaluations + } , prev_statement_with_hashes , x_hat , witness ) in - let rec go : type vars values ns ms maxes k. + let rec go : + type vars values ns ms maxes k. (values, ns, ms) H3.T(P.With_data).t -> maxes H1.T(Nat).t -> (vars, values, ns, ms) H4.T(Tag).t @@ -392,7 +404,8 @@ struct let module M = H3.Map2_to_H1 (P.With_data) (P.Base.Me_only.Dlog_based) (struct - let f : type a b c. + let f : + type a b c. (a, b, c) P.With_data.t -> b P.Base.Me_only.Dlog_based.t = fun (T t) -> t.statement.proof_state.me_only end) @@ -419,17 +432,19 @@ struct in let me_only : _ Reduced_me_only.Pairing_based.t = (* Have the sg be available in the opening proof and verify it. 
*) - {app_state= next_state; sg= sgs; old_bulletproof_challenges} + { app_state = next_state; sg = sgs; old_bulletproof_challenges } in - { proof_state= {unfinalized_proofs= unfinalized_proofs_extended; me_only} - ; pass_through } + { proof_state = + { unfinalized_proofs = unfinalized_proofs_extended; me_only } + ; pass_through + } in let next_me_only_prepared = Reduced_me_only.Pairing_based.prepare ~dlog_plonk_index:self_dlog_plonk_index next_statement.proof_state.me_only in - let handler (Snarky_backendless.Request.With {request; respond} as r) = + let handler (Snarky_backendless.Request.With { request; respond } as r) = let k x = respond (Provide x) in match request with | Req.Proof_with_datas -> @@ -439,17 +454,18 @@ struct | Req.App_state -> k next_me_only_prepared.app_state | _ -> ( - match handler with - | Some f -> - f r - | None -> - Snarky_backendless.Request.unhandled ) + match handler with + | Some f -> + f r + | None -> + Snarky_backendless.Request.unhandled ) in let%map.Async.Deferred (next_proof : Tick.Proof.t) = let (T (input, conv)) = Impls.Step.input ~branching:Max_branching.n ~wrap_rounds:Tock.Rounds.n in - let rec pad : type n k maxes pvals lws lhs. + let rec pad : + type n k maxes pvals lws lhs. 
(Digest.Constant.t, k) Vector.t -> maxes H1.T(Nat).t -> (maxes, n) Hlist.Length.t @@ -464,14 +480,15 @@ struct x :: pad xs ms n | [], m :: ms, S n -> let t : _ Types.Dlog_based.Proof_state.Me_only.t = - { sg= Lazy.force Dummy.Ipa.Step.sg - ; old_bulletproof_challenges= + { sg = Lazy.force Dummy.Ipa.Step.sg + ; old_bulletproof_challenges = Vector.init Max_branching.n ~f:(fun _ -> - Dummy.Ipa.Wrap.challenges_computed ) } + Dummy.Ipa.Wrap.challenges_computed) + } in Common.hash_dlog_me_only Max_branching.n t :: pad [] ms n in - let {Domains.h; x} = + let { Domains.h; x } = List.nth_exn (Vector.to_list step_domains) (Index.to_int branch_data.index) @@ -482,8 +499,7 @@ struct (P.With_data) (E03 (Tick.Curve.Affine)) (struct - let f (T t : _ P.With_data.t) = - t.statement.proof_state.me_only.sg + let f (T t : _ P.With_data.t) = t.statement.proof_state.me_only.sg end) in let module V = H3.To_vector (Tick.Curve.Affine) in @@ -494,7 +510,7 @@ struct (fun () -> Impls.Step.generate_witness_conv ~f: - (fun {Impls.Step.Proof_inputs.auxiliary_inputs; public_inputs} -> + (fun { Impls.Step.Proof_inputs.auxiliary_inputs; public_inputs } -> Backend.Tick.Proof.create_async ~primary:public_inputs ~auxiliary:auxiliary_inputs ~message: @@ -504,28 +520,29 @@ struct next_me_only_prepared.old_bulletproof_challenges ~f:(fun commitment chals -> { Tick.Proof.Challenge_polynomial.commitment - ; challenges= Vector.to_array chals } ) + ; challenges = Vector.to_array chals + }) |> to_list) - pk ) - [input] - (fun x () -> - ( Impls.Step.handle - (fun () -> (branch_data.main ~step_domains (conv x) : unit)) - handler - : unit ) ) + pk) + [ input ] + (fun x () : unit -> + Impls.Step.handle + (fun () : unit -> branch_data.main ~step_domains (conv x)) + handler) () - { proof_state= + { proof_state = { next_statement.proof_state with - me_only= + me_only = Common.hash_pairing_me_only - ~app_state:A_value.to_field_elements - next_me_only_prepared } - ; pass_through= + ~app_state:A_value.to_field_elements 
next_me_only_prepared + } + ; pass_through = (* TODO: Use the same pad_pass_through function as in wrap *) pad (Vector.map statements_with_hashes ~f:(fun s -> - s.proof_state.me_only )) - Maxes.maxes Maxes.length } ) + s.proof_state.me_only)) + Maxes.maxes Maxes.length + }) in let prev_evals = let module M = @@ -539,12 +556,13 @@ struct let module V = H3.To_vector (E) in V.f prev_values_length (M.f prev_with_proofs) in - { P.Base.Pairing_based.proof= next_proof - ; statement= next_statement - ; index= branch_data.index - ; prev_evals= + { P.Base.Pairing_based.proof = next_proof + ; statement = next_statement + ; index = branch_data.index + ; prev_evals = Vector.extend (Vector.map2 prev_evals x_hats ~f:(fun es x_hat -> - double_zip es x_hat )) - lte Max_branching.n Dummy.evals } + double_zip es x_hat)) + lte Max_branching.n Dummy.evals + } end diff --git a/src/lib/pickles/step_branch_data.ml b/src/lib/pickles/step_branch_data.ml index 99dc44273c3..b80ab595d26 100644 --- a/src/lib/pickles/step_branch_data.ml +++ b/src/lib/pickles/step_branch_data.ml @@ -15,11 +15,11 @@ type ( 'a_var , 'local_heights ) t = | T : - { branching: 'branching Nat.t * ('prev_vars, 'branching) Hlist.Length.t - ; index: Types.Index.t - ; lte: ('branching, 'max_branching) Nat.Lte.t - ; domains: Domains.t - ; rule: + { branching : 'branching Nat.t * ('prev_vars, 'branching) Hlist.Length.t + ; index : Types.Index.t + ; lte : ('branching, 'max_branching) Nat.Lte.t + ; domains : Domains.t + ; rule : ( 'prev_vars , 'prev_values , 'local_widths @@ -27,20 +27,21 @@ type ( 'a_var , 'a_avar , 'a_value ) Inductive_rule.t - ; main: + ; main : step_domains:(Domains.t, 'branches) Vector.t -> ( (Unfinalized.t, 'max_branching) Vector.t , Impls.Step.Field.t , (Impls.Step.Field.t, 'max_branching) Vector.t ) Types.Pairing_based.Statement.t -> unit - ; requests: + ; requests : (module Requests.Step.S with type statement = 'a_value and type max_branching = 'max_branching and type prev_values = 'prev_values and 
type local_signature = 'local_widths - and type local_branches = 'local_heights) } + and type local_branches = 'local_heights) + } -> ( 'a_var , 'a_value , 'max_branching @@ -62,7 +63,8 @@ let create Timer.clock __LOC__ ; let module HT = H4.T (Tag) in let (T (self_width, branching)) = HT.length rule.prevs in - let rec extract_lengths : type a b n m k. + let rec extract_lengths : + type a b n m k. (a, b, n, m) HT.t -> (a, k) Length.t -> n H1.T(Nat).t * m H1.T(Nat).t * (n, k) Length.t * (m, k) Length.t = @@ -105,7 +107,8 @@ let create ; var_to_field_elements ; value_to_field_elements ; wrap_domains - ; step_domains } + ; step_domains + } ~self_branches:branches ~branching ~local_signature:widths ~local_signature_length ~local_branches:heights ~local_branches_length ~lte ~self @@ -126,10 +129,11 @@ let create in Timer.clock __LOC__ ; T - { branching= (self_width, branching) + { branching = (self_width, branching) ; index ; lte ; rule - ; domains= own_domains - ; main= step - ; requests } + ; domains = own_domains + ; main = step + ; requests + } diff --git a/src/lib/pickles/step_main.ml b/src/lib/pickles/step_main.ml index ad86c6debdd..2d3efdd1a72 100644 --- a/src/lib/pickles/step_main.ml +++ b/src/lib/pickles/step_main.ml @@ -10,8 +10,8 @@ open Step_main_inputs module B = Inductive_rule.B (* The SNARK function corresponding to the input inductive rule. *) -let step_main - : type branching self_branches prev_vars prev_values a_var a_value max_branching local_branches local_signature. +let step_main : + type branching self_branches prev_vars prev_values a_var a_value max_branching local_branches local_signature. 
(module Requests.Step.S with type local_signature = local_signature and type local_branches = local_branches @@ -22,16 +22,14 @@ let step_main -> self_branches:self_branches Nat.t -> local_signature:local_signature H1.T(Nat).t -> local_signature_length:(local_signature, branching) Hlist.Length.t - -> local_branches:(* For each inner proof of type T , the number of branches that type T has. *) - local_branches H1.T(Nat).t + -> local_branches: + (* For each inner proof of type T , the number of branches that type T has. *) + local_branches H1.T(Nat).t -> local_branches_length:(local_branches, branching) Hlist.Length.t -> branching:(prev_vars, branching) Hlist.Length.t -> lte:(branching, max_branching) Nat.Lte.t - -> basic:( a_var - , a_value - , max_branching - , self_branches ) - Types_map.Compiled.basic + -> basic: + (a_var, a_value, max_branching, self_branches) Types_map.Compiled.basic -> self:(a_var, a_value, max_branching, self_branches) Tag.t -> ( prev_vars , prev_values @@ -64,7 +62,8 @@ let step_main Typ.t end in let prev_typs = - let rec join : type e pvars pvals ns1 ns2 br. + let rec join : + type e pvars pvals ns1 ns2 br. (pvars, pvals, ns1, ns2) H4.T(Tag).t -> ns1 H1.T(Nat).t -> ns2 H1.T(Nat).t @@ -84,20 +83,20 @@ let step_main | Some T -> (basic.step_domains, basic.typ) | None -> ( - (* TODO: Abstract this into a function in Types_map *) - match d.kind with - | Compiled -> - let d = Types_map.lookup_compiled d.id in - (d.step_domains, d.typ) - | Side_loaded -> - let d = Types_map.lookup_side_loaded d.id in - (* TODO: This replication to please the type checker is - pointless... 
*) - ( Vector.init d.permanent.branches ~f:(fun _ -> - Side_loaded_verification_key.max_domains_with_x ) - , d.permanent.typ ) ) + (* TODO: Abstract this into a function in Types_map *) + match d.kind with + | Compiled -> + let d = Types_map.lookup_compiled d.id in + (d.step_domains, d.typ) + | Side_loaded -> + let d = Types_map.lookup_side_loaded d.id in + (* TODO: This replication to please the type checker is + pointless... *) + ( Vector.init d.permanent.branches ~f:(fun _ -> + Side_loaded_verification_key.max_domains_with_x) + , d.permanent.typ ) ) in - typ ) + typ) d in let t = Per_proof_witness.typ ~step_domains typ n1 n2 in @@ -134,7 +133,7 @@ let step_main let app_state = exists basic.typ ~request:(fun () -> Req.App_state) in let prevs = exists (Prev_typ.f prev_typs) ~request:(fun () -> - Req.Proof_with_datas ) + Req.Proof_with_datas) in let prev_statements = let module M = @@ -156,34 +155,36 @@ let step_main , max_branching , self_branches ) Types_map.For_step.t = - { branches= self_branches - ; branchings= Vector.map basic.branchings ~f:Field.of_int - ; max_branching= (module Max_branching) - ; max_width= None - ; typ= basic.typ - ; var_to_field_elements= basic.var_to_field_elements - ; value_to_field_elements= basic.value_to_field_elements - ; wrap_domains= basic.wrap_domains - ; step_domains= `Known basic.step_domains - ; wrap_key= dlog_plonk_index } + { branches = self_branches + ; branchings = Vector.map basic.branchings ~f:Field.of_int + ; max_branching = (module Max_branching) + ; max_width = None + ; typ = basic.typ + ; var_to_field_elements = basic.var_to_field_elements + ; value_to_field_elements = basic.value_to_field_elements + ; wrap_domains = basic.wrap_domains + ; step_domains = `Known basic.step_domains + ; wrap_key = dlog_plonk_index + } in let module M = H4.Map (Tag) (Types_map.For_step) (struct - let f : type a b n m. + let f : + type a b n m. 
(a, b, n, m) Tag.t -> (a, b, n, m) Types_map.For_step.t = fun tag -> match Type_equal.Id.same_witness self.id tag.id with | Some T -> self_data | None -> ( - match tag.kind with - | Compiled -> - Types_map.For_step.of_compiled - (Types_map.lookup_compiled tag.id) - | Side_loaded -> - Types_map.For_step.of_side_loaded - (Types_map.lookup_side_loaded tag.id) ) + match tag.kind with + | Compiled -> + Types_map.For_step.of_compiled + (Types_map.lookup_compiled tag.id) + | Side_loaded -> + Types_map.For_step.of_side_loaded + (Types_map.lookup_side_loaded tag.id) ) end) in M.f rule.prevs @@ -200,7 +201,7 @@ let step_main let pass_throughs = with_label "pass_throughs" (fun () -> let module V = H1.Of_vector (Digest) in - V.f branching (Vector.trim stmt.pass_through lte) ) + V.f branching (Vector.trim stmt.pass_through lte)) in let sgs = let module M = @@ -208,8 +209,8 @@ let step_main (Per_proof_witness) (E03 (Inner_curve)) (struct - let f : type a b c. - (a, b, c) Per_proof_witness.t -> Inner_curve.t = + let f : + type a b c. (a, b, c) Per_proof_witness.t -> Inner_curve.t = fun (_, _, _, _, _, (opening, _)) -> opening.sg end) in @@ -218,7 +219,8 @@ let step_main in let bulletproof_challenges = with_label "prevs_verified" (fun () -> - let rec go : type vars vals ns1 ns2 n. + let rec go : + type vars vals ns1 ns2 n. 
(vars, ns1, ns2) H3.T(Per_proof_witness).t -> (vars, vals, ns1, ns2) H4.T(Types_map.For_step).t -> vars H1.T(E01(Digest)).t @@ -269,20 +271,22 @@ let step_main ~max_width:d.max_width ~step_widths:d.branchings ~step_domains:d.step_domains ~sponge ~old_bulletproof_challenges state.deferred_values - prev_evals ) + prev_evals) in let which_branch = state.deferred_values.which_branch in let state = with_label __LOC__ (fun () -> { state with - deferred_values= + deferred_values = { state.deferred_values with - which_branch= + which_branch = Pseudo.choose ( state.deferred_values.which_branch , Vector.init d.branches ~f:Field.of_int ) ~f:Fn.id - |> Types.Index.of_field (module Impl) } } ) + |> Types.Index.of_field (module Impl) + } + }) in let statement = let prev_me_only = @@ -298,12 +302,14 @@ let step_main ~which_branch (* Use opt sponge for cutting off the bulletproof challenges early *) { app_state - ; dlog_plonk_index= d.wrap_key - ; sg= sg_old - ; old_bulletproof_challenges } ) + ; dlog_plonk_index = d.wrap_key + ; sg = sg_old + ; old_bulletproof_challenges + }) in - { Types.Dlog_based.Statement.pass_through= prev_me_only - ; proof_state= {state with me_only= pass_through} } + { Types.Dlog_based.Statement.pass_through = prev_me_only + ; proof_state = { state with me_only = pass_through } + } in let verified = with_label __LOC__ (fun () -> @@ -311,7 +317,7 @@ let step_main ~wrap_domain:d.wrap_domains.h ~is_base_case:should_verify ~sg_old ~opening ~messages ~wrap_verification_key:d.wrap_key - statement unfinalized ) + statement unfinalized) in if debug then as_prover @@ -337,7 +343,7 @@ let step_main go prevs datas pass_throughs unfinalized_proofs proofs_should_verify branching in - Boolean.Assert.all vs ; chalss ) + Boolean.Assert.all vs ; chalss) in let () = with_label "hash_me_only" (fun () -> @@ -350,11 +356,12 @@ let step_main (hash_me_only { app_state ; dlog_plonk_index - ; sg= sgs - ; old_bulletproof_challenges= + ; sg = sgs + ; old_bulletproof_challenges = (* 
Note: the bulletproof_challenges here are unpadded! *) - bulletproof_challenges }) ) + bulletproof_challenges + })) in - () ) + ()) in stage main diff --git a/src/lib/pickles/step_main_inputs.ml b/src/lib/pickles/step_main_inputs.ml index 2caf72ca8b5..38f2e7b3122 100644 --- a/src/lib/pickles/step_main_inputs.ml +++ b/src/lib/pickles/step_main_inputs.ml @@ -75,8 +75,7 @@ module Sponge = struct end let%test_unit "sponge" = - let module T = Make_sponge.Test (Impl) (Tick_field_sponge.Field) (Sponge.S) - in + let module T = Make_sponge.Test (Impl) (Tick_field_sponge.Field) (Sponge.S) in T.test Tick_field_sponge.params module Input_domain = struct @@ -94,7 +93,7 @@ module Input_domain = struct .unshifted in assert (Array.length v = 1) ; - v.(0) |> Or_infinity.finite_exn ) )) + v.(0) |> Or_infinity.finite_exn))) end module Inner_curve = struct @@ -171,7 +170,7 @@ module Inner_curve = struct include ( T : module type of T - with module Scaling_precomputation := T.Scaling_precomputation ) + with module Scaling_precomputation := T.Scaling_precomputation ) module Scaling_precomputation = T.Scaling_precomputation @@ -179,9 +178,9 @@ module Inner_curve = struct let scale t bs = with_label __LOC__ (fun () -> - T.scale t (Bitstring_lib.Bitstring.Lsb_first.of_list bs) ) + T.scale t (Bitstring_lib.Bitstring.Lsb_first.of_list bs)) - let to_field_elements (x, y) = [x; y] + let to_field_elements (x, y) = [ x; y ] let assert_equal (x1, y1) (x2, y2) = Field.Assert.equal x1 x2 ; Field.Assert.equal y1 y2 diff --git a/src/lib/pickles/tag.ml b/src/lib/pickles/tag.ml index 1ecb55a67ce..8acf3d5231b 100644 --- a/src/lib/pickles/tag.ml +++ b/src/lib/pickles/tag.ml @@ -4,8 +4,9 @@ type ('var, 'value, 'n1, 'n2) tag = ('var * 'value * 'n1 * 'n2) Type_equal.Id.t type kind = Side_loaded | Compiled -type ('var, 'value, 'n1, 'n2) t = {kind: kind; id: ('var, 'value, 'n1, 'n2) tag} +type ('var, 'value, 'n1, 'n2) t = + { kind : kind; id : ('var, 'value, 'n1, 'n2) tag } [@@deriving fields] let create 
~name = - {kind= Compiled; id= Type_equal.Id.create ~name sexp_of_opaque} + { kind = Compiled; id = Type_equal.Id.create ~name sexp_of_opaque } diff --git a/src/lib/pickles/tag.mli b/src/lib/pickles/tag.mli index b0252388033..79325162f1f 100644 --- a/src/lib/pickles/tag.mli +++ b/src/lib/pickles/tag.mli @@ -4,7 +4,8 @@ type ('var, 'value, 'n1, 'n2) tag = ('var * 'value * 'n1 * 'n2) Type_equal.Id.t type kind = Side_loaded | Compiled -type ('var, 'value, 'n1, 'n2) t = {kind: kind; id: ('var, 'value, 'n1, 'n2) tag} +type ('var, 'value, 'n1, 'n2) t = + { kind : kind; id : ('var, 'value, 'n1, 'n2) tag } [@@deriving fields] val create : name:string -> ('var, 'value, 'n1, 'n2) t diff --git a/src/lib/pickles/tick_field_sponge.ml b/src/lib/pickles/tick_field_sponge.ml index c06c2346e72..32a50e0e89b 100644 --- a/src/lib/pickles/tick_field_sponge.ml +++ b/src/lib/pickles/tick_field_sponge.ml @@ -7,4 +7,4 @@ let params = map pasta_p ~f:(fun s -> Backend.Tick.Field.of_bits (List.init Backend.Tick.Field.size_in_bits - (testbit (Bigint.of_string s))) )) + (testbit (Bigint.of_string s))))) diff --git a/src/lib/pickles/timer.ml b/src/lib/pickles/timer.ml index 17f5accc273..12cd2398ffb 100644 --- a/src/lib/pickles/timer.ml +++ b/src/lib/pickles/timer.ml @@ -8,7 +8,7 @@ let start = Common.when_profiling (fun loc -> r := Time.now () ; - l := loc ) + l := loc) ignore let clock = @@ -18,5 +18,5 @@ let clock = Core.printf "%s -> %s: %s\n%!" 
!l loc (Time.Span.to_string_hum (Time.diff t !r)) ; r := t ; - l := loc ) + l := loc) ignore diff --git a/src/lib/pickles/tock_field_sponge.ml b/src/lib/pickles/tock_field_sponge.ml index 078d49c9f90..a6bebaa4566 100644 --- a/src/lib/pickles/tock_field_sponge.ml +++ b/src/lib/pickles/tock_field_sponge.ml @@ -7,4 +7,4 @@ let params = map pasta_q ~f:(fun s -> Backend.Tock.Field.of_bits (List.init Backend.Tock.Field.size_in_bits - (testbit (Bigint.of_string s))) )) + (testbit (Bigint.of_string s))))) diff --git a/src/lib/pickles/type.ml b/src/lib/pickles/type.ml index a6cbfd68fd9..aa8feb92989 100644 --- a/src/lib/pickles/type.ml +++ b/src/lib/pickles/type.ml @@ -1,11 +1,11 @@ open Pickles_types.Dlog_plonk_types.Poly_comm type (_, _) t = - | PC : ('g1, < g1: 'g1 ; .. >) t - | Scalar : ('s, < scalar: 's ; .. >) t - | Without_degree_bound : ('g1 Without_degree_bound.t, < g1: 'g1 ; .. >) t + | PC : ('g1, < g1 : 'g1 ; .. >) t + | Scalar : ('s, < scalar : 's ; .. >) t + | Without_degree_bound : ('g1 Without_degree_bound.t, < g1 : 'g1 ; .. >) t | With_degree_bound - : ('g1_opt With_degree_bound.t, < g1_opt: 'g1_opt ; .. >) t + : ('g1_opt With_degree_bound.t, < g1_opt : 'g1_opt ; .. 
>) t | ( :: ) : ('a, 'e) t * ('b, 'e) t -> ('a * 'b, 'e) t let degree_bounded_pc = PC :: PC diff --git a/src/lib/pickles/types_map.ml b/src/lib/pickles/types_map.ml index aa0804749ce..1b8932889a0 100644 --- a/src/lib/pickles/types_map.ml +++ b/src/lib/pickles/types_map.ml @@ -14,39 +14,43 @@ type inner_curve_var = module Basic = struct type ('var, 'value, 'n1, 'n2) t = - { max_branching: (module Nat.Add.Intf with type n = 'n1) - ; value_to_field_elements: 'value -> Impls.Step.Field.Constant.t array - ; var_to_field_elements: 'var -> Impls.Step.Field.t array - ; typ: ('var, 'value) Impls.Step.Typ.t - ; branches: 'n2 Nat.t - ; wrap_domains: Domains.t - ; wrap_key: + { max_branching : (module Nat.Add.Intf with type n = 'n1) + ; value_to_field_elements : 'value -> Impls.Step.Field.Constant.t array + ; var_to_field_elements : 'var -> Impls.Step.Field.t array + ; typ : ('var, 'value) Impls.Step.Typ.t + ; branches : 'n2 Nat.t + ; wrap_domains : Domains.t + ; wrap_key : Tick.Inner_curve.Affine.t Dlog_plonk_types.Poly_comm.Without_degree_bound.t Plonk_verification_key_evals.t - ; wrap_vk: Impls.Wrap.Verification_key.t } + ; wrap_vk : Impls.Wrap.Verification_key.t + } end module Side_loaded = struct module Ephemeral = struct type t = - { index: + { index : [ `In_circuit of Side_loaded_verification_key.Checked.t - | `In_prover of Side_loaded_verification_key.t ] } + | `In_prover of Side_loaded_verification_key.t ] + } end module Permanent = struct type ('var, 'value, 'n1, 'n2) t = - { max_branching: (module Nat.Add.Intf with type n = 'n1) - ; value_to_field_elements: 'value -> Impls.Step.Field.Constant.t array - ; var_to_field_elements: 'var -> Impls.Step.Field.t array - ; typ: ('var, 'value) Impls.Step.Typ.t - ; branches: 'n2 Nat.t } + { max_branching : (module Nat.Add.Intf with type n = 'n1) + ; value_to_field_elements : 'value -> Impls.Step.Field.Constant.t array + ; var_to_field_elements : 'var -> Impls.Step.Field.t array + ; typ : ('var, 'value) Impls.Step.Typ.t + ; 
branches : 'n2 Nat.t + } end type ('var, 'value, 'n1, 'n2) t = - { ephemeral: Ephemeral.t option - ; permanent: ('var, 'value, 'n1, 'n2) Permanent.t } + { ephemeral : Ephemeral.t option + ; permanent : ('var, 'value, 'n1, 'n2) Permanent.t + } type packed = | T : @@ -54,16 +58,18 @@ module Side_loaded = struct -> packed let to_basic - { permanent= + { permanent = { max_branching ; value_to_field_elements ; var_to_field_elements ; typ - ; branches } - ; ephemeral } = + ; branches + } + ; ephemeral + } = let wrap_key, wrap_vk = match ephemeral with - | Some {index= `In_prover i} -> + | Some { index = `In_prover i } -> (i.wrap_index, i.wrap_vk) | _ -> failwithf "Side_loaded.to_basic: Expected `In_prover (%s)" __LOC__ () @@ -75,41 +81,44 @@ module Side_loaded = struct ; var_to_field_elements ; typ ; branches - ; wrap_domains= Common.wrap_domains - ; wrap_key= Plonk_verification_key_evals.map ~f:Array.of_list wrap_key } + ; wrap_domains = Common.wrap_domains + ; wrap_key = Plonk_verification_key_evals.map ~f:Array.of_list wrap_key + } end module Compiled = struct type f = Impls.Wrap.field type ('a_var, 'a_value, 'max_branching, 'branches) basic = - { typ: ('a_var, 'a_value) Impls.Step.Typ.t - ; branchings: (int, 'branches) Vector.t + { typ : ('a_var, 'a_value) Impls.Step.Typ.t + ; branchings : (int, 'branches) Vector.t (* For each branch in this rule, how many predecessor proofs does it have? 
*) - ; var_to_field_elements: 'a_var -> Impls.Step.Field.t array - ; value_to_field_elements: 'a_value -> Tick.Field.t array - ; wrap_domains: Domains.t - ; step_domains: (Domains.t, 'branches) Vector.t } + ; var_to_field_elements : 'a_var -> Impls.Step.Field.t array + ; value_to_field_elements : 'a_value -> Tick.Field.t array + ; wrap_domains : Domains.t + ; step_domains : (Domains.t, 'branches) Vector.t + } (* This is the data associated to an inductive proof system with statement type - ['a_var], which has ['branches] many "variants" each of which depends on at most - ['max_branching] many previous statements. *) + ['a_var], which has ['branches] many "variants" each of which depends on at most + ['max_branching] many previous statements. *) type ('a_var, 'a_value, 'max_branching, 'branches) t = - { branches: 'branches Nat.t - ; max_branching: (module Nat.Add.Intf with type n = 'max_branching) - ; branchings: (int, 'branches) Vector.t + { branches : 'branches Nat.t + ; max_branching : (module Nat.Add.Intf with type n = 'max_branching) + ; branchings : (int, 'branches) Vector.t (* For each branch in this rule, how many predecessor proofs does it have? 
*) - ; typ: ('a_var, 'a_value) Impls.Step.Typ.t - ; value_to_field_elements: 'a_value -> Tick.Field.t array - ; var_to_field_elements: 'a_var -> Impls.Step.Field.t array - ; wrap_key: + ; typ : ('a_var, 'a_value) Impls.Step.Typ.t + ; value_to_field_elements : 'a_value -> Tick.Field.t array + ; var_to_field_elements : 'a_var -> Impls.Step.Field.t array + ; wrap_key : Tick.Inner_curve.Affine.t Dlog_plonk_types.Poly_comm.Without_degree_bound.t Plonk_verification_key_evals.t Lazy.t - ; wrap_vk: Impls.Wrap.Verification_key.t Lazy.t - ; wrap_domains: Domains.t - ; step_domains: (Domains.t, 'branches) Vector.t } + ; wrap_vk : Impls.Wrap.Verification_key.t Lazy.t + ; wrap_domains : Domains.t + ; step_domains : (Domains.t, 'branches) Vector.t + } type packed = | T : @@ -126,68 +135,73 @@ module Compiled = struct ; wrap_vk ; wrap_domains ; step_domains - ; wrap_key } = + ; wrap_key + } = { Basic.max_branching ; wrap_domains ; value_to_field_elements ; var_to_field_elements ; typ - ; branches= Vector.length step_domains - ; wrap_key= Lazy.force wrap_key - ; wrap_vk= Lazy.force wrap_vk } + ; branches = Vector.length step_domains + ; wrap_key = Lazy.force wrap_key + ; wrap_vk = Lazy.force wrap_vk + } end module For_step = struct type ('a_var, 'a_value, 'max_branching, 'branches) t = - { branches: 'branches Nat.t - ; max_branching: (module Nat.Add.Intf with type n = 'max_branching) - ; branchings: (Impls.Step.Field.t, 'branches) Vector.t - ; typ: ('a_var, 'a_value) Impls.Step.Typ.t - ; value_to_field_elements: 'a_value -> Tick.Field.t array - ; var_to_field_elements: 'a_var -> Impls.Step.Field.t array - ; wrap_key: + { branches : 'branches Nat.t + ; max_branching : (module Nat.Add.Intf with type n = 'max_branching) + ; branchings : (Impls.Step.Field.t, 'branches) Vector.t + ; typ : ('a_var, 'a_value) Impls.Step.Typ.t + ; value_to_field_elements : 'a_value -> Tick.Field.t array + ; var_to_field_elements : 'a_var -> Impls.Step.Field.t array + ; wrap_key : inner_curve_var 
Dlog_plonk_types.Poly_comm.Without_degree_bound.t Plonk_verification_key_evals.t - ; wrap_domains: Domains.t - ; step_domains: + ; wrap_domains : Domains.t + ; step_domains : [ `Known of (Domains.t, 'branches) Vector.t | `Side_loaded of ( Impls.Step.Field.t Side_loaded_verification_key.Domain.t Side_loaded_verification_key.Domains.t , 'branches ) Vector.t ] - ; max_width: Side_loaded_verification_key.Width.Checked.t option } + ; max_width : Side_loaded_verification_key.Width.Checked.t option + } let of_side_loaded (type a b c d) ({ ephemeral - ; permanent= + ; permanent = { branches ; max_branching ; typ ; value_to_field_elements - ; var_to_field_elements } } : + ; var_to_field_elements + } + } : (a, b, c, d) Side_loaded.t) : (a, b, c, d) t = let index = match ephemeral with - | Some {index= `In_circuit i} -> + | Some { index = `In_circuit i } -> i | _ -> - failwithf "For_step.side_loaded: Expected `In_circuit (%s)" __LOC__ - () + failwithf "For_step.side_loaded: Expected `In_circuit (%s)" __LOC__ () in let T = Nat.eq_exn branches Side_loaded_verification_key.Max_branches.n in { branches ; max_branching - ; branchings= + ; branchings = Vector.map index.step_widths ~f:Side_loaded_verification_key.Width.Checked.to_field ; typ ; value_to_field_elements ; var_to_field_elements - ; wrap_key= index.wrap_index - ; wrap_domains= Common.wrap_domains - ; step_domains= `Side_loaded index.step_domains - ; max_width= Some index.max_width } + ; wrap_key = index.wrap_index + ; wrap_domains = Common.wrap_domains + ; step_domains = `Side_loaded index.step_domains + ; max_width = Some index.max_width + } let of_compiled ({ branches @@ -198,42 +212,46 @@ module For_step = struct ; var_to_field_elements ; wrap_key ; wrap_domains - ; step_domains } : + ; step_domains + } : _ Compiled.t) = { branches - ; max_width= None + ; max_width = None ; max_branching - ; branchings= Vector.map branchings ~f:Impls.Step.Field.of_int + ; branchings = Vector.map branchings ~f:Impls.Step.Field.of_int ; 
typ ; value_to_field_elements ; var_to_field_elements - ; wrap_key= + ; wrap_key = Plonk_verification_key_evals.map (Lazy.force wrap_key) ~f:(Array.map ~f:Step_main_inputs.Inner_curve.constant) ; wrap_domains - ; step_domains= `Known step_domains } + ; step_domains = `Known step_domains + } end type t = - { compiled: Compiled.packed Type_equal.Id.Uid.Table.t - ; side_loaded: Side_loaded.packed Type_equal.Id.Uid.Table.t } + { compiled : Compiled.packed Type_equal.Id.Uid.Table.t + ; side_loaded : Side_loaded.packed Type_equal.Id.Uid.Table.t + } let univ : t = - { compiled= Type_equal.Id.Uid.Table.create () - ; side_loaded= Type_equal.Id.Uid.Table.create () } + { compiled = Type_equal.Id.Uid.Table.create () + ; side_loaded = Type_equal.Id.Uid.Table.create () + } let find t k = match Hashtbl.find t k with None -> failwith "key not found" | Some x -> x -let lookup_compiled : type a b n m. - (a, b, n, m) Tag.tag -> (a, b, n, m) Compiled.t = +let lookup_compiled : + type a b n m. (a, b, n, m) Tag.tag -> (a, b, n, m) Compiled.t = fun t -> let (T (other_id, d)) = find univ.compiled (Type_equal.Id.uid t) in let T = Type_equal.Id.same_witness_exn t other_id in d -let lookup_side_loaded : type a b n m. - (a, b, n, m) Tag.tag -> (a, b, n, m) Side_loaded.t = +let lookup_side_loaded : + type a b n m. (a, b, n, m) Tag.tag -> (a, b, n, m) Side_loaded.t = fun t -> let (T (other_id, d)) = find univ.side_loaded (Type_equal.Id.uid t) in let T = Type_equal.Id.same_witness_exn t other_id in @@ -247,8 +265,8 @@ let lookup_basic : type a b n m. (a, b, n, m) Tag.t -> (a, b, n, m) Basic.t = | Side_loaded -> Side_loaded.to_basic (lookup_side_loaded t.id) -let lookup_step_domains : type a b n m. - (a, b, n, m) Tag.t -> (Domain.t, m) Vector.t = +let lookup_step_domains : + type a b n m. (a, b, n, m) Tag.t -> (Domain.t, m) Vector.t = fun t -> let f = Vector.map ~f:Domains.h in match t.kind with @@ -257,17 +275,17 @@ let lookup_step_domains : type a b n m. 
| Side_loaded -> ( let t = lookup_side_loaded t.id in match t.ephemeral with - | Some {index= `In_circuit _} | None -> + | Some { index = `In_circuit _ } | None -> failwith __LOC__ - | Some {index= `In_prover k} -> + | Some { index = `In_prover k } -> let a = At_most.to_array (At_most.map k.step_data ~f:(fun (ds, _) -> ds.h)) in Vector.init t.permanent.branches ~f:(fun i -> - try a.(i) with _ -> Domain.Pow_2_roots_of_unity 0 ) ) + try a.(i) with _ -> Domain.Pow_2_roots_of_unity 0) ) -let max_branching : type n1. - (_, _, n1, _) Tag.t -> (module Nat.Add.Intf with type n = n1) = +let max_branching : + type n1. (_, _, n1, _) Tag.t -> (module Nat.Add.Intf with type n = n1) = fun tag -> match tag.kind with | Compiled -> @@ -275,8 +293,8 @@ let max_branching : type n1. | Side_loaded -> (lookup_side_loaded tag.id).permanent.max_branching -let typ : type var value. - (var, value, _, _) Tag.t -> (var, value) Impls.Step.Typ.t = +let typ : + type var value. (var, value, _, _) Tag.t -> (var, value) Impls.Step.Typ.t = fun tag -> match tag.kind with | Compiled -> @@ -284,8 +302,8 @@ let typ : type var value. | Side_loaded -> (lookup_side_loaded tag.id).permanent.typ -let value_to_field_elements : type a. - (_, a, _, _) Tag.t -> a -> Tick.Field.t array = +let value_to_field_elements : + type a. 
(_, a, _, _) Tag.t -> a -> Tick.Field.t array = fun t -> match t.kind with | Compiled -> @@ -302,31 +320,31 @@ let lookup_map (type a b c d) (t : (a, b, c, d) Tag.t) ~self ~default | Some _ -> default | None -> ( - match t.kind with - | Compiled -> - let (T (other_id, d)) = find univ.compiled (Type_equal.Id.uid t.id) in - let T = Type_equal.Id.same_witness_exn t.id other_id in - f (`Compiled d) - | Side_loaded -> - let (T (other_id, d)) = - find univ.side_loaded (Type_equal.Id.uid t.id) - in - let T = Type_equal.Id.same_witness_exn t.id other_id in - f (`Side_loaded d) ) + match t.kind with + | Compiled -> + let (T (other_id, d)) = find univ.compiled (Type_equal.Id.uid t.id) in + let T = Type_equal.Id.same_witness_exn t.id other_id in + f (`Compiled d) + | Side_loaded -> + let (T (other_id, d)) = + find univ.side_loaded (Type_equal.Id.uid t.id) + in + let T = Type_equal.Id.same_witness_exn t.id other_id in + f (`Side_loaded d) ) let add_side_loaded ~name permanent = let id = Type_equal.Id.create ~name sexp_of_opaque in Hashtbl.add_exn univ.side_loaded ~key:(Type_equal.Id.uid id) - ~data:(T (id, {ephemeral= None; permanent})) ; - {Tag.kind= Side_loaded; id} + ~data:(T (id, { ephemeral = None; permanent })) ; + { Tag.kind = Side_loaded; id } -let set_ephemeral {Tag.kind; id} eph = +let set_ephemeral { Tag.kind; id } eph = (match kind with Side_loaded -> () | _ -> failwith "Expected Side_loaded") ; Hashtbl.update univ.side_loaded (Type_equal.Id.uid id) ~f:(function | None -> assert false | Some (T (id, d)) -> - T (id, {d with ephemeral= Some eph}) ) + T (id, { d with ephemeral = Some eph })) let add_exn (type a b c d) (tag : (a, b, c, d) Tag.t) (data : (a, b, c, d) Compiled.t) = diff --git a/src/lib/pickles/unfinalized.ml b/src/lib/pickles/unfinalized.ml index 3b161ff1b45..2c1cb8dd7e2 100644 --- a/src/lib/pickles/unfinalized.ml +++ b/src/lib/pickles/unfinalized.ml @@ -40,8 +40,8 @@ module Constant = struct let beta = chal () in let gamma = chal () in let zeta = 
scalar_chal () in - { deferred_values= - { plonk= + { deferred_values = + { plonk = { ( Plonk_checks.derive_plonk (module Tock.Field) ~shift ~endo:Endo.Wrap_inner_curve.base @@ -51,21 +51,25 @@ module Constant = struct (module Tock.Field) wrap_domains.h ~shifts:Common.tock_shifts ~domain_generator:Tock.Field.domain_generator) - { alpha= Common.Ipa.Wrap.endo_to_field alpha - ; beta= Challenge.Constant.to_tock_field beta - ; gamma= Challenge.Constant.to_tock_field gamma - ; zeta= Common.Ipa.Wrap.endo_to_field zeta } + { alpha = Common.Ipa.Wrap.endo_to_field alpha + ; beta = Challenge.Constant.to_tock_field beta + ; gamma = Challenge.Constant.to_tock_field gamma + ; zeta = Common.Ipa.Wrap.endo_to_field zeta + } Dummy.evals_combined Tock.Field.zero |> fst ) with alpha ; beta ; gamma - ; zeta } - ; combined_inner_product= Shifted_value (tock ()) - ; xi= Scalar_challenge one_chal - ; bulletproof_challenges= Dummy.Ipa.Wrap.challenges - ; b= Shifted_value (tock ()) } - ; should_finalize= false - ; sponge_digest_before_evaluations= Digest.Constant.dummy } + ; zeta + } + ; combined_inner_product = Shifted_value (tock ()) + ; xi = Scalar_challenge one_chal + ; bulletproof_challenges = Dummy.Ipa.Wrap.challenges + ; b = Shifted_value (tock ()) + } + ; should_finalize = false + ; sponge_digest_before_evaluations = Digest.Constant.dummy + } end diff --git a/src/lib/pickles/util.ml b/src/lib/pickles/util.ml index 4f01f1171e2..19a06a9121a 100644 --- a/src/lib/pickles/util.ml +++ b/src/lib/pickles/util.ml @@ -3,12 +3,13 @@ open Pickles_types type m = Abc.Label.t = A | B | C -let rec absorb : type a g1 g1_opt f scalar. +let rec absorb : + type a g1 g1_opt f scalar. 
absorb_field:(f -> unit) -> absorb_scalar:(scalar -> unit) -> g1_to_field_elements:(g1 -> f list) -> mask_g1_opt:(g1_opt -> g1) - -> (a, < scalar: scalar ; g1: g1 ; g1_opt: g1_opt >) Type.t + -> (a, < scalar : scalar ; g1 : g1 ; g1_opt : g1_opt >) Type.t -> a -> unit = fun ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt ty t -> @@ -23,27 +24,27 @@ let rec absorb : type a g1 g1_opt f scalar. t | With_degree_bound -> Array.iter t.unshifted ~f:(fun t -> - absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements - ~mask_g1_opt PC (mask_g1_opt t) ) ; + absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt + PC (mask_g1_opt t)) ; absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt PC (mask_g1_opt t.shifted) | ty1 :: ty2 -> let absorb t = - absorb t ~absorb_field ~absorb_scalar ~g1_to_field_elements - ~mask_g1_opt + absorb t ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt in let t1, t2 = t in absorb ty1 t1 ; absorb ty2 t2 -let ones_vector : type f n. +let ones_vector : + type f n. first_zero:f Snarky_backendless.Cvar.t -> (module Snarky_backendless.Snark_intf.Run with type field = f) -> n Nat.t -> (f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t, n) Vector.t = fun ~first_zero (module Impl) n -> let open Impl in - let rec go : type m. - Boolean.var -> int -> m Nat.t -> (Boolean.var, m) Vector.t = + let rec go : + type m. Boolean.var -> int -> m Nat.t -> (Boolean.var, m) Vector.t = fun value i m -> match m with | Z -> @@ -58,7 +59,7 @@ let ones_vector : type f n. 
let split_last xs = let rec go acc = function - | [x] -> + | [ x ] -> (List.rev acc, x) | x :: xs -> go (x :: acc) xs @@ -79,15 +80,14 @@ let seal (type f) (x : Impl.Field.t) : Impl.Field.t = let open Impl in match Field.to_constant_and_terms x with - | None, [(x, i)] when Field.Constant.(equal x one) -> + | None, [ (x, i) ] when Field.Constant.(equal x one) -> Snarky_backendless.Cvar.Var (Impl.Var.index i) | _ -> let y = exists Field.typ ~compute:As_prover.(fun () -> read_var x) in Field.Assert.equal x y ; y let unsafe_unpack_with_partial_sum (type f) - (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) x ~n - = + (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) x ~n = let open Impl in let res = let length = Field.size_in_bits in diff --git a/src/lib/pickles/verification_key.ml b/src/lib/pickles/verification_key.ml index a848379716f..f18a14bde38 100644 --- a/src/lib/pickles/verification_key.ml +++ b/src/lib/pickles/verification_key.ml @@ -7,7 +7,7 @@ module Data = struct [%%versioned module Stable = struct module V1 = struct - type t = {constraints: int} + type t = { constraints : int } let to_latest = Fn.id end @@ -19,11 +19,12 @@ module Repr = struct module Stable = struct module V1 = struct type t = - { commitments: + { commitments : Backend.Tock.Curve.Affine.Stable.V1.t array Plonk_verification_key_evals.Stable.V1.t - ; step_domains: Domains.Stable.V1.t array - ; data: Data.Stable.V1.t } + ; step_domains : Domains.Stable.V1.t array + ; data : Data.Stable.V1.t + } let to_latest = Fn.id end @@ -34,42 +35,46 @@ end module Stable = struct module V1 = struct type t = - { commitments: + { commitments : Backend.Tock.Curve.Affine.t array Plonk_verification_key_evals.t - ; step_domains: Domains.t array - ; index: Impls.Wrap.Verification_key.t - ; data: Data.t } + ; step_domains : Domains.t array + ; index : Impls.Wrap.Verification_key.t + ; data : Data.t + } [@@deriving fields] let to_latest = Fn.id - let of_repr urs 
{Repr.commitments= c; step_domains; data= d} = + let of_repr urs { Repr.commitments = c; step_domains; data = d } = let t : Impls.Wrap.Verification_key.t = let log2_size = Int.ceil_log2 d.constraints in let d = Domain.Pow_2_roots_of_unity log2_size in let max_quot_size = Common.max_quot_size_int (Domain.size d) in - { domain= - { log_size_of_group= log2_size - ; group_gen= Backend.Tock.Field.domain_generator log2_size } - ; max_poly_size= 1 lsl Nat.to_int Rounds.Wrap.n + { domain = + { log_size_of_group = log2_size + ; group_gen = Backend.Tock.Field.domain_generator log2_size + } + ; max_poly_size = 1 lsl Nat.to_int Rounds.Wrap.n ; max_quot_size ; urs - ; evals= + ; evals = Plonk_verification_key_evals.map c ~f:(fun unshifted -> - { Marlin_plonk_bindings.Types.Poly_comm.shifted= None - ; unshifted= - Array.map unshifted ~f:(fun x -> Or_infinity.Finite x) } ) - ; shifts= Common.tock_shifts ~log2_size } + { Marlin_plonk_bindings.Types.Poly_comm.shifted = None + ; unshifted = + Array.map unshifted ~f:(fun x -> Or_infinity.Finite x) + }) + ; shifts = Common.tock_shifts ~log2_size + } in - {commitments= c; step_domains; data= d; index= t} + { commitments = c; step_domains; data = d; index = t } include Binable.Of_binable (Repr.Stable.V1) (struct type nonrec t = t - let to_binable {commitments; step_domains; data; index= _} = - {Repr.commitments; data; step_domains} + let to_binable { commitments; step_domains; data; index = _ } = + { Repr.commitments; data; step_domains } let of_binable r = of_repr (Backend.Tock.Keypair.load_urs ()) r end) @@ -77,24 +82,25 @@ module Stable = struct end] let dummy_commitments g = - { Plonk_verification_key_evals.sigma_comm_0= g - ; sigma_comm_1= g - ; sigma_comm_2= g - ; ql_comm= g - ; qr_comm= g - ; qo_comm= g - ; qm_comm= g - ; qc_comm= g - ; rcm_comm_0= g - ; rcm_comm_1= g - ; rcm_comm_2= g - ; psm_comm= g - ; add_comm= g - ; mul1_comm= g - ; mul2_comm= g - ; emul1_comm= g - ; emul2_comm= g - ; emul3_comm= g } + { 
Plonk_verification_key_evals.sigma_comm_0 = g + ; sigma_comm_1 = g + ; sigma_comm_2 = g + ; ql_comm = g + ; qr_comm = g + ; qo_comm = g + ; qm_comm = g + ; qc_comm = g + ; rcm_comm_0 = g + ; rcm_comm_1 = g + ; rcm_comm_2 = g + ; psm_comm = g + ; add_comm = g + ; mul1_comm = g + ; mul2_comm = g + ; emul1_comm = g + ; emul2_comm = g + ; emul3_comm = g + } let dummy = lazy @@ -106,7 +112,8 @@ let dummy = in Array.create ~len Backend.Tock.Curve.(to_affine_exn one) in - { Repr.commitments= dummy_commitments g - ; step_domains= [||] - ; data= {constraints= rows} } + { Repr.commitments = dummy_commitments g + ; step_domains = [||] + ; data = { constraints = rows } + } |> Stable.Latest.of_repr (Marlin_plonk_bindings.Pasta_fq_urs.create 1)) diff --git a/src/lib/pickles/verify.ml b/src/lib/pickles/verify.ml index 5123b74e172..68660154574 100644 --- a/src/lib/pickles/verify.ml +++ b/src/lib/pickles/verify.ml @@ -37,19 +37,23 @@ let verify_heterogenous (ts : Instance.t list) = in let in_circuit_plonks = List.map ts - ~f:(fun (T - ( _max_branching - , _statement - , key - , app_state - , T - { statement - ; prev_x_hat= (x_hat1, _) as prev_x_hat - ; prev_evals= evals } )) + ~f:(fun + (T + ( _max_branching + , _statement + , key + , app_state + , T + { statement + ; prev_x_hat = (x_hat1, _) as prev_x_hat + ; prev_evals = evals + } )) -> Timer.start __LOC__ ; let statement = - {statement with pass_through= {statement.pass_through with app_state}} + { statement with + pass_through = { statement.pass_through with app_state } + } in let open Pairing_marlin_types in let open Types.Dlog_based.Proof_state in @@ -58,10 +62,11 @@ let verify_heterogenous (ts : Instance.t list) = in Timer.clock __LOC__ ; let { Deferred_values.xi - ; plonk= plonk0 + ; plonk = plonk0 ; combined_inner_product ; which_branch - ; bulletproof_challenges } = + ; bulletproof_challenges + } = Deferred_values.map_challenges ~f:Challenge.Constant.to_tick_field ~scalar:sc statement.proof_state.deferred_values in @@ 
-86,7 +91,11 @@ let verify_heterogenous (ts : Instance.t list) = (module Tick.Field) step_domains.h ~shifts:Common.tick_shifts ~domain_generator:Backend.Tick.Field.domain_generator) - {zeta; beta= chal plonk0.beta; gamma= chal plonk0.gamma; alpha} + { zeta + ; beta = chal plonk0.beta + ; gamma = chal plonk0.gamma + ; alpha + } (Plonk_checks.evals_of_split_evals (module Tick.Field) evals ~rounds:(Nat.to_int Tick.Rounds.n) ~zeta ~zetaw) @@ -94,10 +103,11 @@ let verify_heterogenous (ts : Instance.t list) = in check (lazy "linearization_check", Tick.Field.equal lin1 lin2) ; { p with - zeta= plonk0.zeta - ; alpha= plonk0.alpha - ; beta= plonk0.beta - ; gamma= plonk0.gamma } + zeta = plonk0.zeta + ; alpha = plonk0.alpha + ; beta = plonk0.beta + ; gamma = plonk0.gamma + } in Timer.clock __LOC__ ; let absorb, squeeze = @@ -121,7 +131,7 @@ let verify_heterogenous (ts : Instance.t list) = let absorb_evals (x_hat, e) = let xs, ys = Dlog_plonk_types.Evals.to_vectors e in List.iter - Vector.([|x_hat|] :: (to_list xs @ to_list ys)) + Vector.([| x_hat |] :: (to_list xs @ to_list ys)) ~f:(Array.iter ~f:absorb) in Double.(iter ~f:absorb_evals (map2 prev_x_hat evals ~f:Tuple2.create)) ; @@ -156,14 +166,15 @@ let verify_heterogenous (ts : Instance.t list) = List.iter ~f:(fun (s, x, y) -> check_eq s x y) (* Both these values can actually be omitted from the proof on the wire since we recompute them - anyway. *) + anyway. 
*) [ ("xi", xi, xi_actual) ; ( "combined_inner_product" , Shifted_value.to_field (module Tick.Field) combined_inner_product ~shift:Shifts.tick - , combined_inner_product_actual ) ] ; - plonk ) + , combined_inner_product_actual ) + ] ; + plonk) in let open Backend.Tock.Proof in let open Async in @@ -172,39 +183,35 @@ let verify_heterogenous (ts : Instance.t list) = (List.map ts ~f:(fun (T (_, _, _, _, T t)) -> ( t.statement.proof_state.me_only.sg , Ipa.Step.compute_challenges - t.statement.proof_state.deferred_values.bulletproof_challenges - ) )) + t.statement.proof_state.deferred_values.bulletproof_challenges ))) in Common.time "batch_step_dlog_check" (fun () -> - check (lazy "batch_step_dlog_check", accumulator_check) ) ; + check (lazy "batch_step_dlog_check", accumulator_check)) ; let%map dlog_check = batch_verify (List.map2_exn ts in_circuit_plonks - ~f:(fun (T - ( ( module - Max_branching ) - , ( module - A_value ) - , key - , app_state - , T t )) - plonk + ~f:(fun + (T + ((module Max_branching), (module A_value), key, app_state, T t)) + plonk -> let prepared_statement : _ Types.Dlog_based.Statement.In_circuit.t = - { pass_through= + { pass_through = Common.hash_pairing_me_only ~app_state:A_value.to_field_elements (Reduced_me_only.Pairing_based.prepare ~dlog_plonk_index:key.commitments - {t.statement.pass_through with app_state}) - ; proof_state= + { t.statement.pass_through with app_state }) + ; proof_state = { t.statement.proof_state with - deferred_values= - {t.statement.proof_state.deferred_values with plonk} - ; me_only= + deferred_values = + { t.statement.proof_state.deferred_values with plonk } + ; me_only = Common.hash_dlog_me_only Max_branching.n (Reduced_me_only.Dlog_based.prepare - t.statement.proof_state.me_only) } } + t.statement.proof_state.me_only) + } + } in let input = tock_unpadded_public_input_of_statement prepared_statement @@ -216,14 +223,15 @@ let verify_heterogenous (ts : Instance.t list) = (Vector.to_list (Vector.map2 ~f:(fun g cs -> - { 
Challenge_polynomial.challenges= + { Challenge_polynomial.challenges = Vector.to_array (Ipa.Wrap.compute_challenges cs) - ; commitment= g } ) + ; commitment = g + }) (Vector.extend_exn t.statement.pass_through.sg Max_branching.n (Lazy.force Dummy.Ipa.Wrap.sg)) t.statement.proof_state.me_only.old_bulletproof_challenges)) - ) )) + ))) in Common.time "dlog_check" (fun () -> check (lazy "dlog_check", dlog_check)) ; match result () with @@ -238,4 +246,4 @@ let verify (type a n) (max_branching : (module Nat.Intf with type n = n)) (key : Verification_key.t) (ts : (a * (n, n) Proof.t) list) = verify_heterogenous (List.map ts ~f:(fun (x, p) -> - Instance.T (max_branching, a_value, key, x, p) )) + Instance.T (max_branching, a_value, key, x, p))) diff --git a/src/lib/pickles/wrap.ml b/src/lib/pickles/wrap.ml index a831cd2831e..e5b46b95ffb 100644 --- a/src/lib/pickles/wrap.ml +++ b/src/lib/pickles/wrap.ml @@ -36,8 +36,8 @@ let combined_inner_product (type actual_branching) let a, b = Dlog_plonk_types.Evals.(to_vectors (e : _ array t)) in let v : (Tick.Field.t array, _) Vector.t = Vector.append - (Vector.map b_polys ~f:(fun f -> [|f pt|])) - ([|x_hat|] :: a) (snd pi) + (Vector.map b_polys ~f:(fun f -> [| f pt |])) + ([| x_hat |] :: a) (snd pi) in let open Tick.Field in Pcs_batch.combine_split_evaluations @@ -49,7 +49,7 @@ let combined_inner_product (type actual_branching) ~last:Array.last ~evaluation_point:pt ~shifted_pow:(fun deg x -> Pcs_batch.pow ~one ~mul x - Int.(Max_degree.step - (deg mod Max_degree.step)) ) + Int.(Max_degree.step - (deg mod Max_degree.step))) v b in let open Tick.Field in @@ -66,7 +66,7 @@ let wrap (type actual_branching max_branching max_local_max_branchings) ((module Req) : (max_branching, max_local_max_branchings) Requests.Wrap.t) ~dlog_plonk_index wrap_main to_field_elements ~pairing_vk ~step_domains ~wrap_domains ~pairing_plonk_indices pk - ({statement= prev_statement; prev_evals; proof; index= which_index} : + ({ statement = prev_statement; 
prev_evals; proof; index = which_index } : ( _ , _ , (_, actual_branching) Vector.t @@ -91,16 +91,17 @@ let wrap (type actual_branching max_branching max_local_max_branchings) M.f prev_statement.pass_through in let prev_statement_with_hashes : _ Types.Pairing_based.Statement.t = - { proof_state= + { proof_state = { prev_statement.proof_state with - me_only= + me_only = (* TODO: Careful here... the length of old_buletproof_challenges inside the me_only might not be correct *) Common.hash_pairing_me_only ~app_state:to_field_elements (P.Base.Me_only.Pairing_based.prepare ~dlog_plonk_index - prev_statement.proof_state.me_only) } - ; pass_through= + prev_statement.proof_state.me_only) + } + ; pass_through = (let module M = H1.Map (P.Base.Me_only.Dlog_based.Prepared) @@ -115,9 +116,10 @@ let wrap (type actual_branching max_branching max_local_max_branchings) end) in let module V = H1.To_vector (Digest.Constant) in - V.f Max_local_max_branchings.length (M.f prev_me_only)) } + V.f Max_local_max_branchings.length (M.f prev_me_only)) + } in - let handler (Snarky_backendless.Request.With {request; respond}) = + let handler (Snarky_backendless.Request.With { request; respond }) = let open Req in let k x = respond (Provide x) in match request with @@ -129,7 +131,8 @@ let wrap (type actual_branching max_branching max_local_max_branchings) (P.Base.Me_only.Dlog_based.Prepared) (E01 (Pairing_acc)) (struct - let f : type a. + let f : + type a. 
a P.Base.Me_only.Dlog_based.Prepared.t -> Pairing_acc.t = fun t -> t.sg end) @@ -166,8 +169,7 @@ let wrap (type actual_branching max_branching max_local_max_branchings) in let actual_branching = Vector.length prev_challenges in let lte = - Nat.lte_exn actual_branching - (Length.to_nat Max_local_max_branchings.length) + Nat.lte_exn actual_branching (Length.to_nat Max_local_max_branchings.length) in let o = let sgs = @@ -187,7 +189,8 @@ let wrap (type actual_branching max_branching max_local_max_branchings) Vector.( map2 (Vector.trim sgs lte) prev_challenges ~f:(fun commitment cs -> { Tick.Proof.Challenge_polynomial.commitment - ; challenges= Vector.to_array cs } ) + ; challenges = Vector.to_array cs + }) |> to_list) public_input proof in @@ -198,11 +201,12 @@ let wrap (type actual_branching max_branching max_local_max_branchings) in let sponge_digest_before_evaluations = O.digest_before_evaluations o in let plonk0 = - { Types.Dlog_based.Proof_state.Deferred_values.Plonk.Minimal.alpha= + { Types.Dlog_based.Proof_state.Deferred_values.Plonk.Minimal.alpha = scalar_chal O.alpha - ; beta= O.beta o - ; gamma= O.gamma o - ; zeta= scalar_chal O.zeta } + ; beta = O.beta o + ; gamma = O.gamma o + ; zeta = scalar_chal O.zeta + } in let r = scalar_chal O.u in let xi = scalar_chal O.v in @@ -237,10 +241,11 @@ let wrap (type actual_branching max_branching max_local_max_branchings) ~old_bulletproof_challenges:prev_challenges in let me_only : _ P.Base.Me_only.Dlog_based.t = - { sg= proof.openings.proof.sg - ; old_bulletproof_challenges= + { sg = proof.openings.proof.sg + ; old_bulletproof_challenges = Vector.map prev_statement.proof_state.unfinalized_proofs ~f:(fun t -> - t.deferred_values.bulletproof_challenges ) } + t.deferred_values.bulletproof_challenges) + } in let chal = Challenge.Constant.of_tick_field in let new_bulletproof_challenges, b = @@ -249,7 +254,7 @@ let wrap (type actual_branching max_branching max_local_max_branchings) let x = Scalar_challenge.map 
~f:Challenge.Constant.of_tick_field x in - x ) + x) in let chals = Array.map prechals ~f:(fun x -> Ipa.Step.compute_challenge x) @@ -261,7 +266,8 @@ let wrap (type actual_branching max_branching max_local_max_branchings) b_poly zeta + (r * b_poly zetaw) in let prechals = - Array.map prechals ~f:(fun x -> {Bulletproof_challenge.prechallenge= x}) + Array.map prechals ~f:(fun x -> + { Bulletproof_challenge.prechallenge = x }) in (prechals, b) in @@ -275,7 +281,7 @@ let wrap (type actual_branching max_branching max_local_max_branchings) (module Tick.Field) domain ~shifts:Common.tick_shifts ~domain_generator:Backend.Tick.Field.domain_generator) - {plonk0 with zeta= As_field.zeta; alpha= As_field.alpha} + { plonk0 with zeta = As_field.zeta; alpha = As_field.alpha } (Plonk_checks.evals_of_split_evals (module Tick.Field) proof.openings.evals ~rounds:(Nat.to_int Tick.Rounds.n) @@ -285,25 +291,29 @@ let wrap (type actual_branching max_branching max_local_max_branchings) let shift_value = Shifted_value.of_field (module Tick.Field) ~shift:Shifts.tick in - { proof_state= - { deferred_values= + { proof_state = + { deferred_values = { xi - ; b= shift_value b - ; bulletproof_challenges= + ; b = shift_value b + ; bulletproof_challenges = Vector.of_array_and_length_exn new_bulletproof_challenges Tick.Rounds.n - ; combined_inner_product= shift_value combined_inner_product - ; which_branch= which_index - ; plonk= + ; combined_inner_product = shift_value combined_inner_product + ; which_branch = which_index + ; plonk = { plonk with - zeta= plonk0.zeta - ; alpha= plonk0.alpha - ; beta= chal plonk0.beta - ; gamma= chal plonk0.gamma } } - ; sponge_digest_before_evaluations= + zeta = plonk0.zeta + ; alpha = plonk0.alpha + ; beta = chal plonk0.beta + ; gamma = chal plonk0.gamma + } + } + ; sponge_digest_before_evaluations = Digest.Constant.of_tick_field sponge_digest_before_evaluations - ; me_only } - ; pass_through= prev_statement.proof_state.me_only } + ; me_only + } + ; pass_through = 
prev_statement.proof_state.me_only + } in let me_only_prepared = P.Base.Me_only.Dlog_based.prepare next_statement.proof_state.me_only @@ -312,7 +322,7 @@ let wrap (type actual_branching max_branching max_local_max_branchings) let (T (input, conv)) = Impls.Wrap.input () in Common.time "wrap proof" (fun () -> Impls.Wrap.generate_witness_conv - ~f:(fun {Impls.Wrap.Proof_inputs.auxiliary_inputs; public_inputs} -> + ~f:(fun { Impls.Wrap.Proof_inputs.auxiliary_inputs; public_inputs } -> Backend.Tock.Proof.create_async ~primary:public_inputs ~auxiliary:auxiliary_inputs pk ~message: @@ -322,23 +332,25 @@ let wrap (type actual_branching max_branching max_local_max_branchings) (Lazy.force Dummy.Ipa.Wrap.sg)) me_only_prepared.old_bulletproof_challenges ~f:(fun sg chals -> - { Tock.Proof.Challenge_polynomial.commitment= sg - ; challenges= Vector.to_array chals } ) - |> Vector.to_list ) ) - [input] - (fun x () -> - ( Impls.Wrap.handle (fun () -> (wrap_main (conv x) : unit)) handler - : unit ) ) + { Tock.Proof.Challenge_polynomial.commitment = sg + ; challenges = Vector.to_array chals + }) + |> Vector.to_list )) + [ input ] + (fun x () : unit -> + Impls.Wrap.handle (fun () : unit -> wrap_main (conv x)) handler) () - { pass_through= prev_statement_with_hashes.proof_state.me_only - ; proof_state= + { pass_through = prev_statement_with_hashes.proof_state.me_only + ; proof_state = { next_statement.proof_state with - me_only= - Common.hash_dlog_me_only max_branching me_only_prepared } } - ) + me_only = + Common.hash_dlog_me_only max_branching me_only_prepared + } + }) in - ( { proof= next_proof - ; statement= Types.Dlog_based.Statement.to_minimal next_statement - ; prev_evals= proof.openings.evals - ; prev_x_hat= x_hat } + ( { proof = next_proof + ; statement = Types.Dlog_based.Statement.to_minimal next_statement + ; prev_evals = proof.openings.evals + ; prev_x_hat = x_hat + } : _ P.Base.Dlog_based.t ) diff --git a/src/lib/pickles/wrap_domains.ml b/src/lib/pickles/wrap_domains.ml 
index 7457cfa1410..9cfca0b3ff9 100644 --- a/src/lib/pickles/wrap_domains.ml +++ b/src/lib/pickles/wrap_domains.ml @@ -21,7 +21,7 @@ module Make (A : T0) (A_value : T0) = struct | `Side_loaded _ -> Common.wrap_domains | `Compiled d -> - d.wrap_domains ) + d.wrap_domains) end) in let module M = @@ -30,7 +30,8 @@ module Make (A : T0) (A_value : T0) = struct (H4.T (E04 (Domains))) (struct - let f : type vars values env widths heights. + let f : + type vars values env widths heights. (vars, values, widths, heights) I.t -> (vars, values, widths, heights) H4.T(E04(Domains)).t = fun rule -> M_inner.f rule.prevs @@ -43,9 +44,9 @@ module Make (A : T0) (A_value : T0) = struct (let x = let (T (typ, conv)) = Impls.Wrap.input () in Domain.Pow_2_roots_of_unity - (Int.ceil_log2 (Impls.Wrap.Data_spec.size [typ])) + (Int.ceil_log2 (Impls.Wrap.Data_spec.size [ typ ])) in - {Common.wrap_domains with x}) + { Common.wrap_domains with x }) let f_debug full_signature num_choices choices_length ~self ~choices ~max_branching = @@ -67,7 +68,7 @@ module Make (A : T0) (A_value : T0) = struct ~max_degree:Common.Max_degree.step Fix_domains.rough_domains.h) in - Verification_key.dummy_commitments g )) + Verification_key.dummy_commitments g)) in let prev_domains = prev ~self ~choices in Timer.clock __LOC__ ; diff --git a/src/lib/pickles/wrap_main.ml b/src/lib/pickles/wrap_main.ml index 8ee0bbee09f..d7f8df29bb5 100644 --- a/src/lib/pickles/wrap_main.ml +++ b/src/lib/pickles/wrap_main.ml @@ -70,7 +70,7 @@ let pad_domains (type prev_varss prev_valuess branches n) let dummy_domains = (* TODO: The dummy should really be equal to one of the already present domains. 
*) let d = Domain.Pow_2_roots_of_unity 1 in - {Domains.h= d; x= d} + { Domains.h = d; x = d } in let module M = H4.Map @@ -110,7 +110,7 @@ let pack_statement max_branching = let pack_fq (Shifted_value.Shifted_value (x : Field.t)) = with_label __LOC__ (fun () -> let lo, hi = Util.split_last (Unsafe.unpack_unboolean x) in - [|lo; [hi]|] ) + [| lo; [ hi ] |]) in fun t -> with_label __LOC__ (fun () -> @@ -119,7 +119,7 @@ let pack_statement max_branching = pack_fq (Types.Pairing_based.Statement.spec max_branching Backend.Tock.Rounds.n) - (Types.Pairing_based.Statement.to_data t) ) + (Types.Pairing_based.Statement.to_data t)) let shifts ~log2_size = Common.tock_shifts ~log2_size @@ -167,22 +167,25 @@ let wrap_main Requests.Wrap.((create () : (max_branching, max_local_max_branchings) t)) in Timer.clock __LOC__ ; - let {Full_signature.padded; maxes= (module Max_widths_by_slot)} = + let { Full_signature.padded; maxes = (module Max_widths_by_slot) } = full_signature in Timer.clock __LOC__ ; let main - ({ proof_state= - { deferred_values= + ({ proof_state = + { deferred_values = { plonk ; xi ; combined_inner_product ; b ; which_branch - ; bulletproof_challenges } + ; bulletproof_challenges + } ; sponge_digest_before_evaluations - ; me_only= me_only_digest } - ; pass_through } : + ; me_only = me_only_digest + } + ; pass_through + } : ( _ , _ , _ Shifted_value.t @@ -200,7 +203,7 @@ let wrap_main let typ = typ (module Impl) Max_branching.n (Shifted_value.typ Field.typ) in - exists typ ~request:(fun () -> Req.Proof_state) ) + exists typ ~request:(fun () -> Req.Proof_state)) in let pairing_plonk_index = with_label __LOC__ (fun () -> @@ -208,16 +211,16 @@ let wrap_main (Vector.map (Lazy.force step_keys) ~f: (Plonk_verification_key_evals.map ~f:(function - | [|g|] -> + | [| g |] -> Inner_curve.constant g | xs -> failwithf "Expected commitment to have length 1. 
Got %d" - (Array.length xs) () ))) ) + (Array.length xs) ())))) in let prev_step_accs = with_label __LOC__ (fun () -> exists (Vector.typ Inner_curve.typ Max_branching.n) - ~request:(fun () -> Req.Step_accs) ) + ~request:(fun () -> Req.Step_accs)) in let old_bp_chals = with_label __LOC__ (fun () -> @@ -247,17 +250,17 @@ let wrap_main Z.f Max_widths_by_slot.maxes (exists typ ~request:(fun () -> Req.Old_bulletproof_challenges)) |> M.f - |> V.f Max_widths_by_slot.length ) + |> V.f Max_widths_by_slot.length) in let domainses = with_label __LOC__ (fun () -> - pad_domains (module Max_branching) pi_branches prev_wrap_domains ) + pad_domains (module Max_branching) pi_branches prev_wrap_domains) in let eval_lengths = with_label __LOC__ (fun () -> Vector.map domainses ~f:(fun v -> Commitment_lengths.generic Vector.map - ~h:(Vector.map v ~f:(fun {h; _} -> Domain.size h)) ) ) + ~h:(Vector.map v ~f:(fun { h; _ } -> Domain.size h)))) in let new_bulletproof_challenges = with_label __LOC__ (fun () -> @@ -276,8 +279,7 @@ let wrap_main exists ty ~request:(fun () -> Req.Evals) in let chals = - let (wrap_domains : (_, Max_branching.n) Vector.t), max_quot_sizes - = + let (wrap_domains : (_, Max_branching.n) Vector.t), max_quot_sizes = Vector.map domainses ~f:(fun ds -> let h = Plonk_checks.domain @@ -287,7 +289,7 @@ let wrap_main ( h , ( which_branch , Vector.map ds ~f:(fun d -> - Common.max_quot_size_int (Domain.size d.h) ) ) ) ) + Common.max_quot_size_int (Domain.size d.h)) ) )) |> Vector.unzip in let actual_branchings = @@ -295,7 +297,7 @@ let wrap_main |> Vector.map ~f:(fun branchings_in_slot -> Pseudo.choose (which_branch, branchings_in_slot) - ~f:Field.of_int ) + ~f:Field.of_int) in Vector.mapn [ (* This is padded to max_branching for the benefit of wrapping with dummy unfinalized proofs *) @@ -305,30 +307,35 @@ let wrap_main ; evals ; eval_lengths ; wrap_domains - ; max_quot_sizes ] - ~f:(fun [ { deferred_values - ; sponge_digest_before_evaluations - ; should_finalize } - ; 
old_bulletproof_challenges - ; actual_branching - ; evals - ; eval_lengths - ; domain - ; max_quot_size ] + ; max_quot_sizes + ] + ~f:(fun + [ { deferred_values + ; sponge_digest_before_evaluations + ; should_finalize + } + ; old_bulletproof_challenges + ; actual_branching + ; evals + ; eval_lengths + ; domain + ; max_quot_size + ] -> let sponge = let s = Sponge.create sponge_params in Sponge.absorb s sponge_digest_before_evaluations ; s in + (* the type of the local max branching depends on - which kind of step proof we are wrapping. *) - (* For each i in [0..max_branching-1], we have - Max_local_max_branching, which is the largest - Local_max_branching which is the i^th inner proof of a step proof. - - Need to compute this value from the which_branch. - *) + which kind of step proof we are wrapping. *) + (* For each i in [0..max_branching-1], we have + Max_local_max_branching, which is the largest + Local_max_branching which is the i^th inner proof of a step proof. + + Need to compute this value from the which_branch. + *) let (T (max_local_max_branching, old_bulletproof_challenges)) = old_bulletproof_challenges in @@ -339,12 +346,12 @@ let wrap_main ~max_quot_size ~actual_branching ~domain:(domain :> _ Plonk_checks.plonk_domain) ~sponge deferred_values ~old_bulletproof_challenges - evals ) + evals) in - Boolean.(Assert.any [finalized; not should_finalize]) ; - chals ) + Boolean.(Assert.any [ finalized; not should_finalize ]) ; + chals) in - chals ) + chals) in let prev_statement = let prev_me_onlys = @@ -355,10 +362,11 @@ let wrap_main will have to fix this. 
*) let T = Nat.eq_exn max_local_max_branching Max_branching.n in hash_me_only Max_branching.n - {sg= sacc; old_bulletproof_challenges= chals} ) + { sg = sacc; old_bulletproof_challenges = chals }) in - { Types.Pairing_based.Statement.pass_through= prev_me_onlys - ; proof_state= prev_proof_state } + { Types.Pairing_based.Statement.pass_through = prev_me_onlys + ; proof_state = prev_proof_state + } in let openings_proof = let shift = Shifts.tick in @@ -371,11 +379,11 @@ let wrap_main Shifted_value.of_field (module Backend.Tick.Field) ~shift x with | Shifted_value x -> - x ) + x) ~back:(fun x -> Shifted_value.to_field (module Backend.Tick.Field) - ~shift (Shifted_value x) ) + ~shift (Shifted_value x)) (* When reading, unshift *) |> Typ.transport_var (* For the var, we just wrap the now shifted underlying value. *) @@ -398,46 +406,46 @@ let wrap_main ~h: (Vector.map step_domains ~f:(Fn.compose Domain.size Domains.h)))) - ~request:(fun () -> Req.Messages) ) + ~request:(fun () -> Req.Messages)) in let sponge = Opt.create sponge_params in let xi = with_label __LOC__ (fun () -> Pickles_types.Scalar_challenge.map xi - ~f:(Unsafe.unpack_unboolean ~length:Challenge.length) ) + ~f:(Unsafe.unpack_unboolean ~length:Challenge.length)) in with_label __LOC__ (fun () -> incrementally_verify_proof (module Max_branching) - ~step_widths ~step_domains ~verification_key:pairing_plonk_index - ~xi ~sponge + ~step_widths ~step_domains ~verification_key:pairing_plonk_index ~xi + ~sponge ~public_input:(pack_statement Max_branching.n prev_statement) - ~sg_old:prev_step_accs ~combined_inner_product ~advice:{b} + ~sg_old:prev_step_accs ~combined_inner_product ~advice:{ b } ~messages ~which_branch ~openings_proof ~plonk: (with_label __LOC__ (fun () -> - Types.Dlog_based.Proof_state.Deferred_values.Plonk - .In_circuit + Types.Dlog_based.Proof_state.Deferred_values.Plonk.In_circuit .map_fields plonk (* We don't use a boolean-constraining unpacking function. It's not - necessary with PLONK. 
*) - ~f: - (Shifted_value.map ~f:Other_field.Packed.to_bits_unsafe) - )) ) + necessary with PLONK. *) + ~f:(Shifted_value.map ~f:Other_field.Packed.to_bits_unsafe)))) in Boolean.Assert.is_true bulletproof_success ; Field.Assert.equal me_only_digest (hash_me_only Max_branching.n - { Types.Dlog_based.Proof_state.Me_only.sg= openings_proof.sg - ; old_bulletproof_challenges= new_bulletproof_challenges }) ; + { Types.Dlog_based.Proof_state.Me_only.sg = openings_proof.sg + ; old_bulletproof_challenges = new_bulletproof_challenges + }) ; Field.Assert.equal sponge_digest_before_evaluations sponge_digest_before_evaluations_actual ; Array.iter2_exn bulletproof_challenges_actual (Vector.to_array bulletproof_challenges) - ~f:(fun {prechallenge= Scalar_challenge x1} - ({prechallenge= Scalar_challenge x2} : _ SC.t Bulletproof_challenge.t) - -> Field.Assert.equal x1 x2 ) ; + ~f:(fun + { prechallenge = Scalar_challenge x1 } + ({ prechallenge = Scalar_challenge x2 } : + _ SC.t Bulletproof_challenge.t) + -> Field.Assert.equal x1 x2) ; () in Timer.clock __LOC__ ; diff --git a/src/lib/pickles/wrap_main_inputs.ml b/src/lib/pickles/wrap_main_inputs.ml index 79d50219940..caf08c9a289 100644 --- a/src/lib/pickles/wrap_main_inputs.ml +++ b/src/lib/pickles/wrap_main_inputs.ml @@ -70,8 +70,7 @@ module Sponge = struct end let%test_unit "sponge" = - let module T = Make_sponge.Test (Impl) (Tock_field_sponge.Field) (Sponge.S) - in + let module T = Make_sponge.Test (Impl) (Tock_field_sponge.Field) (Sponge.S) in T.test Tock_field_sponge.params module Input_domain = struct @@ -82,7 +81,7 @@ module Input_domain = struct (Marlin_plonk_bindings.Pasta_fp_urs.lagrange_commitment (Tick.Keypair.load_urs ()) domain_size i) .unshifted.(0) - |> Or_infinity.finite_exn ) ) + |> Or_infinity.finite_exn)) let domain = Domain.Pow_2_roots_of_unity 7 end @@ -161,7 +160,7 @@ module Inner_curve = struct include ( T : module type of T - with module Scaling_precomputation := T.Scaling_precomputation ) + with module 
Scaling_precomputation := T.Scaling_precomputation ) module Scaling_precomputation = T.Scaling_precomputation @@ -169,9 +168,9 @@ module Inner_curve = struct let scale t bs = with_label __LOC__ (fun () -> - T.scale t (Bitstring_lib.Bitstring.Lsb_first.of_list bs) ) + T.scale t (Bitstring_lib.Bitstring.Lsb_first.of_list bs)) - let to_field_elements (x, y) = [x; y] + let to_field_elements (x, y) = [ x; y ] let assert_equal (x1, y1) (x2, y2) = Field.Assert.equal x1 x2 ; Field.Assert.equal y1 y2 diff --git a/src/lib/pickles/wrap_proof.ml b/src/lib/pickles/wrap_proof.ml index c6a2791482e..74ce05faf5e 100644 --- a/src/lib/pickles/wrap_proof.ml +++ b/src/lib/pickles/wrap_proof.ml @@ -3,9 +3,7 @@ open Import open Backend type dlog_opening = - ( Tock.Curve.Affine.t - , Tock.Field.t ) - Types.Pairing_based.Openings.Bulletproof.t + (Tock.Curve.Affine.t, Tock.Field.t) Types.Pairing_based.Openings.Bulletproof.t type t = dlog_opening @@ -35,11 +33,9 @@ let typ : (var, t) Typ.t = (* When storing, make it a shifted value *) match Shifted_value.of_field (module Tock.Field) ~shift x with | Shifted_value x -> - x ) + x) ~back:(fun x -> - Shifted_value.to_field - (module Tock.Field) - ~shift (Shifted_value x) ) + Shifted_value.to_field (module Tock.Field) ~shift (Shifted_value x)) (* When reading, unshift *) |> Typ.transport_var (* For the var, we just wrap the now shifted underlying value. 
*) @@ -50,7 +46,7 @@ let typ : (var, t) Typ.t = ~dummy:Inner_curve.Params.one ~commitment_lengths: (Dlog_plonk_types.Evals.map - ~f:(fun x -> Vector.[x]) + ~f:(fun x -> Vector.[ x ]) (Commitment_lengths.of_domains ~max_degree:Common.Max_degree.wrap Common.wrap_domains)) Inner_curve.typ) diff --git a/src/lib/pickles_base/domains.ml b/src/lib/pickles_base/domains.ml index d48e81d1554..590df985e5b 100644 --- a/src/lib/pickles_base/domains.ml +++ b/src/lib/pickles_base/domains.ml @@ -3,7 +3,7 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type t = {h: Domain.Stable.V1.t; x: Domain.Stable.V1.t} + type t = { h : Domain.Stable.V1.t; x : Domain.Stable.V1.t } [@@deriving fields, sexp, compare] let to_latest = Fn.id diff --git a/src/lib/pickles_base/side_loaded_verification_key.ml b/src/lib/pickles_base/side_loaded_verification_key.ml index ea5becf3e3d..8d0016767da 100644 --- a/src/lib/pickles_base/side_loaded_verification_key.ml +++ b/src/lib/pickles_base/side_loaded_verification_key.ml @@ -123,8 +123,7 @@ module Max_branches_vec = struct end] let () = - let _f : type a. unit -> (a t, (a, Max_branches.n) At_most.t) Type_equal.t - = + let _f : type a. 
unit -> (a t, (a, Max_branches.n) At_most.t) Type_equal.t = fun () -> Type_equal.T in () @@ -134,14 +133,14 @@ module Domains = struct [%%versioned module Stable = struct module V1 = struct - type 'a t = {h: 'a} + type 'a t = { h : 'a } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] - let iter {h} ~f = f h + let iter { h } ~f = f h - let map {h} ~f = {h= f h} + let map { h } ~f = { h = f h } end module Repr = struct @@ -149,11 +148,12 @@ module Repr = struct module Stable = struct module V1 = struct type 'g t = - { step_data: + { step_data : (Domain.Stable.V1.t Domains.Stable.V1.t * Width.Stable.V1.t) Max_branches_vec.Stable.V1.t - ; max_width: Width.Stable.V1.t - ; wrap_index: 'g list Plonk_verification_key_evals.Stable.V1.t } + ; max_width : Width.Stable.V1.t + ; wrap_index : 'g list Plonk_verification_key_evals.Stable.V1.t + } let to_latest = Fn.id end @@ -165,18 +165,19 @@ module Poly = struct module Stable = struct module V1 = struct type ('g, 'vk) t = - { step_data: + { step_data : (Domain.Stable.V1.t Domains.Stable.V1.t * Width.Stable.V1.t) Max_branches_vec.Stable.V1.t - ; max_width: Width.Stable.V1.t - ; wrap_index: 'g list Plonk_verification_key_evals.Stable.V1.t - ; wrap_vk: 'vk option } + ; max_width : Width.Stable.V1.t + ; wrap_index : 'g list Plonk_verification_key_evals.Stable.V1.t + ; wrap_vk : 'vk option + } [@@deriving sexp, equal, compare, hash, yojson] end end] end -let dummy_domains = {Domains.h= Domain.Pow_2_roots_of_unity 0} +let dummy_domains = { Domains.h = Domain.Pow_2_roots_of_unity 0 } let dummy_width = Width.zero @@ -200,7 +201,8 @@ let wrap_index_to_input (type gs f) (g : gs -> f array) = ; g15 ; g16 ; g17 - ; g18 ] = + ; g18 + ] = Plonk_verification_key_evals.to_hlist t in List.map @@ -221,34 +223,34 @@ let wrap_index_to_input (type gs f) (g : gs -> f array) = ; g15 ; g16 ; g17 - ; g18 ] + ; g18 + ] ~f:(Fn.compose field_elements g) |> List.reduce_exn ~f:append let to_input : _ Poly.t -> _ = let open 
Random_oracle_input in let map_reduce t ~f = Array.map t ~f |> Array.reduce_exn ~f:append in - fun {step_data; max_width; wrap_index} -> - ( let bits ~len n = bitstring (bits ~len n) in - let num_branches = - bits ~len:(Nat.to_int Max_branches.Log2.n) (At_most.length step_data) - in - let step_domains, step_widths = - At_most.extend_to_vector step_data - (dummy_domains, dummy_width) - Max_branches.n - |> Vector.unzip - in - List.reduce_exn ~f:append - [ map_reduce (Vector.to_array step_domains) ~f:(fun {Domains.h} -> - map_reduce [|h|] ~f:(fun (Pow_2_roots_of_unity x) -> - bits ~len:max_log2_degree x ) ) - ; Array.map (Vector.to_array step_widths) ~f:Width.to_bits - |> bitstrings - ; bitstring (Width.to_bits max_width) - ; wrap_index_to_input - (Fn.compose Array.of_list - (List.concat_map ~f:(fun (x, y) -> [x; y]))) - wrap_index - ; num_branches ] - : _ Random_oracle_input.t ) + fun { step_data; max_width; wrap_index } : _ Random_oracle_input.t -> + let bits ~len n = bitstring (bits ~len n) in + let num_branches = + bits ~len:(Nat.to_int Max_branches.Log2.n) (At_most.length step_data) + in + let step_domains, step_widths = + At_most.extend_to_vector step_data + (dummy_domains, dummy_width) + Max_branches.n + |> Vector.unzip + in + List.reduce_exn ~f:append + [ map_reduce (Vector.to_array step_domains) ~f:(fun { Domains.h } -> + map_reduce [| h |] ~f:(fun (Pow_2_roots_of_unity x) -> + bits ~len:max_log2_degree x)) + ; Array.map (Vector.to_array step_widths) ~f:Width.to_bits |> bitstrings + ; bitstring (Width.to_bits max_width) + ; wrap_index_to_input + (Fn.compose Array.of_list + (List.concat_map ~f:(fun (x, y) -> [ x; y ]))) + wrap_index + ; num_branches + ] diff --git a/src/lib/pickles_types/abc.ml b/src/lib/pickles_types/abc.ml index 3ecb8b29f52..5675e5f2a9d 100644 --- a/src/lib/pickles_types/abc.ml +++ b/src/lib/pickles_types/abc.ml @@ -3,24 +3,24 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type 'a t = {a: 'a; b: 'a; c: 'a} + 
type 'a t = { a : 'a; b : 'a; c : 'a } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] let typ (type a b f) (g : (a, b, f) Snarky_backendless.Typ.t) : (a t, b t, f) Snarky_backendless.Typ.t = - Snarky_backendless.Typ.of_hlistable [g; g; g] ~var_to_hlist:to_hlist + Snarky_backendless.Typ.of_hlistable [ g; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist -let map {a; b; c} ~f = {a= f a; b= f b; c= f c} +let map { a; b; c } ~f = { a = f a; b = f b; c = f c } -let map2 t1 t2 ~f = {a= f t1.a t2.a; b= f t1.b t2.b; c= f t1.c t2.c} +let map2 t1 t2 ~f = { a = f t1.a t2.a; b = f t1.b t2.b; c = f t1.c t2.c } module Label = struct type t = A | B | C [@@deriving equal] - let all = [A; B; C] + let all = [ A; B; C ] end let abc a b c = function Label.A -> a | B -> b | C -> c diff --git a/src/lib/pickles_types/at_most.ml b/src/lib/pickles_types/at_most.ml index ffdc57c1934..bd078ea07cc 100644 --- a/src/lib/pickles_types/at_most.ml +++ b/src/lib/pickles_types/at_most.ml @@ -26,8 +26,8 @@ let rec to_vector : type a n. (a, n) t -> a Vector.e = function let rec map : type a b n. (a, n) t -> f:(a -> b) -> (b, n) t = fun xs ~f -> match xs with [] -> [] | x :: xs -> f x :: map xs ~f -let rec extend_to_vector : type a n. - (a, n) t -> a -> n Nat.t -> (a, n) Vector.t = +let rec extend_to_vector : type a n. 
(a, n) t -> a -> n Nat.t -> (a, n) Vector.t + = fun v a n -> match (v, n) with | [], Z -> @@ -118,13 +118,12 @@ module At_most_2 = struct include ( With_length (Nat.N2) : - module type of With_length (Nat.N2) with type 'a t := 'a t ) - end - end] + module type of With_length (Nat.N2) with type 'a t := 'a t ) + end + end] - type 'a t = 'a Stable.Latest.t - [@@deriving sexp, equal, compare, hash, yojson] -end + type 'a t = 'a Stable.Latest.t [@@deriving sexp, equal, compare, hash, yojson] + end module At_most_8 = struct [%%versioned_binable @@ -147,10 +146,9 @@ module At_most_8 = struct include ( With_length (Nat.N8) : - module type of With_length (Nat.N8) with type 'a t := 'a t ) - end - end] + module type of With_length (Nat.N8) with type 'a t := 'a t ) + end + end] - type 'a t = 'a Stable.Latest.t - [@@deriving sexp, equal, compare, hash, yojson] -end + type 'a t = 'a Stable.Latest.t [@@deriving sexp, equal, compare, hash, yojson] + end diff --git a/src/lib/pickles_types/dlog_marlin_types.ml b/src/lib/pickles_types/dlog_marlin_types.ml index 7becfbc287e..c1d3385ed42 100644 --- a/src/lib/pickles_types/dlog_marlin_types.ml +++ b/src/lib/pickles_types/dlog_marlin_types.ml @@ -18,19 +18,20 @@ module Evals = struct module Stable = struct module V1 = struct type 'a t = - { w_hat: 'a - ; z_hat_a: 'a - ; z_hat_b: 'a - ; h_1: 'a - ; h_2: 'a - ; h_3: 'a - ; row: 'a Abc.Stable.V1.t - ; col: 'a Abc.Stable.V1.t - ; value: 'a Abc.Stable.V1.t - ; rc: 'a Abc.Stable.V1.t - ; g_1: 'a - ; g_2: 'a - ; g_3: 'a } + { w_hat : 'a + ; z_hat_a : 'a + ; z_hat_b : 'a + ; h_1 : 'a + ; h_2 : 'a + ; h_3 : 'a + ; row : 'a Abc.Stable.V1.t + ; col : 'a Abc.Stable.V1.t + ; value : 'a Abc.Stable.V1.t + ; rc : 'a Abc.Stable.V1.t + ; g_1 : 'a + ; g_2 : 'a + ; g_3 : 'a + } [@@deriving fields, sexp, compare, yojson, hash, equal] end end] @@ -42,42 +43,45 @@ module Evals = struct ; h_1 ; h_2 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= 
value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } ; g_1 ; g_2 - ; g_3 } : + ; g_3 + } : a t) ~(f : a -> b) : b t = - { w_hat= f w_hat - ; z_hat_a= f z_hat_a - ; z_hat_b= f z_hat_b - ; h_1= f h_1 - ; h_2= f h_2 - ; h_3= f h_3 - ; row= {a= f row_a; b= f row_b; c= f row_c} - ; col= {a= f col_a; b= f col_b; c= f col_c} - ; value= {a= f value_a; b= f value_b; c= f value_c} - ; rc= {a= f rc_a; b= f rc_b; c= f rc_c} - ; g_1= f g_1 - ; g_2= f g_2 - ; g_3= f g_3 } + { w_hat = f w_hat + ; z_hat_a = f z_hat_a + ; z_hat_b = f z_hat_b + ; h_1 = f h_1 + ; h_2 = f h_2 + ; h_3 = f h_3 + ; row = { a = f row_a; b = f row_b; c = f row_c } + ; col = { a = f col_a; b = f col_b; c = f col_c } + ; value = { a = f value_a; b = f value_b; c = f value_c } + ; rc = { a = f rc_a; b = f rc_b; c = f rc_c } + ; g_1 = f g_1 + ; g_2 = f g_2 + ; g_3 = f g_3 + } let map2 (type a b c) (t1 : a t) (t2 : b t) ~(f : a -> b -> c) : c t = - { w_hat= f t1.w_hat t2.w_hat - ; z_hat_a= f t1.z_hat_a t2.z_hat_a - ; z_hat_b= f t1.z_hat_b t2.z_hat_b - ; h_1= f t1.h_1 t2.h_1 - ; h_2= f t1.h_2 t2.h_2 - ; h_3= f t1.h_3 t2.h_3 - ; row= Abc.map2 t1.row t2.row ~f - ; col= Abc.map2 t1.col t2.col ~f - ; value= Abc.map2 t1.value t2.value ~f - ; rc= Abc.map2 t1.rc t2.rc ~f - ; g_1= f t1.g_1 t2.g_1 - ; g_2= f t1.g_2 t2.g_2 - ; g_3= f t1.g_3 t2.g_3 } + { w_hat = f t1.w_hat t2.w_hat + ; z_hat_a = f t1.z_hat_a t2.z_hat_a + ; z_hat_b = f t1.z_hat_b t2.z_hat_b + ; h_1 = f t1.h_1 t2.h_1 + ; h_2 = f t1.h_2 t2.h_2 + ; h_3 = f t1.h_3 t2.h_3 + ; row = Abc.map2 t1.row t2.row ~f + ; col = Abc.map2 t1.col t2.col ~f + ; value = Abc.map2 t1.value t2.value ~f + ; rc = Abc.map2 t1.rc t2.rc ~f + ; g_1 = f t1.g_1 t2.g_1 + ; g_2 = f t1.g_2 t2.g_2 + ; g_3 = f t1.g_3 t2.g_3 + } let to_vectors { w_hat @@ -86,13 +90,14 @@ module Evals = struct ; h_1 ; h_2 ; 
h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } ; g_1 ; g_2 - ; g_3 } = + ; g_3 + } = Vector. ( [ w_hat ; z_hat_a @@ -111,8 +116,9 @@ module Evals = struct ; value_c ; rc_a ; rc_b - ; rc_c ] - , [g_1; g_2; g_3] ) + ; rc_c + ] + , [ g_1; g_2; g_3 ] ) let of_vectors (( [ w_hat @@ -132,8 +138,9 @@ module Evals = struct ; value_c ; rc_a ; rc_b - ; rc_c ] - , [g_1; g_2; g_3] ) : + ; rc_c + ] + , [ g_1; g_2; g_3 ] ) : ('a, _) Vector.t * ('a, _) Vector.t) : 'a t = { w_hat ; z_hat_a @@ -141,26 +148,27 @@ module Evals = struct ; h_1 ; h_2 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } ; g_1 ; g_2 - ; g_3 } + ; g_3 + } - let typ (lengths : int t) (g : ('a, 'b, 'f) Snarky_backendless.Typ.t) - ~default : ('a array t, 'b array t, 'f) Snarky_backendless.Typ.t = + let typ (lengths : int t) (g : ('a, 'b, 'f) Snarky_backendless.Typ.t) ~default + : ('a array t, 'b array t, 'f) Snarky_backendless.Typ.t = let v ls = Vector.map ls ~f:(fun length -> let t = Snarky_backendless.Typ.array ~length g in { t with - store= + store = (fun arr -> t.store (Array.append arr - (Array.create ~len:(length - Array.length arr) default)) - ) } ) + (Array.create ~len:(length - Array.length arr) default))) + }) in let t = let l1, l2 = to_vectors lengths in @@ -176,11 +184,12 @@ module Openings = struct module Stable = struct module V1 = struct type ('g, 'fq) t = - { lr: ('g * 'g) 
Pc_array.Stable.V1.t - ; z_1: 'fq - ; z_2: 'fq - ; delta: 'g - ; sg: 'g } + { lr : ('g * 'g) Pc_array.Stable.V1.t + ; z_1 : 'fq + ; z_2 : 'fq + ; delta : 'g + ; sg : 'g + } [@@deriving sexp, compare, yojson, hash, equal, hlist] end end] @@ -188,7 +197,7 @@ module Openings = struct let typ fq g ~length = let open Snarky_backendless.Typ in of_hlistable - [array ~length (g * g); fq; fq; g; g] + [ array ~length (g * g); fq; fq; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -197,11 +206,12 @@ module Openings = struct module Stable = struct module V1 = struct type ('g, 'fq, 'fqv) t = - { proof: ('g, 'fq) Bulletproof.Stable.V1.t - ; evals: + { proof : ('g, 'fq) Bulletproof.Stable.V1.t + ; evals : 'fqv Evals.Stable.V1.t * 'fqv Evals.Stable.V1.t - * 'fqv Evals.Stable.V1.t } + * 'fqv Evals.Stable.V1.t + } [@@deriving sexp, compare, yojson, hash, equal, hlist] end end] @@ -212,7 +222,8 @@ module Openings = struct let triple x = tuple3 x x x in of_hlistable [ Bulletproof.typ fq g ~length:bulletproof_rounds - ; triple (Evals.typ ~default:dummy_group_element commitment_lengths g) ] + ; triple (Evals.typ ~default:dummy_group_element commitment_lengths g) + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -222,7 +233,7 @@ module Poly_comm = struct include Dlog_plonk_types.Poly_comm.With_degree_bound let typ ?(array = Snarky_backendless.Typ.array) g ~length = - Snarky_backendless.Typ.of_hlistable [array ~length g; g] + Snarky_backendless.Typ.of_hlistable [ array ~length g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -247,39 +258,40 @@ module Messages = struct module Stable = struct module V1 = struct type ('g, 'fq) t = - { w_hat: 'g Without_degree_bound.Stable.V1.t - ; z_hat_a: 'g Without_degree_bound.Stable.V1.t - ; z_hat_b: 'g Without_degree_bound.Stable.V1.t - ; gh_1: + { w_hat : 'g 
Without_degree_bound.Stable.V1.t + ; z_hat_a : 'g Without_degree_bound.Stable.V1.t + ; z_hat_b : 'g Without_degree_bound.Stable.V1.t + ; gh_1 : 'g With_degree_bound.Stable.V1.t * 'g Without_degree_bound.Stable.V1.t - ; sigma_gh_2: + ; sigma_gh_2 : 'fq * ( 'g With_degree_bound.Stable.V1.t * 'g Without_degree_bound.Stable.V1.t ) - ; sigma_gh_3: + ; sigma_gh_3 : 'fq * ( 'g With_degree_bound.Stable.V1.t - * 'g Without_degree_bound.Stable.V1.t ) } + * 'g Without_degree_bound.Stable.V1.t ) + } [@@deriving sexp, compare, yojson, fields, hash, equal, hlist] end end] - let typ (type n) fq g ~dummy - ~(commitment_lengths : (int, n) Vector.t Evals.t) = + let typ (type n) fq g ~dummy ~(commitment_lengths : (int, n) Vector.t Evals.t) + = let open Snarky_backendless.Typ in - let {Evals.w_hat; z_hat_a; z_hat_b; h_1; h_2; h_3; g_1; g_2; g_3; _} = + let { Evals.w_hat; z_hat_a; z_hat_b; h_1; h_2; h_3; g_1; g_2; g_3; _ } = commitment_lengths in let array ~length elt = let typ = Snarky_backendless.Typ.array ~length elt in { typ with - store= + store = (fun a -> let n = Array.length a in if n > length then failwithf "Expected %d <= %d" n length () ; - typ.store (Array.append a (Array.create ~len:(length - n) dummy)) - ) } + typ.store (Array.append a (Array.create ~len:(length - n) dummy))) + } in let wo n = array ~length:(Vector.reduce_exn n ~f:Int.max) g in let w n = @@ -291,7 +303,8 @@ module Messages = struct ; wo z_hat_b ; w g_1 * wo h_1 ; fq * (w g_2 * wo h_2) - ; fq * (w g_3 * wo h_3) ] + ; fq * (w g_3 * wo h_3) + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -301,8 +314,9 @@ module Proof = struct module Stable = struct module V1 = struct type ('g, 'fq, 'fqv) t = - { messages: ('g, 'fq) Messages.Stable.V1.t - ; openings: ('g, 'fq, 'fqv) Openings.Stable.V1.t } + { messages : ('g, 'fq) Messages.Stable.V1.t + ; openings : ('g, 'fq, 'fqv) Openings.Stable.V1.t + } [@@deriving sexp, compare, yojson, hash, equal] end end] diff 
--git a/src/lib/pickles_types/dlog_plonk_types.ml b/src/lib/pickles_types/dlog_plonk_types.ml index f21184453d3..e43467827c5 100644 --- a/src/lib/pickles_types/dlog_plonk_types.ml +++ b/src/lib/pickles_types/dlog_plonk_types.ml @@ -3,11 +3,12 @@ open Core_kernel let padded_array_typ ~length ~dummy elt = let typ = Snarky_backendless.Typ.array ~length elt in { typ with - store= + store = (fun a -> let n = Array.length a in if n > length then failwithf "Expected %d <= %d" n length () ; - typ.store (Array.append a (Array.create ~len:(length - n) dummy)) ) } + typ.store (Array.append a (Array.create ~len:(length - n) dummy))) + } module Pc_array = struct [%%versioned @@ -27,52 +28,62 @@ module Evals = struct module Stable = struct module V1 = struct type 'a t = - {l: 'a; r: 'a; o: 'a; z: 'a; t: 'a; f: 'a; sigma1: 'a; sigma2: 'a} + { l : 'a + ; r : 'a + ; o : 'a + ; z : 'a + ; t : 'a + ; f : 'a + ; sigma1 : 'a + ; sigma2 : 'a + } [@@deriving fields, sexp, compare, yojson, hash, equal] end end] - let map (type a b) ({l; r; o; z; t; f= f'; sigma1; sigma2} : a t) + let map (type a b) ({ l; r; o; z; t; f = f'; sigma1; sigma2 } : a t) ~(f : a -> b) : b t = - { l= f l - ; r= f r - ; o= f o - ; z= f z - ; t= f t - ; f= f f' - ; sigma1= f sigma1 - ; sigma2= f sigma2 } + { l = f l + ; r = f r + ; o = f o + ; z = f z + ; t = f t + ; f = f f' + ; sigma1 = f sigma1 + ; sigma2 = f sigma2 + } let map2 (type a b c) (t1 : a t) (t2 : b t) ~(f : a -> b -> c) : c t = - { l= f t1.l t2.l - ; r= f t1.r t2.r - ; o= f t1.o t2.o - ; z= f t1.z t2.z - ; t= f t1.t t2.t - ; f= f t1.f t2.f - ; sigma1= f t1.sigma1 t2.sigma1 - ; sigma2= f t1.sigma2 t2.sigma2 } - - let to_vectors {l; r; o; z; t; f; sigma1; sigma2} = - (Vector.[l; r; o; z; f; sigma1; sigma2], Vector.[t]) + { l = f t1.l t2.l + ; r = f t1.r t2.r + ; o = f t1.o t2.o + ; z = f t1.z t2.z + ; t = f t1.t t2.t + ; f = f t1.f t2.f + ; sigma1 = f t1.sigma1 t2.sigma1 + ; sigma2 = f t1.sigma2 t2.sigma2 + } + + let to_vectors { l; r; o; z; t; f; 
sigma1; sigma2 } = + (Vector.[ l; r; o; z; f; sigma1; sigma2 ], Vector.[ t ]) let of_vectors - (([l; r; o; z; f; sigma1; sigma2] : ('a, _) Vector.t), Vector.[t]) : 'a t - = - {l; r; o; z; t; f; sigma1; sigma2} + (([ l; r; o; z; f; sigma1; sigma2 ] : ('a, _) Vector.t), Vector.[ t ]) : + 'a t = + { l; r; o; z; t; f; sigma1; sigma2 } - let typ (lengths : int t) (g : ('a, 'b, 'f) Snarky_backendless.Typ.t) - ~default : ('a array t, 'b array t, 'f) Snarky_backendless.Typ.t = + let typ (lengths : int t) (g : ('a, 'b, 'f) Snarky_backendless.Typ.t) ~default + : ('a array t, 'b array t, 'f) Snarky_backendless.Typ.t = let v ls = Vector.map ls ~f:(fun length -> let t = Snarky_backendless.Typ.array ~length g in { t with - store= + store = (fun arr -> t.store (Array.append arr - (Array.create ~len:(length - Array.length arr) default)) - ) } ) + (Array.create ~len:(length - Array.length arr) default))) + }) in let t = let l1, l2 = to_vectors lengths in @@ -88,11 +99,12 @@ module Openings = struct module Stable = struct module V1 = struct type ('g, 'fq) t = - { lr: ('g * 'g) Pc_array.Stable.V1.t - ; z_1: 'fq - ; z_2: 'fq - ; delta: 'g - ; sg: 'g } + { lr : ('g * 'g) Pc_array.Stable.V1.t + ; z_1 : 'fq + ; z_2 : 'fq + ; delta : 'g + ; sg : 'g + } [@@deriving sexp, compare, yojson, hash, equal, hlist] end end] @@ -100,7 +112,7 @@ module Openings = struct let typ fq g ~length = let open Snarky_backendless.Typ in of_hlistable - [array ~length (g * g); fq; fq; g; g] + [ array ~length (g * g); fq; fq; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -109,8 +121,9 @@ module Openings = struct module Stable = struct module V1 = struct type ('g, 'fq, 'fqv) t = - { proof: ('g, 'fq) Bulletproof.Stable.V1.t - ; evals: 'fqv Evals.Stable.V1.t * 'fqv Evals.Stable.V1.t } + { proof : ('g, 'fq) Bulletproof.Stable.V1.t + ; evals : 'fqv Evals.Stable.V1.t * 'fqv Evals.Stable.V1.t + } [@@deriving sexp, compare, yojson, hash, equal, hlist] end 
end] @@ -121,7 +134,8 @@ module Openings = struct let double x = tuple2 x x in of_hlistable [ Bulletproof.typ fq g ~length:bulletproof_rounds - ; double (Evals.typ ~default:dummy_group_element commitment_lengths g) ] + ; double (Evals.typ ~default:dummy_group_element commitment_lengths g) + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -132,13 +146,13 @@ module Poly_comm = struct module Stable = struct module V1 = struct type 'g_opt t = - {unshifted: 'g_opt Pc_array.Stable.V1.t; shifted: 'g_opt} + { unshifted : 'g_opt Pc_array.Stable.V1.t; shifted : 'g_opt } [@@deriving sexp, compare, yojson, hlist, hash, equal] end end] - let map {unshifted; shifted} ~f = - {unshifted= Array.map ~f unshifted; shifted= f shifted} + let map { unshifted; shifted } ~f = + { unshifted = Array.map ~f unshifted; shifted = f shifted } let padded_array_typ0 = padded_array_typ @@ -146,19 +160,18 @@ module Poly_comm = struct let open Snarky_backendless.Typ in let typ = array ~length (tuple2 bool elt) in { typ with - store= + store = (fun a -> let a = Array.map a ~f:(fun x -> (true, x)) in let n = Array.length a in if n > length then failwithf "Expected %d <= %d" n length () ; typ.store - (Array.append a (Array.create ~len:(length - n) (false, dummy))) - ) - ; read= + (Array.append a (Array.create ~len:(length - n) (false, dummy)))) + ; read = (fun a -> let open Snarky_backendless.Typ_monads.Read.Let_syntax in let%map a = typ.read a in - Array.filter_map a ~f:(fun (b, g) -> if b then Some g else None) ) + Array.filter_map a ~f:(fun (b, g) -> if b then Some g else None)) } let typ (type f g g_var bool_var) @@ -173,11 +186,11 @@ module Poly_comm = struct | Or_infinity.Infinity -> (false, dummy_group_element) | Finite x -> - (true, x) ) + (true, x)) ~back:(fun (b, x) -> if b then Infinity else Finite x) in let arr = padded_array_typ0 ~length ~dummy:Or_infinity.Infinity g_inf in - of_hlistable [arr; g_inf] ~var_to_hlist:to_hlist 
~var_of_hlist:of_hlist + of_hlistable [ arr; g_inf ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -201,11 +214,12 @@ module Messages = struct module Stable = struct module V1 = struct type ('g, 'g_opt) t = - { l_comm: 'g Without_degree_bound.Stable.V1.t - ; r_comm: 'g Without_degree_bound.Stable.V1.t - ; o_comm: 'g Without_degree_bound.Stable.V1.t - ; z_comm: 'g Without_degree_bound.Stable.V1.t - ; t_comm: 'g_opt With_degree_bound.Stable.V1.t } + { l_comm : 'g Without_degree_bound.Stable.V1.t + ; r_comm : 'g Without_degree_bound.Stable.V1.t + ; o_comm : 'g Without_degree_bound.Stable.V1.t + ; z_comm : 'g Without_degree_bound.Stable.V1.t + ; t_comm : 'g_opt With_degree_bound.Stable.V1.t + } [@@deriving sexp, compare, yojson, fields, hash, equal, hlist] end end] @@ -213,7 +227,7 @@ module Messages = struct let typ (type n) g ~dummy ~(commitment_lengths : (int, n) Vector.t Evals.t) ~bool = let open Snarky_backendless.Typ in - let {Evals.l; r; o; z; t; _} = commitment_lengths in + let { Evals.l; r; o; z; t; _ } = commitment_lengths in let array ~length elt = padded_array_typ ~dummy ~length elt in let wo n = array ~length:(Vector.reduce_exn n ~f:Int.max) g in let w n = @@ -222,7 +236,7 @@ module Messages = struct ~dummy_group_element:dummy ~bool in of_hlistable - [wo l; wo r; wo o; wo z; w t] + [ wo l; wo r; wo o; wo z; w t ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -232,8 +246,9 @@ module Proof = struct module Stable = struct module V1 = struct type ('g, 'g_opt, 'fq, 'fqv) t = - { messages: ('g, 'g_opt) Messages.Stable.V1.t - ; openings: ('g, 'fq, 'fqv) Openings.Stable.V1.t } + { messages : ('g, 'g_opt) Messages.Stable.V1.t + ; openings : ('g, 'fq, 'fqv) Openings.Stable.V1.t + } [@@deriving sexp, compare, yojson, hash, equal] end end] @@ -245,10 +260,10 @@ module Shifts = struct module V1 = struct type 'field t = 'field 
Marlin_plonk_bindings_types.Plonk_verification_shifts.t = - {r: 'field; o: 'field} + { r : 'field; o : 'field } [@@deriving sexp, compare, yojson, hash, equal] end end] - let map ~f {r; o} = {r= f r; o= f o} + let map ~f { r; o } = { r = f r; o = f o } end diff --git a/src/lib/pickles_types/higher_kinded_poly.ml b/src/lib/pickles_types/higher_kinded_poly.ml index e0f26d6618e..c324f7e6a87 100644 --- a/src/lib/pickles_types/higher_kinded_poly.ml +++ b/src/lib/pickles_types/higher_kinded_poly.ml @@ -102,8 +102,7 @@ module P2 = struct end module T (M : T2) : - S with type ('a1, 'a2) p = ('a1, 'a2) M.t and type witness = W(M).t = - struct + S with type ('a1, 'a2) p = ('a1, 'a2) M.t and type witness = W(M).t = struct type ('a1, 'a2) p = ('a1, 'a2) M.t type witness = W(M).t @@ -151,8 +150,8 @@ module P3 = struct module T (M : T3) : S - with type ('a1, 'a2, 'a3) p = ('a1, 'a2, 'a3) M.t - and type witness = W(M).t = struct + with type ('a1, 'a2, 'a3) p = ('a1, 'a2, 'a3) M.t + and type witness = W(M).t = struct type ('a1, 'a2, 'a3) p = ('a1, 'a2, 'a3) M.t type witness = W(M).t @@ -201,8 +200,8 @@ module P4 = struct module T (M : T4) : S - with type ('a1, 'a2, 'a3, 'a4) p = ('a1, 'a2, 'a3, 'a4) M.t - and type witness = W(M).t = struct + with type ('a1, 'a2, 'a3, 'a4) p = ('a1, 'a2, 'a3, 'a4) M.t + and type witness = W(M).t = struct type ('a1, 'a2, 'a3, 'a4) p = ('a1, 'a2, 'a3, 'a4) M.t type witness = W(M).t @@ -254,8 +253,8 @@ module P5 = struct module T (M : T5) : S - with type ('a1, 'a2, 'a3, 'a4, 'a5) p = ('a1, 'a2, 'a3, 'a4, 'a5) M.t - and type witness = W(M).t = struct + with type ('a1, 'a2, 'a3, 'a4, 'a5) p = ('a1, 'a2, 'a3, 'a4, 'a5) M.t + and type witness = W(M).t = struct type ('a1, 'a2, 'a3, 'a4, 'a5) p = ('a1, 'a2, 'a3, 'a4, 'a5) M.t type witness = W(M).t @@ -306,7 +305,7 @@ let%test_module "Higher_kinded_poly" = module Poly_int_5 = P5.T (Int_5) - let ints = [1; 2; 3; 4; 5] + let ints = [ 1; 2; 3; 4; 5 ] let poly_ints_1 = List.map ~f:Poly_int_1.to_poly ints @@ 
-337,7 +336,7 @@ let%test_module "Higher_kinded_poly" = module Poly_option = P1.T (Option) - let options = [Some 1; None; None; Some 4; Some 5] + let options = [ Some 1; None; None; Some 4; Some 5 ] let poly_options = List.map ~f:Poly_option.to_poly options @@ -357,7 +356,7 @@ let%test_module "Higher_kinded_poly" = module Poly_ignore_2 = P2.T (Ignore_2) - let num_unit_tuples = [(1, ()); (2, ()); (3, ()); (4, ())] + let num_unit_tuples = [ (1, ()); (2, ()); (3, ()); (4, ()) ] let ignore_1s : (int, _, _) P2.t list = List.map ~f:Poly_ignore_1.to_poly num_unit_tuples diff --git a/src/lib/pickles_types/hlist.ml b/src/lib/pickles_types/hlist.ml index 2854e5866eb..ee3ea49fe41 100644 --- a/src/lib/pickles_types/hlist.ml +++ b/src/lib/pickles_types/hlist.ml @@ -70,7 +70,7 @@ module H1 = struct module Iter (F : T1) (C : sig - val f : 'a F.t -> unit + val f : 'a F.t -> unit end) = struct let rec f : type a. a T(F).t -> unit = function @@ -81,7 +81,8 @@ module H1 = struct end module Of_vector (X : T0) = struct - let rec f : type xs length. + let rec f : + type xs length. (xs, length) Length.t -> (X.t, length) Vector.t -> xs T(E01(X)).t = fun l1 v -> match (l1, v) with Z, [] -> [] | S n1, x :: xs -> x :: f n1 xs @@ -90,7 +91,7 @@ module H1 = struct module Map (F : T1) (G : T1) (C : sig - val f : 'a F.t -> 'a G.t + val f : 'a F.t -> 'a G.t end) = struct let rec f : type a. a T(F).t -> a T(G).t = function @@ -104,7 +105,7 @@ module H1 = struct module Fold (F : T1) (X : T0) (C : sig - val f : X.t -> 'a F.t -> X.t + val f : X.t -> 'a F.t -> X.t end) = struct let rec f : type a. init:X.t -> a T(F).t -> X.t = @@ -115,9 +116,9 @@ module H1 = struct module Map_reduce (F : T1) (X : T0) (C : sig - val reduce : X.t -> X.t -> X.t + val reduce : X.t -> X.t -> X.t - val map : 'a F.t -> X.t + val map : 'a F.t -> X.t end) = struct let rec f : type a. X.t -> a T(F).t -> X.t = @@ -133,7 +134,8 @@ module H1 = struct end module To_vector (X : T0) = struct - let rec f : type xs length. 
+ let rec f : + type xs length. (xs, length) Length.t -> xs T(E01(X)).t -> (X.t, length) Vector.t = fun l1 v -> match (l1, v) with Z, [] -> [] | S n1, x :: xs -> x :: f n1 xs @@ -146,11 +148,7 @@ module H1 = struct module Zip (F : T1) (G : T1) = struct let rec f : type a. a T(F).t -> a T(G).t -> a T(Tuple2(F)(G)).t = fun xs ys -> - match (xs, ys) with - | [], [] -> - [] - | x :: xs, y :: ys -> - (x, y) :: f xs ys + match (xs, ys) with [], [] -> [] | x :: xs, y :: ys -> (x, y) :: f xs ys end module Typ (Impl : sig @@ -159,11 +157,11 @@ module H1 = struct (F : T1) (Var : T1) (Val : T1) (C : sig - val f : - 'a F.t -> ('a Var.t, 'a Val.t, Impl.field) Snarky_backendless.Typ.t + val f : 'a F.t -> ('a Var.t, 'a Val.t, Impl.field) Snarky_backendless.Typ.t end) = struct - let rec f : type xs. + let rec f : + type xs. xs T(F).t -> (xs T(Var).t, xs T(Val).t, Impl.field) Snarky_backendless.Typ.t = let transport, transport_var, tuple2, unit = @@ -182,8 +180,8 @@ module H1 = struct ~back:(fun (x, xs) -> x :: xs) | [] -> let there _ = () in - transport (unit ()) ~there ~back:(fun () -> ([] : _ T(Val).t)) - |> transport_var ~there ~back:(fun () -> ([] : _ T(Var).t)) + transport (unit ()) ~there ~back:(fun () : _ T(Val).t -> []) + |> transport_var ~there ~back:(fun () : _ T(Var).t -> []) end end @@ -211,20 +209,16 @@ module H2 = struct end module Zip (F : T2) (G : T2) = struct - let rec f : type a b. - (a, b) T(F).t -> (a, b) T(G).t -> (a, b) T(Tuple2(F)(G)).t = + let rec f : + type a b. (a, b) T(F).t -> (a, b) T(G).t -> (a, b) T(Tuple2(F)(G)).t = fun xs ys -> - match (xs, ys) with - | [], [] -> - [] - | x :: xs, y :: ys -> - (x, y) :: f xs ys + match (xs, ys) with [], [] -> [] | x :: xs, y :: ys -> (x, y) :: f xs ys end module Map (F : T2) (G : T2) (C : sig - val f : ('a, 'b) F.t -> ('a, 'b) G.t + val f : ('a, 'b) F.t -> ('a, 'b) G.t end) = struct let rec f : type a b. 
(a, b) T(F).t -> (a, b) T(G).t = function @@ -277,8 +271,9 @@ module H3_3 = struct ('a1, 'a2, 'a3, 's1, 's2, 's3) F.t * ('b1, 'b2, 'b3, 's1, 's2, 's3) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3, 's1, 's2, 's3) t - let rec length : type t1 t2 t3 e1 e2 e3. - (t1, t2, t3, e1, e2, e3) t -> t1 Length.n = function + let rec length : + type t1 t2 t3 e1 e2 e3. (t1, t2, t3, e1, e2, e3) t -> t1 Length.n = + function | [] -> T (Z, Z) | _ :: xs -> @@ -299,8 +294,9 @@ module H3_4 = struct * ('b1, 'b2, 'b3, 's1, 's2, 's3, 's4) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3, 's1, 's2, 's3, 's4) t - let rec length : type t1 t2 t3 e1 e2 e3 e4. - (t1, t2, t3, e1, e2, e3, e4) t -> t1 Length.n = function + let rec length : + type t1 t2 t3 e1 e2 e3 e4. (t1, t2, t3, e1, e2, e3, e4) t -> t1 Length.n + = function | [] -> T (Z, Z) | _ :: xs -> @@ -320,8 +316,8 @@ module H2_1 = struct ('a1, 'a2, 's) F.t * ('b1, 'b2, 's) t -> ('a1 * 'b1, 'a2 * 'b2, 's) t - let rec length : type tail1 tail2 e. (tail1, tail2, e) t -> tail1 Length.n - = function + let rec length : type tail1 tail2 e. (tail1, tail2, e) t -> tail1 Length.n = + function | [] -> T (Z, Z) | _ :: xs -> @@ -331,7 +327,7 @@ module H2_1 = struct module Iter (F : T3) (C : sig - val f : ('a, 'b, 'c) F.t -> unit + val f : ('a, 'b, 'c) F.t -> unit end) = struct let rec f : type a b c. (a, b, c) T(F).t -> unit = function @@ -344,7 +340,7 @@ module H2_1 = struct module Map_ (F : T3) (G : T3) (Env : sig - type t + type t end) (C : sig val f : ('a, 'b, Env.t) F.t -> ('a, 'b, Env.t) G.t end) = @@ -361,7 +357,7 @@ module H2_1 = struct module Map (F : T3) (G : T3) (C : sig - val f : ('a, 'b, 'c) F.t -> ('a, 'b, 'c) G.t + val f : ('a, 'b, 'c) F.t -> ('a, 'b, 'c) G.t end) = struct let f : type a b c. (a, b, c) T(F).t -> (a, b, c) T(G).t = @@ -379,18 +375,16 @@ module H2_1 = struct end module Zip (F : T3) (G : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. 
(a, b, c) T(F).t -> (a, b, c) T(G).t -> (a, b, c) T(Tuple2(F)(G)).t = fun xs ys -> - match (xs, ys) with - | [], [] -> - [] - | x :: xs, y :: ys -> - (x, y) :: f xs ys + match (xs, ys) with [], [] -> [] | x :: xs, y :: ys -> (x, y) :: f xs ys end module Zip3 (F1 : T3) (F2 : T3) (F3 : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. (a, b, c) T(F1).t -> (a, b, c) T(F2).t -> (a, b, c) T(F3).t @@ -404,7 +398,8 @@ module H2_1 = struct end module Unzip3 (F1 : T3) (F2 : T3) (F3 : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. (a, b, c) T(Tuple3(F1)(F2)(F3)).t -> (a, b, c) T(F1).t * (a, b, c) T(F2).t * (a, b, c) T(F3).t = fun ts -> @@ -417,7 +412,8 @@ module H2_1 = struct end module Zip4 (F1 : T3) (F2 : T3) (F3 : T3) (F4 : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. (a, b, c) T(F1).t -> (a, b, c) T(F2).t -> (a, b, c) T(F3).t @@ -432,7 +428,8 @@ module H2_1 = struct end module Zip5 (F1 : T3) (F2 : T3) (F3 : T3) (F4 : T3) (F5 : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. (a, b, c) T(F1).t -> (a, b, c) T(F2).t -> (a, b, c) T(F3).t @@ -448,7 +445,8 @@ module H2_1 = struct end module Of_vector (X : T0) = struct - let rec f : type e xs ys length. + let rec f : + type e xs ys length. (xs, length) Length.t -> (ys, length) Length.t -> (X.t, length) Vector.t @@ -462,7 +460,8 @@ module H2_1 = struct end module To_vector (X : T0) = struct - let rec f : type e xs ys length. + let rec f : + type e xs ys length. (xs, length) Length.t -> (xs, ys, e) T(E03(X)).t -> (X.t, length) Vector.t = @@ -482,8 +481,9 @@ module H3 = struct ('a1, 'a2, 'a3) F.t * ('b1, 'b2, 'b3) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3) t - let rec length : type tail1 tail2 tail3. - (tail1, tail2, tail3) t -> tail1 Length.n = function + let rec length : + type tail1 tail2 tail3. 
(tail1, tail2, tail3) t -> tail1 Length.n = + function | [] -> T (Z, Z) | _ :: xs -> @@ -492,7 +492,8 @@ module H3 = struct end module To_vector (X : T0) = struct - let rec f : type a b c length. + let rec f : + type a b c length. (a, length) Length.t -> (a, b, c) T(E03(X)).t -> (X.t, length) Vector.t = fun l1 v -> @@ -500,14 +501,11 @@ module H3 = struct end module Zip (F : T3) (G : T3) = struct - let rec f : type a b c. + let rec f : + type a b c. (a, b, c) T(F).t -> (a, b, c) T(G).t -> (a, b, c) T(Tuple2(F)(G)).t = fun xs ys -> - match (xs, ys) with - | [], [] -> - [] - | x :: xs, y :: ys -> - (x, y) :: f xs ys + match (xs, ys) with [], [] -> [] | x :: xs, y :: ys -> (x, y) :: f xs ys end module Fst = struct @@ -517,7 +515,7 @@ module H3 = struct module Map1_to_H1 (F : T3) (G : T1) (C : sig - val f : ('a, 'b, 'c) F.t -> 'a G.t + val f : ('a, 'b, 'c) F.t -> 'a G.t end) = struct let rec f : type a b c. (a, b, c) T(F).t -> a H1.T(G).t = function @@ -531,7 +529,7 @@ module H3 = struct module Map2_to_H1 (F : T3) (G : T1) (C : sig - val f : ('a, 'b, 'c) F.t -> 'b G.t + val f : ('a, 'b, 'c) F.t -> 'b G.t end) = struct let rec f : type a b c. (a, b, c) T(F).t -> b H1.T(G).t = function @@ -545,7 +543,7 @@ module H3 = struct module Map (F : T3) (G : T3) (C : sig - val f : ('a, 'b, 'c) F.t -> ('a, 'b, 'c) G.t + val f : ('a, 'b, 'c) F.t -> ('a, 'b, 'c) G.t end) = struct let rec f : type a b c. (a, b, c) T(F).t -> (a, b, c) T(G).t = function @@ -568,7 +566,8 @@ module H4 = struct ('a1, 'a2, 'a3, 'a4) F.t * ('b1, 'b2, 'b3, 'b4) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3, 'a4 * 'b4) t - let rec length : type tail1 tail2 tail3 tail4. + let rec length : + type tail1 tail2 tail3 tail4. (tail1, tail2, tail3, tail4) t -> tail1 Length.n = function | [] -> T (Z, Z) @@ -580,7 +579,7 @@ module H4 = struct module Fold (F : T4) (X : T0) (C : sig - val f : X.t -> _ F.t -> X.t + val f : X.t -> _ F.t -> X.t end) = struct let rec f : type a b c d. 
init:X.t -> (a, b, c, d) T(F).t -> X.t = @@ -590,7 +589,7 @@ module H4 = struct module Iter (F : T4) (C : sig - val f : _ F.t -> unit + val f : _ F.t -> unit end) = struct let rec f : type a b c d. (a, b, c, d) T(F).t -> unit = @@ -600,7 +599,7 @@ module H4 = struct module Map (F : T4) (G : T4) (C : sig - val f : ('a, 'b, 'c, 'd) F.t -> ('a, 'b, 'c, 'd) G.t + val f : ('a, 'b, 'c, 'd) F.t -> ('a, 'b, 'c, 'd) G.t end) = struct let rec f : type a b c d. (a, b, c, d) T(F).t -> (a, b, c, d) T(G).t = @@ -613,7 +612,8 @@ module H4 = struct end module To_vector (X : T0) = struct - let rec f : type a b c d length. + let rec f : + type a b c d length. (a, length) Length.t -> (a, b, c, d) T(E04(X)).t -> (X.t, length) Vector.t = @@ -626,20 +626,18 @@ module H4 = struct end module Zip (F : T4) (G : T4) = struct - let rec f : type a b c d. + let rec f : + type a b c d. (a, b, c, d) T(F).t -> (a, b, c, d) T(G).t -> (a, b, c, d) T(Tuple2(F)(G)).t = fun xs ys -> - match (xs, ys) with - | [], [] -> - [] - | x :: xs, y :: ys -> - (x, y) :: f xs ys + match (xs, ys) with [], [] -> [] | x :: xs, y :: ys -> (x, y) :: f xs ys end module Length_1_to_2 (F : T4) = struct - let rec f : type n xs ys a b. + let rec f : + type n xs ys a b. (xs, ys, a, b) T(F).t -> (xs, n) Length.t -> (ys, n) Length.t = fun xs n -> match (xs, n) with [], Z -> Z | _ :: xs, S n -> S (f xs n) end @@ -650,18 +648,19 @@ module H4 = struct (F : T4) (Var : T3) (Val : T3) (C : sig - val f : - ('var, 'value, 'n1, 'n2) F.t - -> ( ('var, 'n1, 'n2) Var.t - , ('value, 'n1, 'n2) Val.t - , Impl.field ) - Snarky_backendless.Typ.t + val f : + ('var, 'value, 'n1, 'n2) F.t + -> ( ('var, 'n1, 'n2) Var.t + , ('value, 'n1, 'n2) Val.t + , Impl.field ) + Snarky_backendless.Typ.t end) = struct let transport, transport_var, tuple2, unit = Snarky_backendless.Typ.(transport, transport_var, tuple2, unit) - let rec f : type vars values ns1 ns2. + let rec f : + type vars values ns1 ns2. 
(vars, values, ns1, ns2) T(F).t -> ( (vars, ns1, ns2) H3.T(Var).t , (values, ns1, ns2) H3.T(Val).t @@ -671,8 +670,8 @@ module H4 = struct match ts with | [] -> let there _ = () in - transport (unit ()) ~there ~back:(fun () -> ([] : _ H3.T(Val).t)) - |> transport_var ~there ~back:(fun () -> ([] : _ H3.T(Var).t)) + transport (unit ()) ~there ~back:(fun () : _ H3.T(Val).t -> []) + |> transport_var ~there ~back:(fun () : _ H3.T(Var).t -> []) | t :: ts -> transport (tuple2 (C.f t) (f ts)) @@ -695,8 +694,9 @@ module H4_2 = struct ('a1, 'a2, 'a3, 'a4, 's1, 's2) F.t * ('b1, 'b2, 'b3, 'b4, 's1, 's2) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3, 'a4 * 'b4, 's1, 's2) t - let rec length : type t1 t2 t3 t4 e1 e2. - (t1, t2, t3, t4, e1, e2) t -> t1 Length.n = function + let rec length : + type t1 t2 t3 t4 e1 e2. (t1, t2, t3, t4, e1, e2) t -> t1 Length.n = + function | [] -> T (Z, Z) | _ :: xs -> @@ -715,8 +715,9 @@ struct ('a1, 'a2, 'a3, 's) F.t * ('b1, 'b2, 'b3, 's) t -> ('a1 * 'b1, 'a2 * 'b2, 'a3 * 'b3, 's) t - let rec length : type tail1 tail2 tail3 e. - (tail1, tail2, tail3, e) t -> tail1 Length.n = function + let rec length : + type tail1 tail2 tail3 e. (tail1, tail2, tail3, e) t -> tail1 Length.n = + function | [] -> T (Z, Z) | _ :: xs -> @@ -730,11 +731,11 @@ module HlistId = Hlist0.HlistId module Map_1_specific (F : T2) (G : T2) (C : sig - type b1 + type b1 - type b2 + type b2 - val f : ('a, b1) F.t -> ('a, b2) G.t + val f : ('a, b1) F.t -> ('a, b2) G.t end) = struct let rec f : type a. (a, C.b1) Hlist_1(F).t -> (a, C.b2) Hlist_1(G).t = @@ -763,7 +764,7 @@ type 'ns max = (module Max_s with type ns = 'ns) let rec max : type n ns. (n * ns) H1.T(Nat).t -> (n * ns) max = fun xs -> match xs with - | [x] -> + | [ x ] -> let module M = struct type nonrec ns = n * ns @@ -771,7 +772,7 @@ let rec max : type n ns. 
(n * ns) H1.T(Nat).t -> (n * ns) max = let n = x - let p : (_, _) Hlist_1(Lte).t = [Lte.refl x] + let p : (_, _) Hlist_1(Lte).t = [ Lte.refl x ] end in (module M : Max_s with type ns = n * ns) | x :: (_ :: _ as ys) -> ( @@ -842,7 +843,8 @@ module Maxes = struct let m (type length) (vs : (_, length) Vector.t) : (module S with type length = length) = - let g : type length ns. + let g : + type length ns. ns H1.T(Nat).t -> (ns, length) Length.t -> (module S with type length = length) = @@ -862,7 +864,8 @@ module Maxes = struct end module Lengths = struct - let rec extract : type prev_varss ns env. + let rec extract : + type prev_varss ns env. (prev_varss, ns, env) H2_1.T(E23(Length)).t -> ns H1.T(Nat).t = function | [] -> [] diff --git a/src/lib/pickles_types/hlist0.ml b/src/lib/pickles_types/hlist0.ml index 44eb9d80146..3ee3fa5bf38 100644 --- a/src/lib/pickles_types/hlist0.ml +++ b/src/lib/pickles_types/hlist0.ml @@ -11,8 +11,8 @@ module Length = struct type 'xs n = T : 'n Nat.t * ('xs, 'n) t -> 'xs n - let rec contr : type xs n m. - (xs, n) t -> (xs, m) t -> (n, m) Core_kernel.Type_equal.t = + let rec contr : + type xs n m. 
(xs, n) t -> (xs, m) t -> (n, m) Core_kernel.Type_equal.t = fun t1 t2 -> match (t1, t2) with | Z, Z -> diff --git a/src/lib/pickles_types/matrix_evals.ml b/src/lib/pickles_types/matrix_evals.ml index b4981e2c26f..524fe3ff216 100644 --- a/src/lib/pickles_types/matrix_evals.ml +++ b/src/lib/pickles_types/matrix_evals.ml @@ -4,20 +4,21 @@ module H_list = Snarky_backendless.H_list [%%versioned module Stable = struct module V1 = struct - type 'a t = {row: 'a; col: 'a; value: 'a; rc: 'a} + type 'a t = { row : 'a; col : 'a; value : 'a; rc : 'a } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] let typ g = - Snarky_backendless.Typ.of_hlistable [g; g; g; g] ~var_to_hlist:to_hlist + Snarky_backendless.Typ.of_hlistable [ g; g; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist -let map {row; col; value; rc} ~f = - {row= f row; col= f col; value= f value; rc= f rc} +let map { row; col; value; rc } ~f = + { row = f row; col = f col; value = f value; rc = f rc } let map2 t1 t2 ~f = - { row= f t1.row t2.row - ; col= f t1.col t2.col - ; value= f t1.value t2.value - ; rc= f t1.rc t2.rc } + { row = f t1.row t2.row + ; col = f t1.col t2.col + ; value = f t1.value t2.value + ; rc = f t1.rc t2.rc + } diff --git a/src/lib/pickles_types/nat.ml b/src/lib/pickles_types/nat.ml index 840c5676dd7..7510905b429 100644 --- a/src/lib/pickles_types/nat.ml +++ b/src/lib/pickles_types/nat.ml @@ -185,8 +185,9 @@ end open Core_kernel -let rec compare : type n m. - n t -> m t -> [`Lte of (n, m) Lte.t | `Gt of (n, m) Lte.t Not.t] = +let rec compare : + type n m. n t -> m t -> [ `Lte of (n, m) Lte.t | `Gt of (n, m) Lte.t Not.t ] + = fun n m -> match (n, m) with | Z, _ -> @@ -194,17 +195,17 @@ let rec compare : type n m. | S _, Z -> `Gt (function _ -> .) 
| S n, S m -> ( - match compare n m with - | `Lte pi -> - `Lte (S pi) - | `Gt gt -> - `Gt (function S pi -> gt pi) ) + match compare n m with + | `Lte pi -> + `Lte (S pi) + | `Gt gt -> + `Gt (function S pi -> gt pi) ) let lte_exn n m = match compare n m with `Lte pi -> pi | `Gt _gt -> failwith "lte_exn" -let rec gt_implies_gte : type n m. - n nat -> m nat -> (n, m) Lte.t Not.t -> (m, n) Lte.t = +let rec gt_implies_gte : + type n m. n nat -> m nat -> (n, m) Lte.t Not.t -> (m, n) Lte.t = fun n m not_lte -> match (n, m) with | Z, _ -> @@ -214,11 +215,12 @@ let rec gt_implies_gte : type n m. | S n, S m -> S (gt_implies_gte n m (fun pi -> not_lte (S pi))) -let rec eq : type n m. +let rec eq : + type n m. n nat -> m nat - -> [`Equal of (n, m) Type_equal.t | `Not_equal of (n, m) Type_equal.t Not.t] - = + -> [ `Equal of (n, m) Type_equal.t + | `Not_equal of (n, m) Type_equal.t Not.t ] = fun n m -> match (n, m) with | Z, Z -> @@ -228,11 +230,11 @@ let rec eq : type n m. | Z, S _ -> `Not_equal (function _ -> .) | S n, S m -> ( - match eq n m with - | `Equal T -> - `Equal T - | `Not_equal f -> - `Not_equal (function T -> f T) ) + match eq n m with + | `Equal T -> + `Equal T + | `Not_equal f -> + `Not_equal (function T -> f T) ) let eq_exn : type n m. 
n nat -> m nat -> (n, m) Type_equal.t = fun n m -> diff --git a/src/lib/pickles_types/pairing_marlin_types.ml b/src/lib/pickles_types/pairing_marlin_types.ml index b0f0ec030b0..c52428253a9 100644 --- a/src/lib/pickles_types/pairing_marlin_types.ml +++ b/src/lib/pickles_types/pairing_marlin_types.ml @@ -7,19 +7,20 @@ module Evals = struct module Stable = struct module V1 = struct type 'a t = - { w_hat: 'a - ; z_hat_a: 'a - ; z_hat_b: 'a - ; g_1: 'a - ; h_1: 'a - ; g_2: 'a - ; h_2: 'a - ; g_3: 'a - ; h_3: 'a - ; row: 'a Abc.Stable.V1.t - ; col: 'a Abc.Stable.V1.t - ; value: 'a Abc.Stable.V1.t - ; rc: 'a Abc.Stable.V1.t } + { w_hat : 'a + ; z_hat_a : 'a + ; z_hat_b : 'a + ; g_1 : 'a + ; h_1 : 'a + ; g_2 : 'a + ; h_2 : 'a + ; g_3 : 'a + ; h_3 : 'a + ; row : 'a Abc.Stable.V1.t + ; col : 'a Abc.Stable.V1.t + ; value : 'a Abc.Stable.V1.t + ; rc : 'a Abc.Stable.V1.t + } [@@deriving fields, sexp, compare, yojson] end end] @@ -35,13 +36,14 @@ module Evals = struct ; h_1 ; h_2 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } ; g_1 ; g_2 - ; g_3 } = + ; g_3 + } = Vector. [ w_hat ; z_hat_a @@ -63,7 +65,8 @@ module Evals = struct ; rc_c ; g_1 ; g_2 - ; g_3 ] + ; g_3 + ] let of_vector Vector. 
@@ -87,7 +90,8 @@ module Evals = struct ; rc_c ; g_1 ; g_2 - ; g_3 ] = + ; g_3 + ] = { w_hat ; z_hat_a ; z_hat_b @@ -97,10 +101,11 @@ module Evals = struct ; h_2 ; g_3 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} } + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } + } let to_vectors { w_hat @@ -112,13 +117,14 @@ module Evals = struct ; h_2 ; g_3 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} } ~x_hat = + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } + } ~x_hat = Vector. - ( ([x_hat; w_hat; z_hat_a; z_hat_b; h_1], [g_1]) - , ([h_2], [g_2]) + ( ([ x_hat; w_hat; z_hat_a; z_hat_b; h_1 ], [ g_1 ]) + , ([ h_2 ], [ g_2 ]) , ( [ h_3 ; row_a ; row_b @@ -131,8 +137,9 @@ module Evals = struct ; value_c ; rc_a ; rc_b - ; rc_c ] - , [g_3] ) ) + ; rc_c + ] + , [ g_3 ] ) ) let to_combined_vectors { w_hat @@ -144,13 +151,14 @@ module Evals = struct ; h_2 ; g_3 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} } ~x_hat = + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } + } ~x_hat = Vector. 
- ( [x_hat; w_hat; z_hat_a; z_hat_b; g_1; h_1] - , [g_2; h_2] + ( [ x_hat; w_hat; z_hat_a; z_hat_b; g_1; h_1 ] + , [ g_2; h_2 ] , [ g_3 ; h_3 ; row_a @@ -164,12 +172,13 @@ module Evals = struct ; value_c ; rc_a ; rc_b - ; rc_c ] ) + ; rc_c + ] ) let of_vectors Vector.( - ( ([w_hat; z_hat_a; z_hat_b; h_1], [g_1]) - , ([h_2], [g_2]) + ( ([ w_hat; z_hat_a; z_hat_b; h_1 ], [ g_1 ]) + , ([ h_2 ], [ g_2 ]) , ( [ h_3 ; row_a ; row_b @@ -182,8 +191,9 @@ module Evals = struct ; value_c ; rc_a ; rc_b - ; rc_c ] - , [g_3] ) )) = + ; rc_c + ] + , [ g_3 ] ) )) = { w_hat ; z_hat_a ; z_hat_b @@ -193,10 +203,11 @@ module Evals = struct ; h_2 ; g_3 ; h_3 - ; row= {a= row_a; b= row_b; c= row_c} - ; col= {a= col_a; b= col_b; c= col_c} - ; value= {a= value_a; b= value_b; c= value_c} - ; rc= {a= rc_a; b= rc_b; c= rc_c} } + ; row = { a = row_a; b = row_b; c = row_c } + ; col = { a = col_a; b = col_b; c = col_c } + ; value = { a = value_a; b = value_b; c = value_c } + ; rc = { a = rc_a; b = rc_b; c = rc_c } + } let typ fq = let there = to_vector in @@ -232,7 +243,7 @@ module Accumulator = struct module Stable = struct module V1 = struct type 'a t = 'a Shift.Map.t - [@@deriving sexp, version {asserted}, compare] + [@@deriving sexp, version { asserted }, compare] let to_yojson f t = Alist.to_yojson f (Map.to_alist t) @@ -247,15 +258,14 @@ module Accumulator = struct end end] - [%%define_locally - Stable.Latest.(to_yojson, of_yojson)] + [%%define_locally Stable.Latest.(to_yojson, of_yojson)] end [%%versioned module Stable = struct module V1 = struct type ('g, 'unshifted) t = - {shifted_accumulator: 'g; unshifted_accumulators: 'unshifted} + { shifted_accumulator : 'g; unshifted_accumulators : 'unshifted } [@@deriving fields, sexp, yojson, compare, hlist] end end] @@ -273,7 +283,8 @@ module Accumulator = struct Snarky_backendless.Typ.of_hlistable [ g ; Typ.transport (Typ.list ~length:(Set.length shifts) g) ~there ~back - |> Typ.transport_var ~there ~back ] + |> Typ.transport_var ~there ~back 
+ ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -281,21 +292,23 @@ module Accumulator = struct Vector.iter2 (unshifted_accumulators t1) (unshifted_accumulators t2) ~f:g ; on_both t1 t2 g shifted_accumulator - let map {shifted_accumulator; unshifted_accumulators} ~f = - { shifted_accumulator= f shifted_accumulator - ; unshifted_accumulators= Map.map ~f unshifted_accumulators } + let map { shifted_accumulator; unshifted_accumulators } ~f = + { shifted_accumulator = f shifted_accumulator + ; unshifted_accumulators = Map.map ~f unshifted_accumulators + } let map2 t1 t2 ~f = - { shifted_accumulator= f t1.shifted_accumulator t2.shifted_accumulator - ; unshifted_accumulators= + { shifted_accumulator = f t1.shifted_accumulator t2.shifted_accumulator + ; unshifted_accumulators = Int.Map.merge ~f:(fun ~key:_ -> function `Both (x, y) -> Some (f x y) | _ -> - failwith "map2: Key not present in both maps" ) - t1.unshifted_accumulators t2.unshifted_accumulators } + failwith "map2: Key not present in both maps") + t1.unshifted_accumulators t2.unshifted_accumulators + } let accumulate t add ~into = - { shifted_accumulator= add into.shifted_accumulator t.shifted_accumulator - ; unshifted_accumulators= + { shifted_accumulator = add into.shifted_accumulator t.shifted_accumulator + ; unshifted_accumulators = Int.Map.merge into.unshifted_accumulators t.unshifted_accumulators ~f:(fun ~key:_ -> function | `Both (x, y) -> @@ -303,7 +316,8 @@ module Accumulator = struct | `Left x -> Some x | `Right _y -> - failwith "shift not present in accumulating map" ) } + failwith "shift not present in accumulating map") + } end module Opening_check = struct @@ -320,35 +334,35 @@ module Accumulator = struct [%%versioned module Stable = struct module V1 = struct - type 'g t = {r_f_minus_r_v_plus_rz_pi: 'g; r_pi: 'g} + type 'g t = { r_f_minus_r_v_plus_rz_pi : 'g; r_pi : 'g } [@@deriving fields, sexp, compare, yojson, hlist] end end] let typ g = - 
Snarky_backendless.Typ.of_hlistable [g; g] ~var_to_hlist:to_hlist - ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist - ~value_of_hlist:of_hlist + Snarky_backendless.Typ.of_hlistable [ g; g ] ~var_to_hlist:to_hlist + ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist let assert_equal g t1 t2 = let on_both f = on_both t1 t2 f in - List.iter ~f:(on_both g) [r_f_minus_r_v_plus_rz_pi; r_pi] + List.iter ~f:(on_both g) [ r_f_minus_r_v_plus_rz_pi; r_pi ] - let map {r_f_minus_r_v_plus_rz_pi; r_pi} ~f = - {r_f_minus_r_v_plus_rz_pi= f r_f_minus_r_v_plus_rz_pi; r_pi= f r_pi} + let map { r_f_minus_r_v_plus_rz_pi; r_pi } ~f = + { r_f_minus_r_v_plus_rz_pi = f r_f_minus_r_v_plus_rz_pi; r_pi = f r_pi } let map2 t1 t2 ~f = - { r_f_minus_r_v_plus_rz_pi= + { r_f_minus_r_v_plus_rz_pi = f t1.r_f_minus_r_v_plus_rz_pi t2.r_f_minus_r_v_plus_rz_pi - ; r_pi= f t1.r_pi t2.r_pi } + ; r_pi = f t1.r_pi t2.r_pi + } end [%%versioned module Stable = struct module V1 = struct type ('g, 'unshifted) t = - { opening_check: 'g Opening_check.Stable.V1.t - ; degree_bound_checks: ('g, 'unshifted) Degree_bound_checks.Stable.V1.t + { opening_check : 'g Opening_check.Stable.V1.t + ; degree_bound_checks : ('g, 'unshifted) Degree_bound_checks.Stable.V1.t } [@@deriving fields, sexp, compare, yojson, hlist] end @@ -356,7 +370,7 @@ module Accumulator = struct let typ shifts g = Snarky_backendless.Typ.of_hlistable - [Opening_check.typ g; Degree_bound_checks.typ shifts g] + [ Opening_check.typ g; Degree_bound_checks.typ shifts g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -365,35 +379,38 @@ module Accumulator = struct on_both (Opening_check.assert_equal g) opening_check ; on_both (Degree_bound_checks.assert_equal g) degree_bound_checks - let map {opening_check; degree_bound_checks} ~f = - { opening_check= Opening_check.map ~f opening_check - ; degree_bound_checks= Degree_bound_checks.map ~f degree_bound_checks } + let map { opening_check; 
degree_bound_checks } ~f = + { opening_check = Opening_check.map ~f opening_check + ; degree_bound_checks = Degree_bound_checks.map ~f degree_bound_checks + } let map2 t1 t2 ~f = - { opening_check= Opening_check.map2 ~f t1.opening_check t2.opening_check - ; degree_bound_checks= + { opening_check = Opening_check.map2 ~f t1.opening_check t2.opening_check + ; degree_bound_checks = Degree_bound_checks.map2 ~f t1.degree_bound_checks - t2.degree_bound_checks } + t2.degree_bound_checks + } let accumulate t add ~into = - { opening_check= + { opening_check = Opening_check.map2 ~f:add t.opening_check into.opening_check - ; degree_bound_checks= + ; degree_bound_checks = Degree_bound_checks.accumulate t.degree_bound_checks add - ~into:into.degree_bound_checks } + ~into:into.degree_bound_checks + } end module Opening = struct [%%versioned module Stable = struct module V1 = struct - type ('proof, 'values) t = {proof: 'proof; values: 'values} + type ('proof, 'values) t = { proof : 'proof; values : 'values } [@@deriving fields, hlist] end end] let typ proof values = - Snarky_backendless.Typ.of_hlistable [proof; values] ~var_to_hlist:to_hlist + Snarky_backendless.Typ.of_hlistable [ proof; values ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -402,7 +419,7 @@ module Openings = struct module Stable = struct module V1 = struct type ('proof, 'fp) t = - {proofs: 'proof * 'proof * 'proof; evals: 'fp Evals.Stable.V1.t} + { proofs : 'proof * 'proof * 'proof; evals : 'fp Evals.Stable.V1.t } [@@deriving hlist] end end] @@ -410,7 +427,7 @@ module Openings = struct let typ proof fp = let open Snarky_backendless.Typ in of_hlistable - [tuple3 proof proof proof; Evals.typ fp] + [ tuple3 proof proof proof; Evals.typ fp ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -429,12 +446,13 @@ module Messages = struct module Stable = struct module V1 = struct type ('pc, 'fp) t = - { w_hat: 'pc 
- ; z_hat_a: 'pc - ; z_hat_b: 'pc - ; gh_1: 'pc Degree_bounded.Stable.V1.t * 'pc - ; sigma_gh_2: 'fp * ('pc Degree_bounded.Stable.V1.t * 'pc) - ; sigma_gh_3: 'fp * ('pc Degree_bounded.Stable.V1.t * 'pc) } + { w_hat : 'pc + ; z_hat_a : 'pc + ; z_hat_b : 'pc + ; gh_1 : 'pc Degree_bounded.Stable.V1.t * 'pc + ; sigma_gh_2 : 'fp * ('pc Degree_bounded.Stable.V1.t * 'pc) + ; sigma_gh_3 : 'fp * ('pc Degree_bounded.Stable.V1.t * 'pc) + } [@@deriving fields, sexp, compare, yojson, hlist] end end] @@ -443,7 +461,7 @@ module Messages = struct let open Snarky_backendless.Typ in let db = pc * pc in of_hlistable - [pc; pc; pc; db * pc; fp * (db * pc); fp * (db * pc)] + [ pc; pc; pc; db * pc; fp * (db * pc); fp * (db * pc) ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -453,14 +471,14 @@ module Proof = struct module Stable = struct module V1 = struct type ('pc, 'fp, 'openings) t = - {messages: ('pc, 'fp) Messages.Stable.V1.t; openings: 'openings} + { messages : ('pc, 'fp) Messages.Stable.V1.t; openings : 'openings } [@@deriving fields, sexp, compare, yojson, hlist] end end] let typ pc fp openings = Snarky_backendless.Typ.of_hlistable - [Messages.typ pc fp; openings] + [ Messages.typ pc fp; openings ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end diff --git a/src/lib/pickles_types/pcs_batch.ml b/src/lib/pickles_types/pcs_batch.ml index e8588491c53..d98ed7953d6 100644 --- a/src/lib/pickles_types/pcs_batch.ml +++ b/src/lib/pickles_types/pcs_batch.ml @@ -1,9 +1,9 @@ open Core_kernel type ('a, 'n, 'm) t = - {without_degree_bound: 'n Nat.t; with_degree_bound: ('a, 'm) Vector.t} + { without_degree_bound : 'n Nat.t; with_degree_bound : ('a, 'm) Vector.t } -let map t ~f = {t with with_degree_bound= Vector.map t.with_degree_bound ~f} +let map t ~f = { t with with_degree_bound = Vector.map t.with_degree_bound ~f } let num_bits n = Int.floor_log2 n + 1 @@ -19,7 +19,7 @@ 
let%test_unit "num_bits" = go 0 in Quickcheck.test (Int.gen_uniform_incl 0 Int.max_value) ~f:(fun n -> - [%test_eq: int] (num_bits n) (naive n) ) + [%test_eq: int] (num_bits n) (naive n)) let pow ~one ~mul x n = assert (n >= 0) ; @@ -35,7 +35,7 @@ let pow ~one ~mul x n = go one (k - 1) let create ~without_degree_bound ~with_degree_bound = - {without_degree_bound; with_degree_bound} + { without_degree_bound; with_degree_bound } let combine_commitments _t ~scale ~add ~xi (type n) (without_degree_bound : (_, n) Vector.t) with_degree_bound = @@ -46,12 +46,12 @@ let combine_commitments _t ~scale ~add ~xi (type n) let polys = Vector.to_list without_degree_bound @ List.concat_map (Vector.to_list with_degree_bound) - ~f:(fun (unshifted, shifted) -> [unshifted; shifted]) + ~f:(fun (unshifted, shifted) -> [ unshifted; shifted ]) in List.fold_left polys ~init ~f:(fun acc p -> add p (scale acc xi)) let combine_evaluations' (type a n m) - ({without_degree_bound= _; with_degree_bound} : (a, n Nat.s, m) t) + ({ without_degree_bound = _; with_degree_bound } : (a, n Nat.s, m) t) ~shifted_pow ~mul ~add ~one:_ ~evaluation_point ~xi (init :: evals0 : (_, n Nat.s) Vector.t) (evals1 : (_, m) Vector.t) = let evals = @@ -59,7 +59,7 @@ let combine_evaluations' (type a n m) @ List.concat (Vector.to_list (Vector.map2 with_degree_bound evals1 ~f:(fun deg fx -> - [fx; mul (shifted_pow deg evaluation_point) fx] ))) + [ fx; mul (shifted_pow deg evaluation_point) fx ]))) in List.fold_left evals ~init ~f:(fun acc fx -> add fx (mul acc xi)) @@ -86,18 +86,18 @@ let combine_split_commitments _t ~scale_and_add ~init:i ~xi (type n) let flat = List.concat_map (Vector.to_list without_degree_bound) ~f:Array.to_list @ List.concat_map (Vector.to_list with_degree_bound) - ~f:(fun {With_degree_bound.unshifted; shifted} -> - Array.to_list unshifted @ [shifted] ) + ~f:(fun { With_degree_bound.unshifted; shifted } -> + Array.to_list unshifted @ [ shifted ]) in match List.rev flat with | [] -> failwith 
"combine_split_commitments: empty" | init :: comms -> List.fold_left comms ~init:(i init) ~f:(fun acc p -> - scale_and_add ~acc ~xi p ) + scale_and_add ~acc ~xi p) let combine_split_evaluations (type a n m f f') - ({without_degree_bound= _; with_degree_bound} : (a, n, m) t) + ({ without_degree_bound = _; with_degree_bound } : (a, n, m) t) ~(shifted_pow : a -> f' -> f') ~(mul : f -> f' -> f) ~(mul_and_add : acc:f' -> xi:f' -> f -> f') ~(evaluation_point : f') ~init:(i : f -> f') ~(last : f array -> f) ~(xi : f') @@ -109,11 +109,11 @@ let combine_split_evaluations (type a n m f f') (Vector.map2 with_degree_bound evals1 ~f:(fun deg unshifted -> let u = last unshifted in Array.to_list unshifted - @ [mul u (shifted_pow deg evaluation_point)] ))) + @ [ mul u (shifted_pow deg evaluation_point) ]))) in match List.rev flat with | [] -> failwith "combine_split_evaluations: empty" | init :: es -> List.fold_left es ~init:(i init) ~f:(fun acc fx -> - mul_and_add ~acc ~xi fx ) + mul_and_add ~acc ~xi fx) diff --git a/src/lib/pickles_types/plonk_verification_key_evals.ml b/src/lib/pickles_types/plonk_verification_key_evals.ml index 82eeaa14762..d8e25048976 100644 --- a/src/lib/pickles_types/plonk_verification_key_evals.ml +++ b/src/lib/pickles_types/plonk_verification_key_evals.ml @@ -6,24 +6,25 @@ module Stable = struct module V1 = struct type 'comm t = 'comm Marlin_plonk_bindings_types.Plonk_verification_evals.t = - { sigma_comm_0: 'comm - ; sigma_comm_1: 'comm - ; sigma_comm_2: 'comm - ; ql_comm: 'comm - ; qr_comm: 'comm - ; qo_comm: 'comm - ; qm_comm: 'comm - ; qc_comm: 'comm - ; rcm_comm_0: 'comm - ; rcm_comm_1: 'comm - ; rcm_comm_2: 'comm - ; psm_comm: 'comm - ; add_comm: 'comm - ; mul1_comm: 'comm - ; mul2_comm: 'comm - ; emul1_comm: 'comm - ; emul2_comm: 'comm - ; emul3_comm: 'comm } + { sigma_comm_0 : 'comm + ; sigma_comm_1 : 'comm + ; sigma_comm_2 : 'comm + ; ql_comm : 'comm + ; qr_comm : 'comm + ; qo_comm : 'comm + ; qm_comm : 'comm + ; qc_comm : 'comm + ; rcm_comm_0 : 
'comm + ; rcm_comm_1 : 'comm + ; rcm_comm_2 : 'comm + ; psm_comm : 'comm + ; add_comm : 'comm + ; mul1_comm : 'comm + ; mul2_comm : 'comm + ; emul1_comm : 'comm + ; emul2_comm : 'comm + ; emul3_comm : 'comm + } [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] end end] @@ -46,48 +47,51 @@ let map ; mul2_comm ; emul1_comm ; emul2_comm - ; emul3_comm } ~f = - { sigma_comm_0= f sigma_comm_0 - ; sigma_comm_1= f sigma_comm_1 - ; sigma_comm_2= f sigma_comm_2 - ; ql_comm= f ql_comm - ; qr_comm= f qr_comm - ; qo_comm= f qo_comm - ; qm_comm= f qm_comm - ; qc_comm= f qc_comm - ; rcm_comm_0= f rcm_comm_0 - ; rcm_comm_1= f rcm_comm_1 - ; rcm_comm_2= f rcm_comm_2 - ; psm_comm= f psm_comm - ; add_comm= f add_comm - ; mul1_comm= f mul1_comm - ; mul2_comm= f mul2_comm - ; emul1_comm= f emul1_comm - ; emul2_comm= f emul2_comm - ; emul3_comm= f emul3_comm } + ; emul3_comm + } ~f = + { sigma_comm_0 = f sigma_comm_0 + ; sigma_comm_1 = f sigma_comm_1 + ; sigma_comm_2 = f sigma_comm_2 + ; ql_comm = f ql_comm + ; qr_comm = f qr_comm + ; qo_comm = f qo_comm + ; qm_comm = f qm_comm + ; qc_comm = f qc_comm + ; rcm_comm_0 = f rcm_comm_0 + ; rcm_comm_1 = f rcm_comm_1 + ; rcm_comm_2 = f rcm_comm_2 + ; psm_comm = f psm_comm + ; add_comm = f add_comm + ; mul1_comm = f mul1_comm + ; mul2_comm = f mul2_comm + ; emul1_comm = f emul1_comm + ; emul2_comm = f emul2_comm + ; emul3_comm = f emul3_comm + } let map2 t1 t2 ~f = - { sigma_comm_0= f t1.sigma_comm_0 t2.sigma_comm_0 - ; sigma_comm_1= f t1.sigma_comm_1 t2.sigma_comm_1 - ; sigma_comm_2= f t1.sigma_comm_2 t2.sigma_comm_2 - ; ql_comm= f t1.ql_comm t2.ql_comm - ; qr_comm= f t1.qr_comm t2.qr_comm - ; qo_comm= f t1.qo_comm t2.qo_comm - ; qm_comm= f t1.qm_comm t2.qm_comm - ; qc_comm= f t1.qc_comm t2.qc_comm - ; rcm_comm_0= f t1.rcm_comm_0 t2.rcm_comm_0 - ; rcm_comm_1= f t1.rcm_comm_1 t2.rcm_comm_1 - ; rcm_comm_2= f t1.rcm_comm_2 t2.rcm_comm_2 - ; psm_comm= f t1.psm_comm t2.psm_comm - ; add_comm= f t1.add_comm t2.add_comm - ; mul1_comm= f 
t1.mul1_comm t2.mul1_comm - ; mul2_comm= f t1.mul2_comm t2.mul2_comm - ; emul1_comm= f t1.emul1_comm t2.emul1_comm - ; emul2_comm= f t1.emul2_comm t2.emul2_comm - ; emul3_comm= f t1.emul3_comm t2.emul3_comm } + { sigma_comm_0 = f t1.sigma_comm_0 t2.sigma_comm_0 + ; sigma_comm_1 = f t1.sigma_comm_1 t2.sigma_comm_1 + ; sigma_comm_2 = f t1.sigma_comm_2 t2.sigma_comm_2 + ; ql_comm = f t1.ql_comm t2.ql_comm + ; qr_comm = f t1.qr_comm t2.qr_comm + ; qo_comm = f t1.qo_comm t2.qo_comm + ; qm_comm = f t1.qm_comm t2.qm_comm + ; qc_comm = f t1.qc_comm t2.qc_comm + ; rcm_comm_0 = f t1.rcm_comm_0 t2.rcm_comm_0 + ; rcm_comm_1 = f t1.rcm_comm_1 t2.rcm_comm_1 + ; rcm_comm_2 = f t1.rcm_comm_2 t2.rcm_comm_2 + ; psm_comm = f t1.psm_comm t2.psm_comm + ; add_comm = f t1.add_comm t2.add_comm + ; mul1_comm = f t1.mul1_comm t2.mul1_comm + ; mul2_comm = f t1.mul2_comm t2.mul2_comm + ; emul1_comm = f t1.emul1_comm t2.emul1_comm + ; emul2_comm = f t1.emul2_comm t2.emul2_comm + ; emul3_comm = f t1.emul3_comm t2.emul3_comm + } let typ g = Snarky_backendless.Typ.of_hlistable - [g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g] + [ g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist diff --git a/src/lib/pickles_types/scalar_challenge.ml b/src/lib/pickles_types/scalar_challenge.ml index a9a271bebc3..89e1e150029 100644 --- a/src/lib/pickles_types/scalar_challenge.ml +++ b/src/lib/pickles_types/scalar_challenge.ml @@ -14,7 +14,6 @@ let create t = Scalar_challenge t let typ f = let there (Scalar_challenge x) = x in let back x = Scalar_challenge x in - Snarky_backendless.Typ.( - transport_var (transport f ~there ~back) ~there ~back) + Snarky_backendless.Typ.(transport_var (transport f ~there ~back) ~there ~back) let map (Scalar_challenge x) ~f = Scalar_challenge (f x) diff --git a/src/lib/pickles_types/shifted_value.ml b/src/lib/pickles_types/shifted_value.ml index 5a0b86895a0..8ee5c5e7ea2 100644 
--- a/src/lib/pickles_types/shifted_value.ml +++ b/src/lib/pickles_types/shifted_value.ml @@ -36,8 +36,7 @@ end] let typ f = let there (Shifted_value x) = x in let back x = Shifted_value x in - Snarky_backendless.Typ.( - transport_var (transport f ~there ~back) ~there ~back) + Snarky_backendless.Typ.(transport_var (transport f ~there ~back) ~there ~back) let map (Shifted_value x) ~f = Shifted_value (f x) diff --git a/src/lib/pickles_types/vector.ml b/src/lib/pickles_types/vector.ml index 2ef7c57d4fb..cf9dfdb287d 100644 --- a/src/lib/pickles_types/vector.ml +++ b/src/lib/pickles_types/vector.ml @@ -18,8 +18,7 @@ include T let rec iter : type a n. (a, n) t -> f:(a -> unit) -> unit = fun t ~f -> match t with [] -> () | x :: xs -> f x ; iter xs ~f -let rec iter2 : type a b n. (a, n) t -> (b, n) t -> f:(a -> b -> unit) -> unit - = +let rec iter2 : type a b n. (a, n) t -> (b, n) t -> f:(a -> b -> unit) -> unit = fun t1 t2 ~f -> match (t1, t2) with | [], [] -> @@ -27,8 +26,8 @@ let rec iter2 : type a b n. (a, n) t -> (b, n) t -> f:(a -> b -> unit) -> unit | x :: xs, y :: ys -> f x y ; iter2 xs ys ~f -let rec map2 : type a b c n. - (a, n) t -> (b, n) t -> f:(a -> b -> c) -> (c, n) t = +let rec map2 : type a b c n. (a, n) t -> (b, n) t -> f:(a -> b -> c) -> (c, n) t + = fun t1 t2 ~f -> match (t1, t2) with | [], [] -> @@ -36,9 +35,10 @@ let rec map2 : type a b c n. | x :: xs, y :: ys -> f x y :: map2 xs ys ~f -let rec hhead_off : type xs n. - (xs, n s) Hlist0.H1_1(T).t - -> xs Hlist0.HlistId.t * (xs, n) Hlist0.H1_1(T).t = +let rec hhead_off : + type xs n. + (xs, n s) Hlist0.H1_1(T).t -> xs Hlist0.HlistId.t * (xs, n) Hlist0.H1_1(T).t + = fun xss -> match xss with | [] -> @@ -47,7 +47,8 @@ let rec hhead_off : type xs n. let hds, tls = hhead_off xss in (x :: hds, xs :: tls) -let rec mapn : type xs y n. +let rec mapn : + type xs y n. (xs, n) Hlist0.H1_1(T).t -> f:(xs Hlist0.HlistId.t -> y) -> (y, n) t = fun xss ~f -> match xss with @@ -81,7 +82,8 @@ let rec init : type a n. 
int -> n nat -> f:(int -> a) -> (a, n) t = let init n ~f = init 0 n ~f -let rec fold_map : type acc a b n. +let rec fold_map : + type acc a b n. (a, n) t -> f:(acc -> a -> acc * b) -> init:acc -> acc * (b, n) t = fun t ~f ~init -> match t with @@ -118,7 +120,7 @@ let rec of_list : type a. a list -> a e = function let to_sequence : type a n. (a, n) t -> a Sequence.t = fun t -> Sequence.unfold ~init:(T t) ~f:(fun (T t) -> - match t with [] -> None | x :: xs -> Some (x, T xs) ) + match t with [] -> None | x :: xs -> Some (x, T xs)) let rec of_list_and_length_exn : type a n. a list -> n nat -> (a, n) t = fun xs n -> @@ -162,9 +164,9 @@ let rec fold : type acc a n. (a, n) t -> f:(acc -> a -> acc) -> init:acc -> acc let for_all : type a n. (a, n) t -> f:(a -> bool) -> bool = fun v ~f -> - with_return (fun {return} -> + with_return (fun { return } -> iter v ~f:(fun x -> if not (f x) then return false) ; - true ) + true) let foldi t ~f ~init = snd (fold t ~f:(fun (i, acc) x -> (i + 1, f i acc x)) ~init:(0, init)) @@ -347,7 +349,8 @@ module With_length (N : Nat.Intf) = struct let to_list : 'a t -> 'a list = to_list end -let rec typ' : type f var value n. +let rec typ' : + type f var value n. ((var, value, f) Snarky_backendless.Typ.t, n) t -> ((var, n) t, (value, n) t, f) Snarky_backendless.Typ.t = let open Snarky_backendless.Typ in @@ -365,8 +368,9 @@ let rec typ' : type f var value n. let typ elt n = typ' (init n ~f:(fun _ -> elt)) -let rec append : type n m n_m a. - (a, n) t -> (a, m) t -> (n, m, n_m) Nat.Adds.t -> (a, n_m) t = +let rec append : + type n m n_m a. (a, n) t -> (a, m) t -> (n, m, n_m) Nat.Adds.t -> (a, n_m) t + = fun t1 t2 adds -> match (t1, adds) with | [], Z -> @@ -401,8 +405,8 @@ let rec extend_exn : type n m a. (a, n) t -> m Nat.t -> a -> (a, m) t = let extended = extend_exn xs m default in x :: extended -let rec extend : type a n m. - (a, n) t -> (n, m) Nat.Lte.t -> m Nat.t -> a -> (a, m) t = +let rec extend : + type a n m. 
(a, n) t -> (n, m) Nat.Lte.t -> m Nat.t -> a -> (a, m) t = fun v p m default -> match (v, p, m) with | _, Z, Z -> diff --git a/src/lib/pipe_lib/broadcast_pipe.ml b/src/lib/pipe_lib/broadcast_pipe.ml index 5d2ea99542e..e1be7fd1de3 100644 --- a/src/lib/pipe_lib/broadcast_pipe.ml +++ b/src/lib/pipe_lib/broadcast_pipe.ml @@ -2,22 +2,27 @@ open Core_kernel open Async_kernel type 'a t = - { root_pipe: 'a Pipe.Writer.t - ; mutable cache: 'a - ; mutable reader_id: int - ; pipes: 'a Pipe.Writer.t Int.Table.t } + { root_pipe : 'a Pipe.Writer.t + ; mutable cache : 'a + ; mutable reader_id : int + ; pipes : 'a Pipe.Writer.t Int.Table.t + } let create a = let root_r, root_w = Pipe.create () in let t = - {root_pipe= root_w; cache= a; reader_id= 0; pipes= Int.Table.create ()} + { root_pipe = root_w + ; cache = a + ; reader_id = 0 + ; pipes = Int.Table.create () + } in let downstream_flushed_v : unit Ivar.t ref = ref @@ Ivar.create () in let consumer = Pipe.add_consumer root_r ~downstream_flushed:(fun () -> let%map () = Ivar.read !downstream_flushed_v in (* Sub-pipes are never closed without closing the master pipe. *) - `Ok ) + `Ok) in don't_wait_for (Pipe.iter ~flushed:(Consumer consumer) root_r ~f:(fun v -> @@ -25,17 +30,17 @@ let create a = let inner_pipes = Int.Table.data t.pipes in let%bind () = Deferred.List.iter ~how:`Parallel inner_pipes ~f:(fun p -> - Pipe.write p v ) + Pipe.write p v) in Pipe.Consumer.values_sent_downstream consumer ; let%bind () = Deferred.List.iter ~how:`Parallel inner_pipes ~f:(fun p -> - Deferred.ignore_m @@ Pipe.downstream_flushed p ) + Deferred.ignore_m @@ Pipe.downstream_flushed p) in if Ivar.is_full !downstream_flushed_v then [%log' error (Logger.create ())] "Ivar.fill bug is here!" 
; Ivar.fill !downstream_flushed_v () ; - Deferred.unit )) ; + Deferred.unit)) ; (t, t) exception Already_closed of string @@ -49,8 +54,7 @@ let guard_already_closed ~context t f = module Reader = struct type nonrec 'a t = 'a t - let peek t = - guard_already_closed ~context:"Reader.peek" t (fun () -> t.cache) + let peek t = guard_already_closed ~context:"Reader.peek" t (fun () -> t.cache) let fresh_reader_id t = t.reader_id <- t.reader_id + 1 ; @@ -69,7 +73,7 @@ module Reader = struct Int.Table.remove t.pipes reader_id ; b in - d ) + d) (* The sub-pipes have no downstream consumer, so the downstream flushed should always be determined and return `Ok. *) @@ -82,14 +86,14 @@ module Reader = struct Pipe.fold r ~init ~f:(fun acc v -> let%map res = f acc v in Pipe.Consumer.values_sent_downstream consumer ; - res ) ) + res)) let iter t ~f = prepare_pipe t ~default_value:() ~f:(fun r -> let consumer = add_trivial_consumer r in Pipe.iter ~flushed:(Consumer consumer) r ~f:(fun v -> let%map () = f v in - Pipe.Consumer.values_sent_downstream consumer ) ) + Pipe.Consumer.values_sent_downstream consumer)) let iter_until t ~f = let rec loop ~consumer reader = @@ -103,7 +107,7 @@ module Reader = struct in prepare_pipe t ~default_value:() ~f:(fun reader -> let consumer = add_trivial_consumer reader in - loop ~consumer reader ) + loop ~consumer reader) end module Writer = struct @@ -114,13 +118,13 @@ module Writer = struct t.cache <- x ; let%bind () = Pipe.write t.root_pipe x in let%bind _ = Pipe.downstream_flushed t.root_pipe in - Deferred.unit ) + Deferred.unit) let close t = guard_already_closed ~context:"Writer.close" t (fun () -> Pipe.close t.root_pipe ; Int.Table.iter t.pipes ~f:(fun w -> Pipe.close w) ; - Int.Table.clear t.pipes ) + Int.Table.clear t.pipes) end let map t ~f = @@ -134,7 +138,7 @@ let map t ~f = * 3. Multiple listeners receive updates after changes * 4. Peek sees the latest value * 5. 
If we close the broadcast pipe, all listeners stop -*) + *) let%test_unit "listeners properly receive updates" = let expect_pipe t expected = let got_rev = @@ -152,8 +156,8 @@ let%test_unit "listeners properly receive updates" = [%test_result: int] ~message:"Initial value not observed when peeking" ~expect:initial (Reader.peek r) ; (* 2-3 *) - let d1 = expect_pipe r [0; 1; 2] in - let d2 = expect_pipe r [0; 1; 2] in + let d1 = expect_pipe r [ 0; 1; 2 ] in + let d2 = expect_pipe r [ 0; 1; 2 ] in don't_wait_for d1 ; don't_wait_for d2 ; let next_value = 1 in @@ -167,14 +171,14 @@ let%test_unit "listeners properly receive updates" = ~expect:next_value (Reader.peek r) ; (*6*) Writer.close w ; - Deferred.both d1 d2 >>| Fn.ignore ) + Deferred.both d1 d2 >>| Fn.ignore) let%test_module _ = ( module struct type iter_counts = - {mutable immediate_iterations: int; mutable deferred_iterations: int} + { mutable immediate_iterations : int; mutable deferred_iterations : int } - let zero_counts () = {immediate_iterations= 0; deferred_iterations= 0} + let zero_counts () = { immediate_iterations = 0; deferred_iterations = 0 } let assert_immediate counts expected = [%test_eq: int] counts.immediate_iterations expected @@ -195,11 +199,10 @@ let%test_module _ = let setup_reader counts = don't_wait_for @@ Reader.iter pipe_r ~f:(fun () -> - counts.immediate_iterations - <- counts.immediate_iterations + 1 ; + counts.immediate_iterations <- + counts.immediate_iterations + 1 ; let%map () = Async.after @@ Time.Span.of_sec 1. in - counts.deferred_iterations <- counts.deferred_iterations + 1 - ) + counts.deferred_iterations <- counts.deferred_iterations + 1) in setup_reader counts1 ; (* The reader doesn't run until we yield. 
*) @@ -224,6 +227,5 @@ let%test_module _ = assert_immediate counts2 1 ; assert_deferred counts2 0 ; let%bind () = Writer.write pipe_w () in - assert_both counts1 3 ; assert_both counts2 2 ; Deferred.return true - ) + assert_both counts1 3 ; assert_both counts2 2 ; Deferred.return true) end ) diff --git a/src/lib/pipe_lib/linear_pipe.ml b/src/lib/pipe_lib/linear_pipe.ml index af1b5a580d6..1dbe719a3c3 100644 --- a/src/lib/pipe_lib/linear_pipe.ml +++ b/src/lib/pipe_lib/linear_pipe.ml @@ -3,25 +3,25 @@ open Async_kernel module Writer = Pipe.Writer module Reader = struct - type 'a t = {pipe: 'a Pipe.Reader.t; mutable has_reader: bool} + type 'a t = { pipe : 'a Pipe.Reader.t; mutable has_reader : bool } end let create () = let r, w = Pipe.create () in - ({Reader.pipe= r; has_reader= false}, w) + ({ Reader.pipe = r; has_reader = false }, w) -let wrap_reader reader = {Reader.pipe= reader; has_reader= false} +let wrap_reader reader = { Reader.pipe = reader; has_reader = false } let force_write_maybe_drop_head ~capacity writer reader x = if Pipe.length reader.Reader.pipe > capacity then ignore ( Pipe.read_now reader.Reader.pipe - : [`Eof | `Nothing_available | `Ok of 'a] ) ; + : [ `Eof | `Nothing_available | `Ok of 'a ] ) ; Pipe.write_without_pushback writer x let create_reader ~close_on_exception f = let r = Pipe.create_reader ~close_on_exception f in - {Reader.pipe= r; has_reader= false} + { Reader.pipe = r; has_reader = false } let write w x = ( if Pipe.is_closed w then @@ -49,8 +49,8 @@ let closed (reader : 'a Reader.t) = Pipe.closed reader.pipe let multiple_reads_error () = failwith - "Linear_pipe.bracket: the same reader has been used multiple times. If \ - you want to rebroadcast the reader, use fork" + "Linear_pipe.bracket: the same reader has been used multiple times. 
If you \ + want to rebroadcast the reader, use fork" let bracket (reader : 'a Reader.t) dx = if reader.has_reader then multiple_reads_error () @@ -131,7 +131,7 @@ let transfer_id reader writer = let merge_unordered rs = let merged_reader, merged_writer = create () in List.iter rs ~f:(fun reader -> - don't_wait_for (iter reader ~f:(fun x -> Pipe.write merged_writer x)) ) ; + don't_wait_for (iter reader ~f:(fun x -> Pipe.write merged_writer x))) ; don't_wait_for (let%map () = Deferred.List.iter rs ~f:closed in Pipe.close merged_writer) ; @@ -148,31 +148,31 @@ let fork reader n = (iter reader ~f:(fun x -> Deferred.List.iter writers ~f:(fun writer -> if not (Pipe.is_closed writer) then Pipe.write writer x - else return () ) )) ; + else return ()))) ; don't_wait_for (let%map () = Deferred.List.iter readers ~f:closed in close_read reader) ; readers let fork2 reader = - match fork reader 2 with [x; y] -> (x, y) | _ -> assert false + match fork reader 2 with [ x; y ] -> (x, y) | _ -> assert false let fork3 reader = - match fork reader 3 with [x; y; z] -> (x, y, z) | _ -> assert false + match fork reader 3 with [ x; y; z ] -> (x, y, z) | _ -> assert false let fork4 reader = - match fork reader 4 with [x; y; z; w] -> (x, y, z, w) | _ -> assert false + match fork reader 4 with [ x; y; z; w ] -> (x, y, z, w) | _ -> assert false let fork5 reader = match fork reader 5 with - | [x; y; z; w; v] -> + | [ x; y; z; w; v ] -> (x, y, z, w, v) | _ -> assert false let fork6 reader = match fork reader 6 with - | [x; y; z; w; v; u] -> + | [ x; y; z; w; v; u ] -> (x, y, z, w, v, u) | _ -> assert false @@ -185,7 +185,7 @@ let partition_map2 reader ~f = | `Fst x -> Pipe.write writer_a x | `Snd x -> - Pipe.write writer_b x )) ; + Pipe.write writer_b x)) ; don't_wait_for (let%map () = closed reader_a and () = closed reader_b in close_read reader) ; @@ -203,7 +203,7 @@ let partition_map3 reader ~f = | `Snd x -> Pipe.write writer_b x | `Trd x -> - Pipe.write writer_c x )) ; + Pipe.write writer_c 
x)) ; don't_wait_for (let%map () = closed reader_a and () = closed reader_b @@ -215,11 +215,7 @@ let filter_map_unordered ~max_concurrency t ~f = let reader, writer = create () in don't_wait_for (iter_unordered ~max_concurrency t ~f:(fun x -> - match%bind f x with - | Some y -> - Pipe.write writer y - | None -> - return () )) ; + match%bind f x with Some y -> Pipe.write writer y | None -> return ())) ; don't_wait_for (let%map () = closed reader in close_read t) ; @@ -230,9 +226,9 @@ let latest_ref t ~initial = don't_wait_for (iter t ~f:(fun a -> return (cell := a))) ; cell -let values_available ({pipe; _} : 'a Reader.t) = Pipe.values_available pipe +let values_available ({ pipe; _ } : 'a Reader.t) = Pipe.values_available pipe -let peek ({pipe; _} : 'a Reader.t) = Pipe.peek pipe +let peek ({ pipe; _ } : 'a Reader.t) = Pipe.peek pipe let release_has_reader (reader : 'a Reader.t) = if not reader.has_reader then @@ -244,10 +240,10 @@ let read_now reader = let res = Pipe.read_now reader.pipe in release_has_reader reader ; res -let read' ?max_queue_length ({pipe; _} : 'a Reader.t) = +let read' ?max_queue_length ({ pipe; _ } : 'a Reader.t) = Pipe.read' ?max_queue_length pipe -let read ({pipe; _} : 'a Reader.t) = Pipe.read pipe +let read ({ pipe; _ } : 'a Reader.t) = Pipe.read pipe let read_exn reader = match%map read reader with diff --git a/src/lib/pipe_lib/linear_pipe.mli b/src/lib/pipe_lib/linear_pipe.mli index 9876e9f78de..2eadaf2f90b 100644 --- a/src/lib/pipe_lib/linear_pipe.mli +++ b/src/lib/pipe_lib/linear_pipe.mli @@ -6,7 +6,7 @@ module Writer : sig end module Reader : sig - type 'a t = {pipe: 'a Pipe.Reader.t; mutable has_reader: bool} + type 'a t = { pipe : 'a Pipe.Reader.t; mutable has_reader : bool } end val create : unit -> 'a Reader.t * 'a Writer.t @@ -85,8 +85,7 @@ val fork2 : 'a Reader.t -> 'a Reader.t * 'a Reader.t val fork3 : 'a Reader.t -> 'a Reader.t * 'a Reader.t * 'a Reader.t -val fork4 : - 'a Reader.t -> 'a Reader.t * 'a Reader.t * 'a Reader.t * 'a 
Reader.t +val fork4 : 'a Reader.t -> 'a Reader.t * 'a Reader.t * 'a Reader.t * 'a Reader.t val fork5 : 'a Reader.t @@ -103,12 +102,12 @@ val fork6 : val partition_map2 : 'a Reader.t - -> f:('a -> [`Fst of 'b | `Snd of 'c]) + -> f:('a -> [ `Fst of 'b | `Snd of 'c ]) -> 'b Reader.t * 'c Reader.t val partition_map3 : 'a Reader.t - -> f:('a -> [`Fst of 'b | `Snd of 'c | `Trd of 'd]) + -> f:('a -> [ `Fst of 'b | `Snd of 'c | `Trd of 'd ]) -> 'b Reader.t * 'c Reader.t * 'd Reader.t val filter_map_unordered : @@ -119,15 +118,17 @@ val filter_map_unordered : val latest_ref : 'a Reader.t -> initial:'a -> 'a ref -val values_available : 'a Reader.t -> [`Eof | `Ok] Deferred.t +val values_available : 'a Reader.t -> [ `Eof | `Ok ] Deferred.t val peek : 'a Reader.t -> 'a option -val read_now : 'a Reader.t -> [`Eof | `Nothing_available | `Ok of 'a] +val read_now : 'a Reader.t -> [ `Eof | `Nothing_available | `Ok of 'a ] val read' : - ?max_queue_length:int -> 'a Reader.t -> [`Eof | `Ok of 'a Queue.t] Deferred.t + ?max_queue_length:int + -> 'a Reader.t + -> [ `Eof | `Ok of 'a Queue.t ] Deferred.t -val read : 'a Reader.t -> [`Eof | `Ok of 'a] Deferred.t +val read : 'a Reader.t -> [ `Eof | `Ok of 'a ] Deferred.t val read_exn : 'a Reader.t -> 'a Deferred.t diff --git a/src/lib/pipe_lib/strict_pipe.ml b/src/lib/pipe_lib/strict_pipe.ml index 04070139ab5..8f897492b14 100644 --- a/src/lib/pipe_lib/strict_pipe.ml +++ b/src/lib/pipe_lib/strict_pipe.ml @@ -23,17 +23,18 @@ type _ buffered = Type_buffered type (_, _, _) type_ = | Synchronous : ('a, synchronous, unit Deferred.t) type_ | Buffered : - [`Capacity of int] * [`Overflow of ('a, 'b, 'r) overflow_behavior] + [ `Capacity of int ] * [ `Overflow of ('a, 'b, 'r) overflow_behavior ] -> ('a, 'b buffered, 'r) type_ let value_or_empty = Option.value ~default:"" module Reader0 = struct type 't t = - { reader: 't Pipe.Reader.t - ; mutable has_reader: bool - ; mutable downstreams: downstreams - ; name: string option } + { reader : 't Pipe.Reader.t 
+ ; mutable has_reader : bool + ; mutable downstreams : downstreams + ; name : string option + } and downstreams = | [] : downstreams @@ -46,18 +47,18 @@ module Reader0 = struct r :: downstreams_from_list rs (* TODO: See #1281 *) - let to_linear_pipe {reader= pipe; has_reader; _} = - {Linear_pipe.Reader.pipe; has_reader} + let to_linear_pipe { reader = pipe; has_reader; _ } = + { Linear_pipe.Reader.pipe; has_reader } - let of_linear_pipe ?name {Linear_pipe.Reader.pipe= reader; has_reader} = - {reader; has_reader; downstreams= []; name} + let of_linear_pipe ?name { Linear_pipe.Reader.pipe = reader; has_reader } = + { reader; has_reader; downstreams = []; name } let assert_not_read reader = if reader.has_reader then raise (Multiple_reads_attempted (value_or_empty reader.name)) let wrap_reader ?name reader = - {reader; has_reader= false; downstreams= []; name} + { reader; has_reader = false; downstreams = []; name } let enforce_single_reader reader deferred = assert_not_read reader ; @@ -115,7 +116,7 @@ module Reader0 = struct let strict_reader = wrap_reader ?name:reader.name (Pipe.map reader.reader ~f) in - reader.downstreams <- [strict_reader] ; + reader.downstreams <- [ strict_reader ] ; strict_reader let filter_map reader ~f = @@ -124,7 +125,7 @@ module Reader0 = struct let strict_reader = wrap_reader ?name:reader.name (Pipe.filter_map reader.reader ~f) in - reader.downstreams <- [strict_reader] ; + reader.downstreams <- [ strict_reader ] ; strict_reader let clear t = Pipe.clear t.reader @@ -144,25 +145,25 @@ module Reader0 = struct Deferred.choose (List.map readers ~f:(fun r -> Deferred.choice (Pipe.values_available r.reader) - (fun _ -> ()) )) + (fun _ -> ()))) in List.find readers ~f:not_empty in match ready_reader with | Some reader -> ( - match Pipe.read_now reader.reader with - | `Nothing_available -> - failwith "impossible" - | `Eof -> - Deferred.unit - | `Ok value -> - Deferred.bind (f value) ~f:(fun () -> read_deferred readers) ) + match Pipe.read_now 
reader.reader with + | `Nothing_available -> + failwith "impossible" + | `Eof -> + Deferred.unit + | `Ok value -> + Deferred.bind (f value) ~f:(fun () -> read_deferred readers) ) | None -> ( - match List.filter readers ~f:(fun r -> not @@ is_closed r) with - | [] -> - Deferred.unit - | open_readers -> - read_deferred open_readers ) + match List.filter readers ~f:(fun r -> not @@ is_closed r) with + | [] -> + Deferred.unit + | open_readers -> + read_deferred open_readers ) in List.iter readers ~f:assert_not_read ; read_deferred readers @@ -183,7 +184,7 @@ module Reader0 = struct (Pipe.iter reader.reader ~f:(fun x -> Deferred.List.iter writers ~f:(fun writer -> if not (Pipe.is_closed writer) then Pipe.write writer x - else return () ) )) ; + else return ()))) ; don't_wait_for (let%map () = Deferred.List.iter readers ~f:Pipe.closed in Pipe.close_read reader.reader) ; @@ -194,11 +195,11 @@ module Reader0 = struct strict_readers let two reader = - match n reader 2 with [a; b] -> (a, b) | _ -> failwith "unexpected" + match n reader 2 with [ a; b ] -> (a, b) | _ -> failwith "unexpected" let three reader = match n reader 3 with - | [a; b; c] -> + | [ a; b; c ] -> (a, b, c) | _ -> failwith "unexpected" @@ -218,16 +219,18 @@ end module Writer = struct type ('t, 'type_, 'write_return) t = - { type_: ('t, 'type_, 'write_return) type_ - ; strict_reader: 't Reader0.t - ; writer: 't Pipe.Writer.t - ; warn_on_drop: bool - ; name: string option } + { type_ : ('t, 'type_, 'write_return) type_ + ; strict_reader : 't Reader0.t + ; writer : 't Pipe.Writer.t + ; warn_on_drop : bool + ; name : string option + } (* TODO: See #1281 *) - let to_linear_pipe {writer= pipe; _} = pipe + let to_linear_pipe { writer = pipe; _ } = pipe - let handle_buffered_write : type type_ return. + let handle_buffered_write : + type type_ return. 
('t, type_, return) t -> 't -> capacity:int @@ -249,7 +252,8 @@ module Writer = struct [ ( "name" , `String (Sexplib.Sexp.to_string ([%sexp_of: string option] writer.name)) - ) ] ) ; + ) + ] ) ; match writer.type_ with | Synchronous -> Pipe.write writer.writer data @@ -264,42 +268,42 @@ module Writer = struct let my_name = Option.value writer.name ~default:"" in if writer.warn_on_drop then [%log warn] - ~metadata:[("pipe_name", `String my_name)] + ~metadata:[ ("pipe_name", `String my_name) ] "Dropping message on pipe $pipe_name" ; ignore ( Pipe.read_now writer.strict_reader.reader - : [`Eof | `Nothing_available | `Ok of 'a] ) ; - Pipe.write_without_pushback writer.writer data ) + : [ `Eof | `Nothing_available | `Ok of 'a ] ) ; + Pipe.write_without_pushback writer.writer data) ~normal_return:() | Buffered (`Capacity capacity, `Overflow (Call f)) -> handle_buffered_write writer data ~capacity ~on_overflow:(fun () -> Some (f data)) ~normal_return:None - let close {strict_reader; writer; _} = + let close { strict_reader; writer; _ } = Pipe.close writer ; Reader0.close_downstreams strict_reader.downstreams - let kill {strict_reader; writer; _} = + let kill { strict_reader; writer; _ } = Pipe.clear strict_reader.reader ; Pipe.close writer ; Reader0.close_downstreams strict_reader.downstreams - let is_closed {writer; _} = Pipe.is_closed writer + let is_closed { writer; _ } = Pipe.is_closed writer end let create ?name ?(warn_on_drop = true) type_ = let reader, writer = Pipe.create () in let strict_reader = - Reader0.{reader; has_reader= false; downstreams= []; name} + Reader0.{ reader; has_reader = false; downstreams = []; name } in let strict_writer = - Writer.{type_; strict_reader; warn_on_drop; writer; name} + Writer.{ type_; strict_reader; warn_on_drop; writer; name } in (strict_reader, strict_writer) -let transfer reader Writer.{strict_reader; writer; _} ~f = - Reader0.(reader.downstreams <- [strict_reader]) ; +let transfer reader Writer.{ strict_reader; writer; _ } 
~f = + Reader0.(reader.downstreams <- [ strict_reader ]) ; Reader0.enforce_single_reader reader (Pipe.transfer reader.reader writer ~f) let rec transfer_while_writer_alive reader writer ~f = @@ -328,13 +332,13 @@ module Reader = struct | `Snd x -> Writer.write writer_b x | `Trd x -> - Writer.write writer_c x )) ; + Writer.write writer_c x)) ; don't_wait_for (let%map () = Pipe.closed reader_a.reader and () = Pipe.closed reader_b.reader and () = Pipe.closed reader_c.reader in Pipe.close_read reader.reader) ; - reader.downstreams <- [reader_a; reader_b; reader_c] ; + reader.downstreams <- [ reader_a; reader_b; reader_c ] ; (reader_a, reader_b, reader_c) end @@ -348,13 +352,13 @@ let%test_module "Strict_pipe.Reader.Merge" = let reader2, writer2 = create (Buffered (`Capacity 10, `Overflow Drop_head)) in - Reader.Merge.iter [reader1; reader2] ~f:(fun _ -> Deferred.unit) + Reader.Merge.iter [ reader1; reader2 ] ~f:(fun _ -> Deferred.unit) |> don't_wait_for ; Writer.write writer1 1 ; Writer.write writer2 2 ; Writer.close writer1 ; let%map () = Async.after (Time.Span.of_ms 5.) 
in - Writer.write writer2 3 ; () ) + Writer.write writer2 3 ; ()) end ) let%test_module "Strict_pipe.close" = diff --git a/src/lib/pipe_lib/strict_pipe.mli b/src/lib/pipe_lib/strict_pipe.mli index 40356444be5..a5ecbe81f3b 100644 --- a/src/lib/pipe_lib/strict_pipe.mli +++ b/src/lib/pipe_lib/strict_pipe.mli @@ -27,7 +27,7 @@ type _ buffered = Type_buffered type (_, _, _) type_ = | Synchronous : ('a, synchronous, unit Deferred.t) type_ | Buffered : - [`Capacity of int] * [`Overflow of ('a, 'b, 'r) overflow_behavior] + [ `Capacity of int ] * [ `Overflow of ('a, 'b, 'r) overflow_behavior ] -> ('a, 'b buffered, 'r) type_ module Reader : sig @@ -36,9 +36,9 @@ module Reader : sig (* Using [`Eof | `Ok of 't] to mirror interface of Jane Street's Pipe read *) (** Read a single value from the pipe or fail if the pipe is closed *) - val read : 't t -> [`Eof | `Ok of 't] Deferred.t + val read : 't t -> [ `Eof | `Ok of 't ] Deferred.t - val read' : 't t -> [`Eof | `Ok of 't Base.Queue.t] Deferred.t + val read' : 't t -> [ `Eof | `Ok of 't Base.Queue.t ] Deferred.t val to_linear_pipe : 't t -> 't Linear_pipe.Reader.t @@ -58,8 +58,8 @@ module Reader : sig val fold_until : 'a t -> init:'b - -> f:('b -> 'a -> [`Continue of 'b | `Stop of 'c] Deferred.t) - -> [`Eof of 'b | `Terminated of 'c] Deferred.t + -> f:('b -> 'a -> [ `Continue of 'b | `Stop of 'c ] Deferred.t) + -> [ `Eof of 'b | `Terminated of 'c ] Deferred.t (** This has similar semantics to [fold reader ~init ~f], but f isn't * deferred. This function delegates to [Pipe.fold_without_pushback] *) @@ -108,7 +108,7 @@ module Reader : sig * there will be a deadlock. 
*) val partition_map3 : 'a t - -> f:('a -> [`Fst of 'b | `Snd of 'c | `Trd of 'd]) + -> f:('a -> [ `Fst of 'b | `Snd of 'c | `Trd of 'd ]) -> 'b t * 'c t * 'd t end diff --git a/src/lib/pokolog/hash_intf.ml b/src/lib/pokolog/hash_intf.ml index 90e74b2acc3..cca414efafe 100644 --- a/src/lib/pokolog/hash_intf.ml +++ b/src/lib/pokolog/hash_intf.ml @@ -34,12 +34,12 @@ module type S = sig module Unchecked : Unchecked - with type scalar := Scalar.Unchecked.t - and type group := Group.Unchecked.t + with type scalar := Scalar.Unchecked.t + and type group := Group.Unchecked.t module Checked : Checked - with module Impl := Impl - and type scalar := Scalar.Checked.t - and type group := Group.Checked.t + with module Impl := Impl + and type scalar := Scalar.Checked.t + and type group := Group.Checked.t end diff --git a/src/lib/pokolog/inputs_intf.ml b/src/lib/pokolog/inputs_intf.ml index 6a70888a03b..eba5653d9f4 100644 --- a/src/lib/pokolog/inputs_intf.ml +++ b/src/lib/pokolog/inputs_intf.ml @@ -17,9 +17,9 @@ module type Checked = sig module Hash : Hash_intf.Checked - with module Impl := Impl - and type group := Group.t - and type scalar := Scalar.t + with module Impl := Impl + and type group := Group.t + and type scalar := Scalar.t end module type S = sig @@ -32,7 +32,7 @@ module type S = sig module Hash : Hash_intf.S - with module Impl := Impl - and module Scalar := Scalar - and module Group := Group + with module Impl := Impl + and module Scalar := Scalar + and module Group := Group end diff --git a/src/lib/pokolog/pokolog.ml b/src/lib/pokolog/pokolog.ml index 2952c354d8a..c05540d8550 100644 --- a/src/lib/pokolog/pokolog.ml +++ b/src/lib/pokolog/pokolog.ml @@ -19,9 +19,9 @@ module Variable_base = struct let r = Scalar.random () in let h = Group.(r * base) in let c = Hash.(to_scalar (create h)) in - {h; s= Scalar.(r + (c * log))} + { h; s = Scalar.(r + (c * log)) } - let verify ({h; s} : t) ({base; element} : Instance.t) = + let verify ({ h; s } : t) ({ base; element } : 
Instance.t) = let c = Hash.(to_scalar (create h)) in let open Group in equal (s * base) (h + (c * element)) @@ -37,7 +37,7 @@ module Variable_base = struct type t = (Group.t, Group.t) Instance.t end - let verify' k ({h; s} : t) ({base; element} : Instance.t) = + let verify' k ({ h; s } : t) ({ base; element } : Instance.t) = let open Impl.Checked in let%bind c = Hash.(create h >>| to_scalar) in let open Group in diff --git a/src/lib/pokolog/proof.ml b/src/lib/pokolog/proof.ml index 7772a5c7c13..12c17bdddfc 100644 --- a/src/lib/pokolog/proof.ml +++ b/src/lib/pokolog/proof.ml @@ -1,2 +1,2 @@ (* someday: optimization, the scalar here can be smaller than full width in our usecase. *) -type ('group, 'scalar) t = {h: 'group; s: 'scalar} [@@deriving bin_io] +type ('group, 'scalar) t = { h : 'group; s : 'scalar } [@@deriving bin_io] diff --git a/src/lib/pokolog/variable_base_instance.ml b/src/lib/pokolog/variable_base_instance.ml index 36f8d3a05dd..c4ede3ce454 100644 --- a/src/lib/pokolog/variable_base_instance.ml +++ b/src/lib/pokolog/variable_base_instance.ml @@ -1 +1 @@ -type ('base, 'group) t = {base: 'base; element: 'group} [@@deriving bin_io] +type ('base, 'group) t = { base : 'base; element : 'group } [@@deriving bin_io] diff --git a/src/lib/ppx_coda/check_ocaml_word_size.ml b/src/lib/ppx_coda/check_ocaml_word_size.ml index ed8b6ece939..517cb2019b9 100644 --- a/src/lib/ppx_coda/check_ocaml_word_size.ml +++ b/src/lib/ppx_coda/check_ocaml_word_size.ml @@ -23,4 +23,4 @@ let ext = expand let () = - Driver.register_transformation name ~rules:[Context_free.Rule.extension ext] + Driver.register_transformation name ~rules:[ Context_free.Rule.extension ext ] diff --git a/src/lib/ppx_coda/define_from_scope.ml b/src/lib/ppx_coda/define_from_scope.ml index caad7eee009..fd2827acf25 100644 --- a/src/lib/ppx_coda/define_from_scope.ml +++ b/src/lib/ppx_coda/define_from_scope.ml @@ -26,7 +26,7 @@ let name = "define_from_scope" let expr_to_id loc expr = match expr.pexp_desc with - | 
Pexp_ident {txt= Lident s; _} -> + | Pexp_ident { txt = Lident s; _ } -> s | _ -> Location.raise_errorf ~loc "Expected identifier" @@ -45,4 +45,4 @@ let ext = expand let () = - Driver.register_transformation name ~rules:[Context_free.Rule.extension ext] + Driver.register_transformation name ~rules:[ Context_free.Rule.extension ext ] diff --git a/src/lib/ppx_coda/define_locally.ml b/src/lib/ppx_coda/define_locally.ml index a4a437366cf..5cdb4339cfd 100644 --- a/src/lib/ppx_coda/define_locally.ml +++ b/src/lib/ppx_coda/define_locally.ml @@ -21,7 +21,7 @@ let raise_errorf = Location.raise_errorf let expr_to_id loc expr = match expr.pexp_desc with - | Pexp_ident {txt= Lident s; _} -> + | Pexp_ident { txt = Lident s; _ } -> s | _ -> Location.raise_errorf ~loc "Expected identifier" @@ -29,18 +29,18 @@ let expr_to_id loc expr = let expand ~loc ~path:_ open_decl defs = match defs.pexp_desc with | Pexp_tuple exps -> - let (module Ast_builder) = Ast_builder.make loc in - let open Ast_builder in - let names = List.map exps ~f:(expr_to_id loc) in - let vars = List.map names ~f:pvar in - Str.value ~loc Nonrecursive - [ Vb.mk ~loc (Pat.tuple ~loc vars) - (Exp.open_ ~loc open_decl defs) ] - | Pexp_ident {txt= Lident id; _} -> + let (module Ast_builder) = Ast_builder.make loc in + let open Ast_builder in + let names = List.map exps ~f:(expr_to_id loc) in + let vars = List.map names ~f:pvar in + Str.value ~loc Nonrecursive + [ Vb.mk ~loc (Pat.tuple ~loc vars) (Exp.open_ ~loc open_decl defs) ] + | Pexp_ident { txt = Lident id; _ } -> Str.value ~loc Nonrecursive [ Vb.mk ~loc - (Pat.var ~loc {txt= id; loc}) - (Exp.open_ ~loc open_decl defs) ] + (Pat.var ~loc { txt = id; loc }) + (Exp.open_ ~loc open_decl defs) + ] | _ -> raise_errorf ~loc "Must provide an identifier or tuple of identifiers" @@ -50,4 +50,4 @@ let ext = expand let () = - Driver.register_transformation name ~rules:[Context_free.Rule.extension ext] + Driver.register_transformation name ~rules:[ Context_free.Rule.extension 
ext ] diff --git a/src/lib/ppx_coda/expires_after.ml b/src/lib/ppx_coda/expires_after.ml index e9a87ef4e52..0543dfbb5d4 100644 --- a/src/lib/ppx_coda/expires_after.ml +++ b/src/lib/ppx_coda/expires_after.ml @@ -21,7 +21,7 @@ let expand ~loc ~path:_ str _delimiter = try (* the of_string function below allows too-long strings, as long as it starts with a valid date, so we do our own length check - *) + *) if String.length str.txt > 8 then Location.raise_errorf ~loc:str.loc "Not a valid date, string too long; must be in form YYYYMMDD" ; @@ -41,4 +41,4 @@ let ext = expand let () = - Driver.register_transformation name ~rules:[Context_free.Rule.extension ext] + Driver.register_transformation name ~rules:[ Context_free.Rule.extension ext ] diff --git a/src/lib/ppx_coda/getenv_ppx.ml b/src/lib/ppx_coda/getenv_ppx.ml index cc5998ef91e..8ef8bf56cb8 100644 --- a/src/lib/ppx_coda/getenv_ppx.ml +++ b/src/lib/ppx_coda/getenv_ppx.ml @@ -15,4 +15,4 @@ let ext = expand let () = - Driver.register_transformation name ~rules:[Context_free.Rule.extension ext] + Driver.register_transformation name ~rules:[ Context_free.Rule.extension ext ] diff --git a/src/lib/ppx_coda/log.ml b/src/lib/ppx_coda/log.ml index 3e3676b1f5e..db4b68df3b0 100644 --- a/src/lib/ppx_coda/log.ml +++ b/src/lib/ppx_coda/log.ml @@ -42,8 +42,7 @@ module Make (Info : Ppxinfo) = struct in let log_level_expr = pexp_ident (Located.mk log_level_id) in (* spam and best_tip_diff logs don't contain module, location *) - if - String.equal level_name "spam" || String.equal level_name "best_tip_diff" + if String.equal level_name "spam" || String.equal level_name "best_tip_diff" then [%expr [%e log_level_expr] [%e logger]] else [%expr @@ -69,11 +68,11 @@ module Make (Info : Ppxinfo) = struct let () = Driver.register_transformation Info.name - ~rules:[Context_free.Rule.extension ext_capture_logger] + ~rules:[ Context_free.Rule.extension ext_capture_logger ] let () = Driver.register_transformation (prime Info.name) - 
~rules:[Context_free.Rule.extension ext_explicit_logger] + ~rules:[ Context_free.Rule.extension ext_explicit_logger ] end include Make (struct diff --git a/src/lib/ppx_coda/ppx_representatives/ppx_representatives.ml b/src/lib/ppx_coda/ppx_representatives/ppx_representatives.ml index 6ea9d3a1f96..67b254574f1 100644 --- a/src/lib/ppx_coda/ppx_representatives/ppx_representatives.ml +++ b/src/lib/ppx_coda/ppx_representatives/ppx_representatives.ml @@ -55,8 +55,8 @@ let rec core_type ~loc (typ : core_type) : expression = "Cannot derive %s for anonymous type variables" deriver_name | Ptyp_var name -> (* Names for type variables should be placed in context at the type - declaration level. - *) + declaration level. + *) evar ~loc name | Ptyp_arrow (label, _, _) -> [%expr @@ -69,10 +69,11 @@ let rec core_type ~loc (typ : core_type) : expression = [%e estring ~loc (Stdlib.Format.asprintf - "%s: Illegal call to dummy functional value \ - defined by %a" + "%s: Illegal call to dummy functional value defined \ + by %a" deriver_name Ocaml_common.Location.print_loc - typ.ptyp_loc)]]] ]] + typ.ptyp_loc)]]] + ]] | Ptyp_tuple typs -> let exprs = List.map ~f:(core_type ~loc) typs in let mk_name i = @@ -82,7 +83,8 @@ let rec core_type ~loc (typ : core_type) : expression = [%expr [ [%e pexp_tuple ~loc - (List.mapi exprs ~f:(fun i _ -> evar ~loc (mk_name i)))] ]] + (List.mapi exprs ~f:(fun i _ -> evar ~loc (mk_name i)))] + ]] in (* We map over each alternative in the tuple to get every possible combination. 
@@ -95,10 +97,11 @@ let rec core_type ~loc (typ : core_type) : expression = Ppx_representatives_runtime.Util.rev_concat (Stdlib.List.rev_map (fun [%p pvar ~loc (mk_name i)] -> [%e expr]) - (Stdlib.Lazy.force [%e arg]))] )]] - | Ptyp_constr ({txt= Lident name; _}, []) when is_builtin name -> + (Stdlib.Lazy.force [%e arg]))])]] + | Ptyp_constr ({ txt = Lident name; _ }, []) when is_builtin name -> mk_builtin ~loc name - | Ptyp_constr ({txt= Lident name; _}, [_]) when is_builtin_with_arg name -> + | Ptyp_constr ({ txt = Lident name; _ }, [ _ ]) when is_builtin_with_arg name + -> [%expr [%e mk_builtin ~loc name] ()] | Ptyp_constr (lid, typs) -> let exprs = List.map ~f:(core_type ~loc) typs in @@ -109,8 +112,8 @@ let rec core_type ~loc (typ : core_type) : expression = Location.raise_errorf ~loc:typ.ptyp_loc "Cannot derive %s for object types" deriver_name | Ptyp_class _ -> - Location.raise_errorf ~loc:typ.ptyp_loc - "Cannot derive %s for class types" deriver_name + Location.raise_errorf ~loc:typ.ptyp_loc "Cannot derive %s for class types" + deriver_name | Ptyp_alias _ -> (* Really we should bubble a let definition for the alias out to the type, but we don't need this currently, so it's not worth the @@ -129,28 +132,28 @@ let rec core_type ~loc (typ : core_type) : expression = elist ~loc (List.rev_map rows ~f:(fun row_field -> match row_field.prf_desc with - | Rtag (name, _, []) -> - [%expr [[%e pexp_variant ~loc name.txt None]]] - | Rtag (name,_, [typ]) -> - [%expr - Stdlib.List.rev_map - (fun e -> - [%e pexp_variant ~loc name.txt (Some [%expr e])] - ) - (Stdlib.Lazy.force [%e core_type ~loc typ])] - | Rtag _ -> - Location.raise_errorf ~loc:typ.ptyp_loc - "Cannot derive %s for variant constructors with \ - different type arguments for the same constructor" - deriver_name - | Rinherit typ' -> - [%expr - Stdlib.List.rev - (Stdlib.Lazy.force - (* Coerce here, because the inherited type may be a - strict subtype. 
- *) - ([%e core_type ~loc typ'] :> [%t typ] list lazy_t))] ))])] + | Rtag (name, _, []) -> + [%expr [ [%e pexp_variant ~loc name.txt None] ]] + | Rtag (name, _, [ typ ]) -> + [%expr + Stdlib.List.rev_map + (fun e -> + [%e pexp_variant ~loc name.txt (Some [%expr e])]) + (Stdlib.Lazy.force [%e core_type ~loc typ])] + | Rtag _ -> + Location.raise_errorf ~loc:typ.ptyp_loc + "Cannot derive %s for variant constructors with \ + different type arguments for the same constructor" + deriver_name + | Rinherit typ' -> + [%expr + Stdlib.List.rev + (Stdlib.Lazy.force + (* Coerce here, because the inherited type may be a + strict subtype. + *) + ( [%e core_type ~loc typ'] + :> [%t typ] list lazy_t ))]))])] | Ptyp_poly (vars, typ) -> (* Inject dummy representatives into the environment so that they can resolve. @@ -163,7 +166,7 @@ let rec core_type ~loc (typ : core_type) : expression = let [%p pvar ~loc var.txt] = Stdlib.Lazy.from_fun (fun () -> failwith "Unknown type") in - [%e expr]] )]] + [%e expr]])]] | Ptyp_package _ -> Location.raise_errorf ~loc:typ.ptyp_loc "Cannot derive %s for packaged modules" deriver_name @@ -182,15 +185,15 @@ let record_decl ~loc (fields : label_declaration list) : expression = Ppx_representatives_runtime.Util.rev_concat (Stdlib.List.rev_map (fun [%p pvar ~loc field.pld_name.txt] -> [%e expr]) - (Lazy.force [%e core_type ~loc field.pld_type]))] ) + (Lazy.force [%e core_type ~loc field.pld_type]))]) ~init: [%expr [ [%e pexp_record ~loc (List.map fields ~f:(fun field -> - (mk_lid field.pld_name, evar ~loc field.pld_name.txt) - )) - None] ]]]] + (mk_lid field.pld_name, evar ~loc field.pld_name.txt))) + None] + ]]]] let str_decl ~loc (decl : type_declaration) : structure_item = let open Ast_builder.Default in @@ -200,7 +203,7 @@ let str_decl ~loc (decl : type_declaration) : structure_item = [%t List.fold_right decl.ptype_params ~f:(fun (param, _) typ -> - [%type: [%t param] list lazy_t -> [%t typ]] ) + [%type: [%t param] list lazy_t -> [%t typ]]) 
~init:[%type: [%t constr_of_decl ~loc decl] list lazy_t]]) = [%e List.fold_right decl.ptype_params ~init:expr @@ -215,10 +218,10 @@ let str_decl ~loc (decl : type_declaration) : structure_item = Location.raise_errorf ~loc:param.ptyp_loc "Expected a type variable or _" in - [%expr fun [%p pat] -> [%e expr]] )]] + [%expr fun [%p pat] -> [%e expr]])]] in match decl with - | {ptype_kind= Ptype_variant constrs; _} -> + | { ptype_kind = Ptype_variant constrs; _ } -> binding [%expr lazy @@ -230,7 +233,7 @@ let str_decl ~loc (decl : type_declaration) : structure_item = match constr.pcd_args with | Pcstr_tuple [] -> None - | Pcstr_tuple [typ] -> + | Pcstr_tuple [ typ ] -> Some (core_type ~loc typ) | Pcstr_tuple typs -> Some (core_type ~loc (ptyp_tuple ~loc typs)) @@ -242,19 +245,19 @@ let str_decl ~loc (decl : type_declaration) : structure_item = [%expr [ [%e pexp_construct ~loc (mk_lid constr.pcd_name) - None] ]] + None] + ]] | Some arg -> [%expr Stdlib.List.rev_map (fun x -> [%e - pexp_construct ~loc - (mk_lid constr.pcd_name) - (Some [%expr x])] ) - (Stdlib.Lazy.force [%e arg])] ))])] - | {ptype_kind= Ptype_abstract; ptype_manifest= Some typ; _} -> + pexp_construct ~loc (mk_lid constr.pcd_name) + (Some [%expr x])]) + (Stdlib.Lazy.force [%e arg])]))])] + | { ptype_kind = Ptype_abstract; ptype_manifest = Some typ; _ } -> binding (core_type ~loc typ) - | {ptype_kind= Ptype_record fields; _} -> + | { ptype_kind = Ptype_record fields; _ } -> binding (record_decl ~loc fields) | _ -> Location.raise_errorf ~loc "Cannot derive %s for this type" deriver_name @@ -263,12 +266,11 @@ let sig_decl ~loc (decl : type_declaration) : signature_item = let open Ast_builder.Default in psig_value ~loc @@ value_description ~loc ~prim:[] - ~name: - (Located.mk ~loc (mangle ~suffix:deriver_name decl.ptype_name.txt)) + ~name:(Located.mk ~loc (mangle ~suffix:deriver_name decl.ptype_name.txt)) ~type_: (List.fold_right decl.ptype_params ~f:(fun (param, _) typ -> - [%type: [%t param] list lazy_t -> 
[%t typ]] ) + [%type: [%t param] list lazy_t -> [%t typ]]) ~init:[%type: [%t constr_of_decl ~loc decl] list lazy_t]) let str_type_decl ~loc ~path:_ (_rec_flag, decls) : structure = diff --git a/src/lib/ppx_coda/ppx_representatives/runtime/ppx_representatives_runtime.ml b/src/lib/ppx_coda/ppx_representatives/runtime/ppx_representatives_runtime.ml index 475ca9745e2..caaca336ad3 100644 --- a/src/lib/ppx_coda/ppx_representatives/runtime/ppx_representatives_runtime.ml +++ b/src/lib/ppx_coda/ppx_representatives/runtime/ppx_representatives_runtime.ml @@ -12,26 +12,26 @@ module Util = struct go [] l end -let unit_to_representatives = lazy [()] +let unit_to_representatives = lazy [ () ] -let bool_to_representatives = lazy [false] +let bool_to_representatives = lazy [ false ] -let int_to_representatives = lazy [0] +let int_to_representatives = lazy [ 0 ] -let string_to_representatives = lazy [""] +let string_to_representatives = lazy [ "" ] -let char_to_representatives = lazy [' '] +let char_to_representatives = lazy [ ' ' ] -let bytes_to_representatives = lazy [Bytes.empty] +let bytes_to_representatives = lazy [ Bytes.empty ] -let int32_to_representatives = lazy [0l] +let int32_to_representatives = lazy [ 0l ] -let int64_to_representatives = lazy [0L] +let int64_to_representatives = lazy [ 0L ] -let nativeint_to_representatives = lazy [0n] +let nativeint_to_representatives = lazy [ 0n ] -let list_to_representatives _ = lazy [[]] +let list_to_representatives _ = lazy [ [] ] -let option_to_representatives _ = lazy [None] +let option_to_representatives _ = lazy [ None ] -let array_to_representatives _ = lazy [[||]] +let array_to_representatives _ = lazy [ [||] ] diff --git a/src/lib/ppx_coda/ppx_to_enum/ppx_to_enum.ml b/src/lib/ppx_coda/ppx_to_enum/ppx_to_enum.ml index 47df3e60524..06dbcff324e 100644 --- a/src/lib/ppx_coda/ppx_to_enum/ppx_to_enum.ml +++ b/src/lib/ppx_coda/ppx_to_enum/ppx_to_enum.ml @@ -36,7 +36,7 @@ let constr_of_decl ~loc decl = let str_decl ~loc (decl : 
type_declaration) : structure = let open Ast_builder.Default in match decl with - | {ptype_kind= Ptype_variant constrs; ptype_name= name; _} -> + | { ptype_kind = Ptype_variant constrs; ptype_name = name; _ } -> (* [type t = A of int | B of bool | ...] *) [%str let ([%p pvar ~loc (mangle ~suffix:deriver_name name.txt)] : @@ -44,24 +44,26 @@ let str_decl ~loc (decl : type_declaration) : structure = [%e pexp_function ~loc (List.mapi constrs ~f:(fun i constr -> - { pc_lhs= + { pc_lhs = ppat_construct ~loc (mk_lid constr.pcd_name) ( match constr.pcd_args with | Pcstr_tuple [] -> None | _ -> Some (ppat_any ~loc) ) - ; pc_guard= None - ; pc_rhs= eint ~loc i } ))] + ; pc_guard = None + ; pc_rhs = eint ~loc i + }))] let [%p pvar ~loc (mangle_prefix ~prefix:"min" name.txt)] = 0 let [%p pvar ~loc (mangle_prefix ~prefix:"max" name.txt)] = [%e eint ~loc (List.length constrs - 1)]] - | { ptype_kind= Ptype_abstract - ; ptype_name= name - ; ptype_manifest= Some {ptyp_desc= Ptyp_constr (lid, _); _} - ; _ } -> + | { ptype_kind = Ptype_abstract + ; ptype_name = name + ; ptype_manifest = Some { ptyp_desc = Ptyp_constr (lid, _); _ } + ; _ + } -> (* [type t = Foo.t] *) [%str let ([%p pvar ~loc (mangle ~suffix:deriver_name name.txt)] : @@ -76,10 +78,11 @@ let str_decl ~loc (decl : type_declaration) : structure = let [%p pvar ~loc (mangle_prefix ~prefix:"max" name.txt)] = [%e pexp_ident ~loc (Located.map (mangle_prefix_lid ~prefix:"max") lid)]] - | { ptype_kind= Ptype_abstract - ; ptype_name= name - ; ptype_manifest= Some {ptyp_desc= Ptyp_variant (constrs, Closed, _); _} - ; _ } -> + | { ptype_kind = Ptype_abstract + ; ptype_name = name + ; ptype_manifest = Some { ptyp_desc = Ptyp_variant (constrs, Closed, _); _ } + ; _ + } -> (* [type t = [ `A of int | `B of bool | ...]] *) [%str let ([%p pvar ~loc (mangle ~suffix:deriver_name name.txt)] : @@ -89,7 +92,7 @@ let str_decl ~loc (decl : type_declaration) : structure = (List.mapi constrs ~f:(fun i constr -> match constr.prf_desc with | Rtag 
(label, has_empty, args) -> - { pc_lhs= + { pc_lhs = ( match (has_empty, args) with | _, [] -> (* [`A] *) @@ -104,13 +107,14 @@ let str_decl ~loc (decl : type_declaration) : structure = (ppat_variant ~loc label.txt None) (ppat_variant ~loc label.txt (Some (ppat_any ~loc))) ) - ; pc_guard= None - ; pc_rhs= eint ~loc i } + ; pc_guard = None + ; pc_rhs = eint ~loc i + } | Rinherit typ -> Location.raise_errorf ~loc:typ.ptyp_loc - "Cannot derive %s for this type: inherited fields \ - are not supported" - deriver_name ))] + "Cannot derive %s for this type: inherited fields are \ + not supported" + deriver_name))] let [%p pvar ~loc (mangle_prefix ~prefix:"min" name.txt)] = 0 @@ -136,7 +140,8 @@ let sig_decl ~loc (decl : type_declaration) : signature = ; value_description ~loc ~prim:[] ~name: (Located.mk ~loc (mangle_prefix ~prefix:"max" decl.ptype_name.txt)) - ~type_:[%type: int] ] + ~type_:[%type: int] + ] let str_type_decl ~loc ~path:_ (_rec_flag, decls) : structure = List.concat_map ~f:(str_decl ~loc) decls diff --git a/src/lib/ppx_coda/tests/define_locally_good.ml b/src/lib/ppx_coda/tests/define_locally_good.ml index d8da8b2701d..094871b465e 100644 --- a/src/lib/ppx_coda/tests/define_locally_good.ml +++ b/src/lib/ppx_coda/tests/define_locally_good.ml @@ -9,11 +9,9 @@ module M1 = struct end module M2 = struct - [%%define_locally - M1.(x, y, z)] + [%%define_locally M1.(x, y, z)] - [%%define_locally - M1.(q)] + [%%define_locally M1.(q)] let _ = x diff --git a/src/lib/ppx_coda/tests/unexpired.ml b/src/lib/ppx_coda/tests/unexpired.ml index bb7801c7ef8..1bffa0423e7 100644 --- a/src/lib/ppx_coda/tests/unexpired.ml +++ b/src/lib/ppx_coda/tests/unexpired.ml @@ -1,7 +1,6 @@ (* date in the far future *) -[%%expires_after -"25250101"] +[%%expires_after "25250101"] (* ppx used inside internal module *) diff --git a/src/lib/ppx_dhall_type/deriving.ml b/src/lib/ppx_dhall_type/deriving.ml index 7e4f226a65c..2225c0ec50b 100644 --- a/src/lib/ppx_dhall_type/deriving.ml +++ 
b/src/lib/ppx_dhall_type/deriving.ml @@ -18,17 +18,17 @@ let field_key_attr = let make_lident_cmp items lident = List.mem items (Longident.name lident.txt) ~equal:String.equal -let is_bool_lident = make_lident_cmp ["bool"; "Bool.t"] +let is_bool_lident = make_lident_cmp [ "bool"; "Bool.t" ] -let is_int_lident = make_lident_cmp ["int"; "Int.t"] +let is_int_lident = make_lident_cmp [ "int"; "Int.t" ] -let is_float_lident = make_lident_cmp ["float"; "Float.t"] +let is_float_lident = make_lident_cmp [ "float"; "Float.t" ] -let is_string_lident = make_lident_cmp ["string"; "String.t"] +let is_string_lident = make_lident_cmp [ "string"; "String.t" ] -let is_option_lident = make_lident_cmp ["option"; "Option.t"] +let is_option_lident = make_lident_cmp [ "option"; "Option.t" ] -let is_list_lident = make_lident_cmp ["list"; "List.t"] +let is_list_lident = make_lident_cmp [ "list"; "List.t" ] let rec dhall_type_of_core_type core_type = let (module Ast_builder) = Ast_builder.make core_type.ptyp_loc in @@ -42,22 +42,21 @@ let rec dhall_type_of_core_type core_type = [%expr Ppx_dhall_type.Dhall_type.Double] | Ptyp_constr (lident, []) when is_string_lident lident -> [%expr Ppx_dhall_type.Dhall_type.Text] - | Ptyp_constr (lident, [ty]) when is_option_lident lident -> - [%expr - Ppx_dhall_type.Dhall_type.Optional [%e dhall_type_of_core_type ty]] - | Ptyp_constr (lident, [ty]) when is_list_lident lident -> + | Ptyp_constr (lident, [ ty ]) when is_option_lident lident -> + [%expr Ppx_dhall_type.Dhall_type.Optional [%e dhall_type_of_core_type ty]] + | Ptyp_constr (lident, [ ty ]) when is_list_lident lident -> [%expr Ppx_dhall_type.Dhall_type.List [%e dhall_type_of_core_type ty]] - | Ptyp_constr ({txt= Lident id; _}, []) -> + | Ptyp_constr ({ txt = Lident id; _ }, []) -> evar (id ^ "_dhall_type") - | Ptyp_constr ({txt= Lident id; _}, params) -> + | Ptyp_constr ({ txt = Lident id; _ }, params) -> let dhall_type_fun = evar (id ^ "_dhall_type") in let args = List.map params 
~f:dhall_type_of_core_type in eapply dhall_type_fun args - | Ptyp_constr ({txt= Ldot (prefix, nm); _}, []) -> + | Ptyp_constr ({ txt = Ldot (prefix, nm); _ }, []) -> let mod_path = Longident.name prefix in if String.equal nm "t" then evar (mod_path ^ ".dhall_type") else evar (mod_path ^ "." ^ nm ^ "_dhall_type") - | Ptyp_constr ({txt= Ldot (prefix, nm); _}, params) -> + | Ptyp_constr ({ txt = Ldot (prefix, nm); _ }, params) -> let mod_path = Longident.name prefix in let dhall_type_fun = if String.equal nm "t" then evar (mod_path ^ ".dhall_type") @@ -77,7 +76,7 @@ let dhall_variant_from_constructor_declaration ctor_decl = match ctor_decl.pcd_args with | Pcstr_tuple [] -> [%expr [%e name], None] - | Pcstr_tuple [ty] -> + | Pcstr_tuple [ ty ] -> [%expr [%e name], Some [%e dhall_type_of_core_type ty]] | Pcstr_tuple tys -> let tys_expr = elist (List.map tys ~f:dhall_type_of_core_type) in @@ -105,12 +104,12 @@ let generate_dhall_type type_decl = let dhall_type = match type_decl.ptype_kind with | Ptype_abstract -> ( - match type_decl.ptype_manifest with - | None -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Abstract type declaration has no manifest (right-hand side)" - | Some core_type -> - dhall_type_of_core_type core_type ) + match type_decl.ptype_manifest with + | None -> + Location.raise_errorf ~loc:type_decl.ptype_loc + "Abstract type declaration has no manifest (right-hand side)" + | Some core_type -> + dhall_type_of_core_type core_type ) | Ptype_variant ctor_decls -> [%expr Ppx_dhall_type.Dhall_type.Union @@ -122,8 +121,7 @@ let generate_dhall_type type_decl = [%expr Ppx_dhall_type.Dhall_type.Record [%e - elist - (List.map label_decls ~f:dhall_field_from_label_declaration)]] + elist (List.map label_decls ~f:dhall_field_from_label_declaration)]] | Ptype_open -> Location.raise_errorf ~loc:type_decl.ptype_loc "Open types not supported" @@ -146,7 +144,7 @@ let generate_dhall_type type_decl = pvar a | _ -> Location.raise_errorf ~loc:type_decl.ptype_loc - "Type 
parameter not a type variable" ) + "Type parameter not a type variable") in let abs = eabstract args dhall_type in [%stri let [%p ty_name] = [%e abs]] @@ -154,7 +152,7 @@ let generate_dhall_type type_decl = let generate_dhall_types ~loc:_ ~path:_ (_rec_flag, type_decls) = List.map type_decls ~f:generate_dhall_type -let attributes = [Attribute.T field_key_attr] +let attributes = [ Attribute.T field_key_attr ] let str_type_decl = Deriving.Generator.make_noarg ~attributes generate_dhall_types diff --git a/src/lib/ppx_register_event/register_event.ml b/src/lib/ppx_register_event/register_event.ml index 6557d6dab4a..aef797e428f 100644 --- a/src/lib/ppx_register_event/register_event.ml +++ b/src/lib/ppx_register_event/register_event.ml @@ -11,25 +11,25 @@ let digest s = Md5.digest_string s |> Md5.to_hex let checked_interpolations_statically ~loc msg label_names = match msg with - | {pexp_desc= Pexp_constant (Pconst_string (s, _)); _} -> ( - (* check that every interpolation point $foo in msg has a matching label; - OK to have extra labels not mentioned in message - *) - match Logproc_lib.Interpolator.parse s with - | Error err -> - Location.raise_errorf ~loc - "Encountered an error while parsing the msg: %s" err - | Ok items -> - List.iter items ~f:(function - | `Interpolate interp - when not (List.mem ~equal:String.equal label_names interp) -> - Location.raise_errorf ~loc - "The msg contains interpolation point \"$%s\" which is not a \ - field in the record" - interp - | _ -> - () ) ; - true ) + | { pexp_desc = Pexp_constant (Pconst_string (s, _)); _ } -> ( + (* check that every interpolation point $foo in msg has a matching label; + OK to have extra labels not mentioned in message + *) + match Logproc_lib.Interpolator.parse s with + | Error err -> + Location.raise_errorf ~loc + "Encountered an error while parsing the msg: %s" err + | Ok items -> + List.iter items ~f:(function + | `Interpolate interp + when not (List.mem ~equal:String.equal label_names interp) -> + 
Location.raise_errorf ~loc + "The msg contains interpolation point \"$%s\" which is not a \ + field in the record" + interp + | _ -> + ()) ; + true ) | _ -> false @@ -37,10 +37,10 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = let ctor, label_decls = match ty_ext.ptyext_constructors with (* record argument *) - | [{pext_name; pext_kind= Pext_decl (Pcstr_record labels, None); _}] -> + | [ { pext_name; pext_kind = Pext_decl (Pcstr_record labels, None); _ } ] -> (pext_name.txt, labels) (* no arguments *) - | [{pext_name; pext_kind= Pext_decl (Pcstr_tuple [], None); _}] -> + | [ { pext_name; pext_kind = Pext_decl (Pcstr_tuple [], None); _ } ] -> (pext_name.txt, []) | _ -> Location.raise_errorf ~loc:ty_ext.ptyext_path.loc @@ -48,37 +48,40 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = or no argument" in let label_names = - List.map label_decls ~f:(fun {pld_name= {txt; _}; _} -> txt) + List.map label_decls ~f:(fun { pld_name = { txt; _ }; _ } -> txt) in let has_record_arg = not @@ List.is_empty label_names in let deriver_loc = (* succeeds, because we're calling this deriver *) let find_deriver = function - | { pstr_desc= - Pstr_eval ({pexp_desc= Pexp_ident {txt= Lident id; loc}; _}, _) - ; _ } - | { pstr_desc= + | { pstr_desc = + Pstr_eval ({ pexp_desc = Pexp_ident { txt = Lident id; loc }; _ }, _) + ; _ + } + | { pstr_desc = Pstr_eval - ( { pexp_desc= + ( { pexp_desc = Pexp_apply - ({pexp_desc= Pexp_ident {txt= Lident id; loc}; _}, _) - ; _ } + ({ pexp_desc = Pexp_ident { txt = Lident id; loc }; _ }, _) + ; _ + } , _ ) - ; _ } + ; _ + } when String.equal id deriver -> Some loc | _ -> - failwith - (sprintf "Expected structure item in payload for %s" deriver) + failwith (sprintf "Expected structure item in payload for %s" deriver) in - List.find_map_exn ty_ext.ptyext_attributes ~f:(fun ({attr_name={txt; _}; attr_payload=payload;_}) -> + List.find_map_exn ty_ext.ptyext_attributes + ~f:(fun { attr_name = { txt; _ }; attr_payload = 
payload; _ } -> if String.equal txt "deriving" then match payload with | PStr stris -> Some (List.find_map_exn stris ~f:find_deriver) | _ -> failwith (sprintf "Expected structure payload for %s" deriver) - else None ) + else None) in let (module Ast_builder) = Ast_builder.make deriver_loc in let open Ast_builder in @@ -91,7 +94,7 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = if has_record_arg then let fields = List.map label_names ~f:(fun label -> - sprintf "%s=$%s" label label ) + sprintf "%s=$%s" label label) in sprintf "%s {%s}" ctor (String.concat ~sep:"; " fields) else sprintf "%s" ctor @@ -121,7 +124,7 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = if has_record_arg then let fields = List.map label_names ~f:(fun label -> - (Located.mk (Lident label), pvar label) ) + (Located.mk (Lident label), pvar label)) in Some (record fields Closed) else None @@ -134,7 +137,7 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = if has_record_arg then let fields = List.map label_names ~f:(fun label -> - (Located.mk (Lident label), evar label) ) + (Located.mk (Lident label), evar label)) in Some (record fields None) else None @@ -149,28 +152,28 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = ; [%stri let ([%p pvar (event_name ^ "_structured_events_repr")] : Structured_log_events.repr) = - { id= [%e evar (event_name ^ "_structured_events_id")] - ; event_name= [%e estring event_path] - ; arguments= + { id = [%e evar (event_name ^ "_structured_events_id")] + ; event_name = [%e estring event_path] + ; arguments = Core_kernel.String.Set.of_list [%e elist ~f:estring label_names] - ; log= + ; log = (function | [%p record_pattern] -> Some ( [%e msg] , [%e elist label_decls - ~f:(fun {pld_name= {txt= name; _}; pld_type; _} -> + ~f:(fun { pld_name = { txt = name; _ }; pld_type; _ } + -> Conv_from_ppx_deriving.copy_expression @@ Ppx_deriving_yojson.wrap_runtime @@ Conv_to_ppx_deriving.copy_expression @@ [%expr [%e 
estring name] - , [%e to_yojson pld_type] [%e evar name]] )] - ) + , [%e to_yojson pld_type] [%e evar name]])] ) | _ -> - None ) - ; parse= + None) + ; parse = (fun args -> let result = let args_list = Core_kernel.String.Map.of_alist_exn args in @@ -178,7 +181,8 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = ignore args_list ; [%e List.fold_right label_decls - ~f:(fun {pld_name= {txt= name; _}; pld_type; _} acc -> + ~f: + (fun { pld_name = { txt = name; _ }; pld_type; _ } acc -> Conv_from_ppx_deriving.copy_expression @@ Ppx_deriving_yojson.wrap_runtime @@ Conv_to_ppx_deriving.copy_expression @@ -190,7 +194,7 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = Core_kernel.Result.bind ([%e of_yojson - ~path:(split_path @ [ctor; name]) + ~path:(split_path @ [ ctor; name ]) pld_type] [%e evar name]) ~f:(fun [%p pvar name] -> [%e acc]) @@ -198,16 +202,17 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = Core_kernel.Result.fail [%e estring - (sprintf - "%s, parse: missing argument %s" - event_path name)]] ) + (sprintf "%s, parse: missing argument %s" + event_path name)]]) ~init:[%expr Core_kernel.Result.return [%e record_expr]]] in - match result with Ok ev -> Some ev | Error _ -> None ) }] + match result with Ok ev -> Some ev | Error _ -> None) + }] ; [%stri let () = Structured_log_events.register_constructor - [%e evar (event_name ^ "_structured_events_repr")]] ] + [%e evar (event_name ^ "_structured_events_repr")]] + ] in if checked_interpolations then stris else @@ -221,13 +226,13 @@ let generate_loggers_and_parsers ~loc:_ ~path ty_ext msg_opt = in [%stri let () = - Structured_log_events.check_interpolations_exn - ~msg_loc:[%e msg_loc_str] [%e msg] + Structured_log_events.check_interpolations_exn ~msg_loc:[%e msg_loc_str] + [%e msg] [%e elist ~f:estring label_names]] :: stris let generate_signature_items ~loc ~path:_ ty_ext = - List.concat_map ty_ext.ptyext_constructors ~f:(fun {pext_name; _} -> + List.concat_map 
ty_ext.ptyext_constructors ~f:(fun { pext_name; _ } -> let event_name = String.lowercase pext_name.txt in let (module Ast_builder) = Ast_builder.make loc in let open Ast_builder in @@ -238,7 +243,8 @@ let generate_signature_items ~loc ~path:_ ty_ext = ; psig_value (value_description ~name:(Located.mk (event_name ^ "_structured_events_repr")) - ~type_:[%type: Structured_log_events.repr] ~prim:[]) ] ) + ~type_:[%type: Structured_log_events.repr] ~prim:[]) + ]) let str_type_ext = let args = @@ -247,8 +253,7 @@ let str_type_ext = in Ppxlib.Deriving.Generator.make args generate_loggers_and_parsers -let sig_type_ext = - Ppxlib.Deriving.Generator.make_noarg generate_signature_items +let sig_type_ext = Ppxlib.Deriving.Generator.make_noarg generate_signature_items let () = Ppxlib.Deriving.add deriver ~str_type_ext ~sig_type_ext diff --git a/src/lib/precomputed_values/gen_values/gen_values.ml b/src/lib/precomputed_values/gen_values/gen_values.ml index e5729f66441..9557422bac2 100644 --- a/src/lib/precomputed_values/gen_values/gen_values.ml +++ b/src/lib/precomputed_values/gen_values/gen_values.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Ppxlib open Asttypes @@ -10,8 +9,7 @@ open Async open Mina_state (* TODO: refactor to do compile time selection *) -[%%if -proof_level = "full"] +[%%if proof_level = "full"] let use_dummy_values = false @@ -21,8 +19,7 @@ let use_dummy_values = true [%%endif] -[%%inject -"generate_genesis_proof", generate_genesis_proof] +[%%inject "generate_genesis_proof", generate_genesis_proof] module type S = sig val compiled_values : Genesis_proof.t Async.Deferred.t option @@ -49,7 +46,7 @@ let hashes_to_expr ~loc hashes = @@ List.map hashes ~f:(fun (x, y) -> [%expr [%e estring ~loc x] - , Core.Md5.of_hex_exn [%e estring ~loc (Core.Md5.to_hex y)]] ) + , Core.Md5.of_hex_exn [%e estring ~loc (Core.Md5.to_hex y)]]) let vk_id_to_expr ~loc vk_id = let open Ppxlib.Ast_builder.Default in @@ -96,16 +93,17 @@ module Dummy = 
struct if generate_genesis_proof then Some (Async.return - { Genesis_proof.runtime_config= Runtime_config.default + { Genesis_proof.runtime_config = Runtime_config.default ; constraint_constants ; proof_level ; genesis_constants - ; genesis_ledger= (module Test_genesis_ledger) + ; genesis_ledger = (module Test_genesis_ledger) ; genesis_epoch_data ; consensus_constants ; protocol_state_with_hash - ; constraint_system_digests= hashes - ; proof_data= None }) + ; constraint_system_digests = hashes + ; proof_data = None + }) else None end @@ -127,16 +125,17 @@ module Make_real () = struct Genesis_proof.create_values (module T) (module B) - { runtime_config= Runtime_config.default + { runtime_config = Runtime_config.default ; constraint_constants - ; proof_level= Full + ; proof_level = Full ; genesis_constants - ; genesis_ledger= (module Test_genesis_ledger) + ; genesis_ledger = (module Test_genesis_ledger) ; genesis_epoch_data ; consensus_constants ; protocol_state_with_hash - ; constraint_system_digests= None - ; blockchain_proof_system_id= None } + ; constraint_system_digests = None + ; blockchain_proof_system_id = None + } in values) else None @@ -174,18 +173,19 @@ let main () = ~consensus_constants: (Lazy.force Consensus.Constants.for_unit_tests) in - { runtime_config= Runtime_config.default - ; constraint_constants= + { runtime_config = Runtime_config.default + ; constraint_constants = Genesis_constants.Constraint_constants.for_unit_tests - ; proof_level= Genesis_constants.Proof_level.for_unit_tests - ; genesis_constants= Genesis_constants.for_unit_tests - ; genesis_ledger= Genesis_ledger.for_unit_tests - ; genesis_epoch_data= Consensus.Genesis_epoch_data.for_unit_tests - ; consensus_constants= Lazy.force Consensus.Constants.for_unit_tests + ; proof_level = Genesis_constants.Proof_level.for_unit_tests + ; genesis_constants = Genesis_constants.for_unit_tests + ; genesis_ledger = Genesis_ledger.for_unit_tests + ; genesis_epoch_data = 
Consensus.Genesis_epoch_data.for_unit_tests + ; consensus_constants = Lazy.force Consensus.Constants.for_unit_tests ; protocol_state_with_hash - ; constraint_system_digests= + ; constraint_system_digests = lazy [%e hashes_to_expr ~loc (Lazy.force hashes)] - ; proof_data= None }) + ; proof_data = None + }) let compiled_inputs = lazy @@ -203,30 +203,32 @@ let main () = ~genesis_ledger:Test_genesis_ledger.t ~genesis_epoch_data ~constraint_constants ~consensus_constants in - { Genesis_proof.Inputs.runtime_config= Runtime_config.default + { Genesis_proof.Inputs.runtime_config = Runtime_config.default ; constraint_constants - ; proof_level= Genesis_constants.Proof_level.compiled + ; proof_level = Genesis_constants.Proof_level.compiled ; genesis_constants - ; genesis_ledger= (module Test_genesis_ledger) + ; genesis_ledger = (module Test_genesis_ledger) ; genesis_epoch_data ; consensus_constants ; protocol_state_with_hash - ; constraint_system_digests= + ; constraint_system_digests = [%e match compiled_values with - | Some {constraint_system_digests= hashes; _} -> + | Some { constraint_system_digests = hashes; _ } -> [%expr Some [%e hashes_to_expr ~loc (Lazy.force hashes)]] | None -> [%expr None]] - ; blockchain_proof_system_id= + ; blockchain_proof_system_id = [%e match compiled_values with | Some - {proof_data= Some {blockchain_proof_system_id= id; _}; _} - -> + { proof_data = Some { blockchain_proof_system_id = id; _ } + ; _ + } -> [%expr Some [%e vk_id_to_expr ~loc id]] | _ -> - [%expr None]] }) + [%expr None]] + }) let compiled = [%e @@ -236,29 +238,28 @@ let main () = Some ( lazy (let inputs = Lazy.force compiled_inputs in - { runtime_config= inputs.runtime_config - ; constraint_constants= inputs.constraint_constants - ; proof_level= inputs.proof_level - ; genesis_constants= inputs.genesis_constants - ; genesis_ledger= inputs.genesis_ledger - ; genesis_epoch_data= inputs.genesis_epoch_data - ; consensus_constants= inputs.consensus_constants - ; 
protocol_state_with_hash= + { runtime_config = inputs.runtime_config + ; constraint_constants = inputs.constraint_constants + ; proof_level = inputs.proof_level + ; genesis_constants = inputs.genesis_constants + ; genesis_ledger = inputs.genesis_ledger + ; genesis_epoch_data = inputs.genesis_epoch_data + ; consensus_constants = inputs.consensus_constants + ; protocol_state_with_hash = inputs.protocol_state_with_hash - ; constraint_system_digests= + ; constraint_system_digests = lazy [%e hashes_to_expr ~loc (Lazy.force hashes)] - ; proof_data= + ; proof_data = [%e match compiled_values.proof_data with | Some proof_data -> [%expr Some - { blockchain_proof_system_id= + { blockchain_proof_system_id = [%expr vk_id_to_expr ~loc - proof_data - .blockchain_proof_system_id] - ; genesis_proof= + proof_data.blockchain_proof_system_id] + ; genesis_proof = Core.Binable.of_string (module Mina_base.Proof.Stable.Latest) [%e @@ -266,9 +267,11 @@ let main () = (Binable.to_string ( module Mina_base.Proof.Stable .Latest ) - proof_data.genesis_proof)] }] + proof_data.genesis_proof)] + }] | None -> - [%expr None]] }) )] + [%expr None]] + }) )] | None -> [%expr None]]] in diff --git a/src/lib/proof_carrying_data/proof_carrying_data.ml b/src/lib/proof_carrying_data/proof_carrying_data.ml index 737c7cf5a7c..a8911fc9e56 100644 --- a/src/lib/proof_carrying_data/proof_carrying_data.ml +++ b/src/lib/proof_carrying_data/proof_carrying_data.ml @@ -5,6 +5,6 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type ('a, 'b) t = {data: 'a; proof: 'b} [@@deriving sexp, fields] + type ('a, 'b) t = { data : 'a; proof : 'b } [@@deriving sexp, fields] end end] diff --git a/src/lib/protocol_version/protocol_version.ml b/src/lib/protocol_version/protocol_version.ml index e22d5b805c7..86546b043b5 100644 --- a/src/lib/protocol_version/protocol_version.ml +++ b/src/lib/protocol_version/protocol_version.ml @@ -5,7 +5,8 @@ open Core_kernel [%%versioned module Stable = struct module V1 = 
struct - type t = {major: int; minor: int; patch: int} [@@deriving compare, sexp] + type t = { major : int; minor : int; patch : int } + [@@deriving compare, sexp] let to_latest = Fn.id end @@ -30,7 +31,7 @@ let create_exn ~major ~minor ~patch = if major < 0 || minor < 0 || patch < 0 then failwith "Protocol_version.create: major, minor, and patch must be nonnegative" ; - {major; minor; patch} + { major; minor; patch } let create_opt ~major ~minor ~patch = try Some (create_exn ~major ~minor ~patch) with _ -> None @@ -47,17 +48,17 @@ let is_digit_string s = let of_string_exn s = match String.split s ~on:'.' with - | [major; minor; patch] -> + | [ major; minor; patch ] -> if not ( is_digit_string major && is_digit_string minor && is_digit_string patch ) then - failwith - "Protocol_version.of_string_exn: unexpected nondigits in input" ; - { major= Int.of_string major - ; minor= Int.of_string minor - ; patch= Int.of_string patch } + failwith "Protocol_version.of_string_exn: unexpected nondigits in input" ; + { major = Int.of_string major + ; minor = Int.of_string minor + ; patch = Int.of_string patch + } | _ -> failwith "Protocol_version.of_string_exn: expected string of form nn.nn.nn" diff --git a/src/lib/prover/intf.ml b/src/lib/prover/intf.ml index a56ec2eb43a..cc61d70e5cc 100644 --- a/src/lib/prover/intf.ml +++ b/src/lib/prover/intf.ml @@ -23,7 +23,7 @@ module type S = sig -> constraint_constants:Genesis_constants.Constraint_constants.t -> t Deferred.t - val initialized : t -> [`Initialized] Deferred.Or_error.t + val initialized : t -> [ `Initialized ] Deferred.Or_error.t val extend_blockchain : t diff --git a/src/lib/prover/prover.ml b/src/lib/prover/prover.ml index 16f2056e078..6158333ec77 100644 --- a/src/lib/prover/prover.ml +++ b/src/lib/prover/prover.ml @@ -14,24 +14,26 @@ module Extend_blockchain_input = struct module V1 = struct type t = - { chain: Blockchain.Stable.V1.t - ; next_state: Protocol_state.Value.Stable.V1.t - ; block: 
Snark_transition.Value.Stable.V1.t - ; ledger_proof: Ledger_proof.Stable.V1.t option - ; prover_state: Consensus.Data.Prover_state.Stable.V1.t - ; pending_coinbase: Pending_coinbase_witness.Stable.V1.t } + { chain : Blockchain.Stable.V1.t + ; next_state : Protocol_state.Value.Stable.V1.t + ; block : Snark_transition.Value.Stable.V1.t + ; ledger_proof : Ledger_proof.Stable.V1.t option + ; prover_state : Consensus.Data.Prover_state.Stable.V1.t + ; pending_coinbase : Pending_coinbase_witness.Stable.V1.t + } let to_latest = Fn.id end end] type t = Stable.Latest.t = - { chain: Blockchain.t - ; next_state: Protocol_state.Value.t - ; block: Snark_transition.Value.t - ; ledger_proof: Ledger_proof.t option - ; prover_state: Consensus.Data.Prover_state.t - ; pending_coinbase: Pending_coinbase_witness.t } + { chain : Blockchain.t + ; next_state : Protocol_state.Value.t + ; block : Snark_transition.Value.t + ; ledger_proof : Ledger_proof.t option + ; prover_state : Consensus.Data.Prover_state.t + ; pending_coinbase : Pending_coinbase_witness.t + } [@@deriving sexp] end @@ -54,11 +56,12 @@ module Worker_state = struct (* bin_io required by rpc_parallel *) type init_arg = - { conf_dir: string - ; logger: Logger.Stable.Latest.t - ; proof_level: Genesis_constants.Proof_level.Stable.Latest.t - ; constraint_constants: - Genesis_constants.Constraint_constants.Stable.Latest.t } + { conf_dir : string + ; logger : Logger.Stable.Latest.t + ; proof_level : Genesis_constants.Proof_level.Stable.Latest.t + ; constraint_constants : + Genesis_constants.Constraint_constants.Stable.Latest.t + } [@@deriving bin_io_unversioned] type t = (module S) @@ -66,25 +69,27 @@ module Worker_state = struct let ledger_proof_opt (chain : Blockchain.t) next_state = function | Some t -> Ledger_proof. 
- ({(statement t) with sok_digest= sok_digest t}, underlying_proof t) + ({ (statement t) with sok_digest = sok_digest t }, underlying_proof t) | None -> let bs = Protocol_state.blockchain_state in let lh x = Blockchain_state.snarked_ledger_hash (bs x) in let tok x = Blockchain_state.snarked_next_available_token (bs x) in let chain_state = Blockchain_snark.Blockchain.state chain in - ( { source= lh chain_state - ; target= lh next_state - ; supply_increase= Currency.Amount.zero - ; fee_excess= Fee_excess.zero - ; sok_digest= Sok_message.Digest.default - ; next_available_token_before= tok chain_state - ; next_available_token_after= tok next_state - ; pending_coinbase_stack_state= - { source= Pending_coinbase.Stack.empty - ; target= Pending_coinbase.Stack.empty } } + ( { source = lh chain_state + ; target = lh next_state + ; supply_increase = Currency.Amount.zero + ; fee_excess = Fee_excess.zero + ; sok_digest = Sok_message.Digest.default + ; next_available_token_before = tok chain_state + ; next_available_token_after = tok next_state + ; pending_coinbase_stack_state = + { source = Pending_coinbase.Stack.empty + ; target = Pending_coinbase.Stack.empty + } + } , Proof.transaction_dummy ) - let create {logger; proof_level; constraint_constants; _} : t Deferred.t = + let create { logger; proof_level; constraint_constants; _ } : t Deferred.t = Deferred.return (let m = match proof_level with @@ -120,26 +125,27 @@ module Worker_state = struct (Consensus.Data.Prover_state.handler ~constraint_constants state_for_handler ~pending_coinbase) - { transition= block - ; prev_state= - Blockchain_snark.Blockchain.state chain } + { transition = block + ; prev_state = + Blockchain_snark.Blockchain.state chain + } [ ( Blockchain_snark.Blockchain.state chain , Blockchain_snark.Blockchain.proof chain ) - ; t ] + ; t + ] next_state in Blockchain_snark.Blockchain.create ~state:next_state - ~proof ) + ~proof) in Or_error.iter_error res ~f:(fun e -> [%log error] - ~metadata:[("error", 
Error_json.error_to_yojson e)] - "Prover threw an error while extending block: $error" ) ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + "Prover threw an error while extending block: $error") ; res - let verify state proof = B.Proof.verify [(state, proof)] - end - : S ) + let verify state proof = B.Proof.verify [ (state, proof) ] + end : S ) | Check -> ( module struct module Transaction_snark = Transaction_snark @@ -152,8 +158,9 @@ module Worker_state = struct let res = Blockchain_snark.Blockchain_snark_state.check ~proof_level ~constraint_constants - { transition= block - ; prev_state= Blockchain_snark.Blockchain.state chain } + { transition = block + ; prev_state = Blockchain_snark.Blockchain.state chain + } ~handler: (Consensus.Data.Prover_state.handler state_for_handler ~constraint_constants ~pending_coinbase) @@ -161,17 +168,16 @@ module Worker_state = struct (Protocol_state.hash next_state) |> Or_error.map ~f:(fun () -> Blockchain_snark.Blockchain.create ~state:next_state - ~proof:Mina_base.Proof.blockchain_dummy ) + ~proof:Mina_base.Proof.blockchain_dummy) in Or_error.iter_error res ~f:(fun e -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] - "Prover threw an error while extending block: $error" ) ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + "Prover threw an error while extending block: $error") ; Async.Deferred.return res let verify _state _proof = Deferred.return true - end - : S ) + end : S ) | None -> ( module struct module Transaction_snark = Transaction_snark @@ -185,8 +191,7 @@ module Worker_state = struct ~state:next_state) let verify _ _ = Deferred.return true - end - : S ) + end : S ) in Memory_stats.log_memory_stats logger ~process:"prover" ; m) @@ -203,26 +208,33 @@ module Functions = struct let create input output f : ('i, 'o) t = (input, output, f) let initialized = - create bin_unit [%bin_type_class: [`Initialized]] (fun w () -> + create bin_unit [%bin_type_class: [ `Initialized ]] (fun w () -> 
let (module W) = Worker_state.get w in - Deferred.return `Initialized ) + Deferred.return `Initialized) let extend_blockchain = create Extend_blockchain_input.Stable.Latest.bin_t [%bin_type_class: Blockchain.Stable.Latest.t Or_error.t] - (fun w - {chain; next_state; ledger_proof; block; prover_state; pending_coinbase} + (fun + w + { chain + ; next_state + ; ledger_proof + ; block + ; prover_state + ; pending_coinbase + } -> let (module W) = Worker_state.get w in W.extend_blockchain chain next_state block ledger_proof prover_state - pending_coinbase ) + pending_coinbase) let verify_blockchain = create Blockchain.Stable.Latest.bin_t bin_bool (fun w chain -> let (module W) = Worker_state.get w in W.verify (Blockchain_snark.Blockchain.state chain) - (Blockchain_snark.Blockchain.proof chain) ) + (Blockchain_snark.Blockchain.proof chain)) end module Worker = struct @@ -230,10 +242,11 @@ module Worker = struct module F = Rpc_parallel.Function type 'w functions = - { initialized: ('w, unit, [`Initialized]) F.t - ; extend_blockchain: + { initialized : ('w, unit, [ `Initialized ]) F.t + ; extend_blockchain : ('w, Extend_blockchain_input.t, Blockchain.t Or_error.t) F.t - ; verify_blockchain: ('w, Blockchain.t, bool) F.t } + ; verify_blockchain : ('w, Blockchain.t, bool) F.t + } module Worker_state = Worker_state @@ -246,8 +259,8 @@ module Worker = struct module Functions (C : Rpc_parallel.Creator - with type worker_state := Worker_state.t - and type connection_state := Connection_state.t) = + with type worker_state := Worker_state.t + and type connection_state := Connection_state.t) = struct let functions = let f (i, o, f) = @@ -256,12 +269,13 @@ module Worker = struct ~bin_input:i ~bin_output:o () in let open Functions in - { initialized= f initialized - ; extend_blockchain= f extend_blockchain - ; verify_blockchain= f verify_blockchain } + { initialized = f initialized + ; extend_blockchain = f extend_blockchain + ; verify_blockchain = f verify_blockchain + } let 
init_worker_state - Worker_state.{conf_dir; logger; proof_level; constraint_constants} = + Worker_state.{ conf_dir; logger; proof_level; constraint_constants } = let max_size = 256 * 1024 * 512 in let num_rotate = 1 in Logger.Consumer_registry.register ~id:"default" @@ -271,36 +285,37 @@ module Worker = struct ~log_filename:"mina-prover.log" ~max_size ~num_rotate) ; [%log info] "Prover started" ; Worker_state.create - {conf_dir; logger; proof_level; constraint_constants} + { conf_dir; logger; proof_level; constraint_constants } - let init_connection_state ~connection:_ ~worker_state:_ () = - Deferred.unit + let init_connection_state ~connection:_ ~worker_state:_ () = Deferred.unit end end include Rpc_parallel.Make (T) end -type t = {connection: Worker.Connection.t; process: Process.t; logger: Logger.t} +type t = + { connection : Worker.Connection.t; process : Process.t; logger : Logger.t } let create ~logger ~pids ~conf_dir ~proof_level ~constraint_constants = let on_failure err = [%log error] "Prover process failed with error $err" - ~metadata:[("err", Error_json.error_to_yojson err)] ; + ~metadata:[ ("err", Error_json.error_to_yojson err) ] ; Error.raise err in let%map connection, process = (* HACK: Need to make connection_timeout long since creating a prover can take a long time*) Worker.spawn_in_foreground_exn ~connection_timeout:(Time.Span.of_min 1.) 
~on_failure ~shutdown_on:Disconnect ~connection_state_init_arg:() - {conf_dir; logger; proof_level; constraint_constants} + { conf_dir; logger; proof_level; constraint_constants } in [%log info] "Daemon started process of kind $process_kind with pid $prover_pid" ~metadata: [ ("prover_pid", `Int (Process.pid process |> Pid.to_int)) ; ( "process_kind" - , `String Child_processes.Termination.(show_process_kind Prover) ) ] ; + , `String Child_processes.Termination.(show_process_kind Prover) ) + ] ; Child_processes.Termination.register_process pids process Child_processes.Termination.Prover ; don't_wait_for @@ -309,20 +324,20 @@ let create ~logger ~pids ~conf_dir ~proof_level ~constraint_constants = ~f:(fun stdout -> return @@ [%log debug] "Prover stdout: $stdout" - ~metadata:[("stdout", `String stdout)] ) ; + ~metadata:[ ("stdout", `String stdout) ]) ; don't_wait_for @@ Pipe.iter (Process.stderr process |> Reader.pipe) ~f:(fun stderr -> return @@ [%log error] "Prover stderr: $stderr" - ~metadata:[("stderr", `String stderr)] ) ; - {connection; process; logger} + ~metadata:[ ("stderr", `String stderr) ]) ; + { connection; process; logger } -let initialized {connection; _} = +let initialized { connection; _ } = Worker.Connection.run connection ~f:Worker.functions.initialized ~arg:() -let prove_from_input_sexp {connection; logger; _} sexp = +let prove_from_input_sexp { connection; logger; _ } sexp = let input = Extend_blockchain_input.t_of_sexp sexp in match%map Worker.Connection.run connection ~f:Worker.functions.extend_blockchain @@ -334,10 +349,10 @@ let prove_from_input_sexp {connection; logger; _} sexp = true | Error e -> [%log error] "prover errored :(" - ~metadata:[("error", Error_json.error_to_yojson e)] ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] ; false -let extend_blockchain {connection; logger; _} chain next_state block +let extend_blockchain { connection; logger; _ } chain next_state block ledger_proof prover_state pending_coinbase = let input = 
{ Extend_blockchain_input.chain @@ -345,7 +360,8 @@ let extend_blockchain {connection; logger; _} chain next_state block ; block ; ledger_proof ; prover_state - ; pending_coinbase } + ; pending_coinbase + } in match%map Worker.Connection.run connection ~f:Worker.functions.extend_blockchain @@ -358,15 +374,16 @@ let extend_blockchain {connection; logger; _} chain next_state block [%log error] ~metadata: [ ( "input-sexp" - , `String - (Sexp.to_string (Extend_blockchain_input.sexp_of_t input)) ) + , `String (Sexp.to_string (Extend_blockchain_input.sexp_of_t input)) + ) ; ( "input-bin-io" , `String (Base64.encode_exn (Binable.to_string (module Extend_blockchain_input.Stable.Latest) input)) ) - ; ("error", Error_json.error_to_yojson e) ] + ; ("error", Error_json.error_to_yojson e) + ] "Prover failed: $error" ; Error e @@ -412,11 +429,12 @@ let create_genesis_block t (genesis_inputs : Genesis_proof.Inputs.t) = ~genesis_ledger in let pending_coinbase = - { Mina_base.Pending_coinbase_witness.pending_coinbases= + { Mina_base.Pending_coinbase_witness.pending_coinbases = Mina_base.Pending_coinbase.create ~depth:constraint_constants.pending_coinbase_depth () |> Or_error.ok_exn - ; is_new_stack= true } + ; is_new_stack = true + } in let prover_state : Consensus_mechanism.Data.Prover_state.t = Consensus.Data.Prover_state.genesis_data ~genesis_epoch_ledger diff --git a/src/lib/quickcheck_lib/quickcheck_lib.ml b/src/lib/quickcheck_lib/quickcheck_lib.ml index 61bfb93f18e..8674cea008f 100644 --- a/src/lib/quickcheck_lib/quickcheck_lib.ml +++ b/src/lib/quickcheck_lib/quickcheck_lib.ml @@ -1,14 +1,12 @@ (* quickcheck_lib.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Quickcheck.Generator open Quickcheck.Let_syntax -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Currency = Currency_nonconsensus.Currency @@ -72,7 +70,7 @@ let gen_symm_dirichlet : int -> float list Quickcheck.Generator.t = (* technically this should be (0, 
1] and not (0, 1) but I expect it doesn't matter for our purposes. *) let%map uniform = Float.gen_uniform_excl 0. 1. in - Float.log uniform ) + Float.log uniform) in let sum = List.fold gammas ~init:0. ~f:(fun x y -> x +. y) in List.map gammas ~f:(fun gamma -> gamma /. sum) @@ -125,8 +123,8 @@ let gen_division_generic (type t) (module M : Int_s with type t = t) (n : t) impossible. " | head :: rest -> (* Going through floating point land may have caused some rounding error. We - tack it onto the first result so that the sum of the output is equal to n. - *) + tack it onto the first result so that the sum of the output is equal to n. + *) if M.( > ) n total then M.(head + (n - total)) :: rest else M.(head - (total - n)) :: rest ) @@ -161,14 +159,14 @@ let gen_imperative_rose_tree ?(p = 0.75) (root_gen : 'a t) in let%map forks = map_gens positive_fork_sizes ~f:(fun s -> - tuple2 node_gen (with_size ~size:(s - 1) self) ) + tuple2 node_gen (with_size ~size:(s - 1) self)) in fun parent -> Rose_tree.T - (parent, List.map forks ~f:(fun (this, f) -> f (this parent))) ) + (parent, List.map forks ~f:(fun (this, f) -> f (this parent)))) -let gen_imperative_ktree ?(p = 0.75) (root_gen : 'a t) - (node_gen : ('a -> 'a) t) = +let gen_imperative_ktree ?(p = 0.75) (root_gen : 'a t) (node_gen : ('a -> 'a) t) + = let%bind root = root_gen in imperative_fixed_point root ~f:(fun self -> match%bind size with @@ -177,7 +175,7 @@ let gen_imperative_ktree ?(p = 0.75) (root_gen : 'a t) (* this case is optional but more effecient *) | 1 -> let%map this = node_gen in - fun parent -> [this parent] + fun parent -> [ this parent ] | n -> let%bind this = node_gen in let%bind fork_count = geometric ~p 1 >>| Int.max n in @@ -187,7 +185,7 @@ let gen_imperative_ktree ?(p = 0.75) (root_gen : 'a t) in fun parent -> let x = this parent in - x :: List.bind forks ~f:(fun f -> f x) ) + x :: List.bind forks ~f:(fun f -> f x)) let gen_imperative_list (root_gen : 'a t) (node_gen : ('a -> 'a) t) = let%bind root 
= root_gen in @@ -198,7 +196,7 @@ let gen_imperative_list (root_gen : 'a t) (node_gen : ('a -> 'a) t) = | n -> let%bind this = node_gen in let%map f = with_size ~size:(n - 1) self in - fun parent -> parent :: f (this parent) ) + fun parent -> parent :: f (this parent)) let%test_module "Quickcheck lib tests" = ( module struct @@ -224,7 +222,7 @@ let%test_module "Quickcheck lib tests" = else Or_error.errorf !"elements do not add up correctly %d %d" - elem next_elem ) + elem next_elem) in - assert (Result.is_ok result) ) + assert (Result.is_ok result)) end ) diff --git a/src/lib/random_oracle/random_oracle.ml b/src/lib/random_oracle/random_oracle.ml index 004b07a7c3e..740a94ebdaa 100644 --- a/src/lib/random_oracle/random_oracle.ml +++ b/src/lib/random_oracle/random_oracle.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Pickles.Impls.Step.Internal_Basic @@ -25,8 +23,7 @@ module Input = Random_oracle_input let params : Field.t Sponge.Params.t = Sponge.Params.(map pasta_p ~f:Field.of_string) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Inputs = Pickles.Tick_field_sponge.Inputs @@ -86,8 +83,7 @@ let update ~state = update ~state params let hash ?init = hash ?init params -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Checked = struct module Inputs = Pickles.Step_main_inputs.Sponge.Permutation @@ -111,7 +107,7 @@ module Checked = struct let hash ?init xs = O1trace.measure "Random_oracle.hash" (fun () -> - hash ?init:(Option.map init ~f:(State.map ~f:constant)) params xs ) + hash ?init:(Option.map init ~f:(State.map ~f:constant)) params xs) let pack_input = Input.pack_to_fields ~size_in_bits:Field.size_in_bits ~pack:Field.Var.pack @@ -129,21 +125,20 @@ let prefix_to_field (s : string) = assert (bits_per_character * String.length s < Field.size_in_bits) ; Field.project Fold_lib.Fold.(to_list (string_bits (s :> 
string))) -let salt (s : string) = update ~state:initial_state [|prefix_to_field s|] +let salt (s : string) = update ~state:initial_state [| prefix_to_field s |] let%test_unit "iterativeness" = let x1 = Field.random () in let x2 = Field.random () in let x3 = Field.random () in let x4 = Field.random () in - let s_full = update ~state:initial_state [|x1; x2; x3; x4|] in + let s_full = update ~state:initial_state [| x1; x2; x3; x4 |] in let s_it = - update ~state:(update ~state:initial_state [|x1; x2|]) [|x3; x4|] + update ~state:(update ~state:initial_state [| x1; x2 |]) [| x3; x4 |] in [%test_eq: Field.t array] s_full s_it -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let%test_unit "sponge checked-unchecked" = let open Pickles.Impls.Step in @@ -153,8 +148,8 @@ let%test_unit "sponge checked-unchecked" = T.Test.test_equal ~equal:T.Field.equal ~sexp_of_t:T.Field.sexp_of_t T.Typ.(field * field) T.Typ.field - (fun (x, y) -> make_checked (fun () -> Checked.hash [|x; y|])) - (fun (x, y) -> hash [|x; y|]) + (fun (x, y) -> make_checked (fun () -> Checked.hash [| x; y |])) + (fun (x, y) -> hash [| x; y |]) (x, y) [%%endif] diff --git a/src/lib/random_oracle/random_oracle.mli b/src/lib/random_oracle/random_oracle.mli index 4c4dc54dc77..a7d9dd0597b 100644 --- a/src/lib/random_oracle/random_oracle.mli +++ b/src/lib/random_oracle/random_oracle.mli @@ -22,10 +22,10 @@ end include Intf.S - with type field := Field.t - and type field_constant := Field.t - and type bool := bool - and module State := State + with type field := Field.t + and type field_constant := Field.t + and type bool := bool + and module State := State val salt : string -> Field.t State.t @@ -33,9 +33,9 @@ val salt : string -> Field.t State.t module Checked : Intf.S - with type field := Field.Var.t - and type field_constant := Field.t - and type bool := Boolean.var - and module State := State + with type field := Field.Var.t + and type field_constant := Field.t + and type bool := Boolean.var + and 
module State := State [%%endif] diff --git a/src/lib/random_oracle_input/random_oracle_input.ml b/src/lib/random_oracle_input/random_oracle_input.ml index ad250d10cab..15776abddeb 100644 --- a/src/lib/random_oracle_input/random_oracle_input.ml +++ b/src/lib/random_oracle_input/random_oracle_input.ml @@ -1,22 +1,23 @@ open Core_kernel type ('field, 'bool) t = - {field_elements: 'field array; bitstrings: 'bool list array} + { field_elements : 'field array; bitstrings : 'bool list array } [@@deriving sexp, compare] let append t1 t2 = - { field_elements= Array.append t1.field_elements t2.field_elements - ; bitstrings= Array.append t1.bitstrings t2.bitstrings } + { field_elements = Array.append t1.field_elements t2.field_elements + ; bitstrings = Array.append t1.bitstrings t2.bitstrings + } -let field_elements x = {field_elements= x; bitstrings= [||]} +let field_elements x = { field_elements = x; bitstrings = [||] } -let field x = {field_elements= [|x|]; bitstrings= [||]} +let field x = { field_elements = [| x |]; bitstrings = [||] } -let bitstring x = {field_elements= [||]; bitstrings= [|x|]} +let bitstring x = { field_elements = [||]; bitstrings = [| x |] } -let bitstrings x = {field_elements= [||]; bitstrings= x} +let bitstrings x = { field_elements = [||]; bitstrings = x } -let pack_bits ~max_size ~pack {field_elements= _; bitstrings} = +let pack_bits ~max_size ~pack { field_elements = _; bitstrings } = let rec pack_full_fields rev_fields bits length = if length >= max_size then let field_bits, bits = List.split_n bits max_size in @@ -28,17 +29,17 @@ let pack_bits ~max_size ~pack {field_elements= _; bitstrings} = let n = n + List.length bitstring in let bits = bits @ bitstring in let acc, bits, n = pack_full_fields acc bits n in - (acc, bits, n) ) + (acc, bits, n)) in if remaining_length = 0 then packed_field_elements else pack remaining_bits :: packed_field_elements -let pack_to_fields ~size_in_bits ~pack {field_elements; bitstrings} = +let pack_to_fields 
~size_in_bits ~pack { field_elements; bitstrings } = let max_size = size_in_bits - 1 in - let packed_bits = pack_bits ~max_size ~pack {field_elements; bitstrings} in + let packed_bits = pack_bits ~max_size ~pack { field_elements; bitstrings } in Array.append field_elements (Array.of_list_rev packed_bits) -let to_bits ~unpack {field_elements; bitstrings} = +let to_bits ~unpack { field_elements; bitstrings } = let field_bits = Array.map ~f:unpack field_elements in List.concat @@ Array.to_list @@ Array.append field_bits bitstrings @@ -53,14 +54,15 @@ module Coding = struct [ of_int_exn @@ ((x lsr 24) land 0xff) ; of_int_exn @@ ((x lsr 16) land 0xff) ; of_int_exn @@ ((x lsr 8) land 0xff) - ; of_int_exn @@ (x land 0xff) ] + ; of_int_exn @@ (x land 0xff) + ] in let len1 = len_to_string @@ Array.length t.field_elements in let fields = (* We only support 32byte fields *) let () = match t.field_elements with - | [|x; _|] -> + | [| x; _ |] -> assert (String.length (string_of_field x) = 32) | _ -> () @@ -84,7 +86,7 @@ module Coding = struct in let combined = bs @ pad in assert (List.length combined = 8) ; - go 0 0 combined ) + go 0 0 combined) |> List.map ~f:Char.of_int_exn |> List.rev |> String.of_char_list in @@ -115,7 +117,7 @@ module Coding = struct let run p cs = p cs |> M.bind ~f:(fun (a, cs') -> - match cs' with [] -> M.return a | _ -> M.fail `Expected_eof ) + match cs' with [] -> M.return a | _ -> M.fail `Expected_eof) let fail why _ = M.fail why @@ -149,19 +151,15 @@ module Coding = struct let many p = (fun cs -> let rec go xs acc = - match p xs with - | Ok (a, xs) -> - go xs (a :: acc) - | Error _ -> - (acc, xs) + match p xs with Ok (a, xs) -> go xs (a :: acc) | Error _ -> (acc, xs) in - M.return @@ go cs [] ) + M.return @@ go cs []) |> map ~f:List.rev let%test_unit "many" = - [%test_eq: (char list, [`Expected_eof]) Result.t] - (run (many u8) ['a'; 'b'; 'c']) - (Result.return ['a'; 'b'; 'c']) + [%test_eq: (char list, [ `Expected_eof ]) Result.t] + (run (many u8) [ 
'a'; 'b'; 'c' ]) + (Result.return [ 'a'; 'b'; 'c' ]) (** p exactly n times *) let exactly n p = @@ -174,14 +172,14 @@ module Coding = struct let%bind a, xs = p xs in go xs (a :: acc) (i - 1) in - go cs [] n ) + go cs [] n) |> map ~f:List.rev let%test_unit "exactly" = [%test_eq: - (char list * char list, [`Expected_eof | `Unexpected_eof]) Result.t] - ((exactly 3 u8) ['a'; 'b'; 'c'; 'd']) - (Result.return (['a'; 'b'; 'c'], ['d'])) + (char list * char list, [ `Expected_eof | `Unexpected_eof ]) Result.t] + ((exactly 3 u8) [ 'a'; 'b'; 'c'; 'd' ]) + (Result.return ([ 'a'; 'b'; 'c' ], [ 'd' ])) let return_res r cs = r |> Result.map ~f:(fun x -> (x, cs)) end @@ -205,7 +203,8 @@ module Coding = struct ; (b land (0x1 lsl 3)) lsr 3 ; (b land (0x1 lsl 2)) lsr 2 ; (b land (0x1 lsl 1)) lsr 1 - ; b land 0x1 ] + ; b land 0x1 + ] |> List.map ~f (** Deserialize bytes into a random oracle input with 32byte fields according to the RFC0038 specification *) @@ -224,7 +223,7 @@ module Coding = struct let%map bytes = Parser.(many u8) in let bits = List.concat_map ~f:(bits_of_byte ~of_bool) bytes in let bitstring = List.take bits len2 in - {field_elements= Array.of_list fields; bitstrings= [|bitstring|]} + { field_elements = Array.of_list fields; bitstrings = [| bitstring |] } in Parser.run parser s @@ -241,7 +240,7 @@ module Coding = struct let pad = List.init (8 - List.length xs) ~f:(Fn.const false) in let combined = xs @ pad in assert (List.length combined = 8) ; - go 0 0 combined ) + go 0 0 combined) |> List.map ~f:Char.of_int_exn |> String.of_char_list @@ -267,8 +266,9 @@ let%test_module "random_oracle input" = in let%map bitstrings = list (list bool) in ( size_in_bits - , { field_elements= Array.of_list field_elements - ; bitstrings= Array.of_list bitstrings } ) + , { field_elements = Array.of_list field_elements + ; bitstrings = Array.of_list bitstrings + } ) let%test_unit "field/string partial isomorphism bitstrings" = Quickcheck.test ~trials:300 @@ -279,7 +279,7 @@ let%test_module 
"random_oracle input" = Coding.field_of_string serialized ~size_in_bits:255 in [%test_eq: (bool list, unit) Result.t] (input |> Result.return) - deserialized ) + deserialized) let%test_unit "serialize/deserialize partial isomorphism 32byte fields" = let size_in_bits = 255 in @@ -298,21 +298,22 @@ let%test_module "random_oracle input" = in let normalized t = { t with - bitstrings= + bitstrings = ( t.bitstrings |> Array.to_list |> List.concat - |> fun xs -> [|xs|] ) } + |> fun xs -> [| xs |] ) + } in assert ( Array.for_all input.field_elements ~f:(fun el -> - List.length el = size_in_bits ) ) ; + List.length el = size_in_bits) ) ; Result.iter deserialized ~f:(fun x -> assert ( Array.for_all x.field_elements ~f:(fun el -> - List.length el = size_in_bits ) ) ) ; + List.length el = size_in_bits) )) ; [%test_eq: - ((bool list, bool) t, [`Expected_eof | `Unexpected_eof]) Result.t] + ((bool list, bool) t, [ `Expected_eof | `Unexpected_eof ]) Result.t] (normalized input |> Result.return) - (deserialized |> Result.map ~f:normalized) ) + (deserialized |> Result.map ~f:normalized)) let%test_unit "data is preserved by to_bits" = Quickcheck.test ~trials:300 (gen_input ()) @@ -324,10 +325,10 @@ let%test_module "random_oracle input" = Array.fold ~init:bits input.field_elements ~f:(fun bits field -> (* The next chunk of [size_in_bits] bits is for the field element. - *) + *) let field_bits, rest = List.split_n bits size_in_bits in assert (bools_equal field_bits field) ; - rest ) + rest) in (* Bits come after. *) let remaining_bits = @@ -338,10 +339,10 @@ let%test_module "random_oracle input" = List.split_n bits (List.length bitstring) in assert (bools_equal bitstring_bits bitstring) ; - rest ) + rest) in (* All bits should have been consumed. 
*) - assert (List.is_empty remaining_bits) ) + assert (List.is_empty remaining_bits)) let%test_unit "data is preserved by pack_to_fields" = Quickcheck.test ~trials:300 (gen_input ()) @@ -354,13 +355,13 @@ let%test_module "random_oracle input" = ~f:(fun fields input_field -> (* The next field element should be the literal field element passed in. - *) + *) match fields with | [] -> failwith "Too few field elements" | field :: rest -> assert ([%equal: bool list] field input_field) ; - rest ) + rest) in (* Check that the remaining fields have the correct size. *) let final_field_idx = List.length bitstring_fields - 1 in @@ -370,14 +371,14 @@ let%test_module "random_oracle input" = fewer bits than the maximum field element to ensure that it doesn't overflow, so we expect [size_in_bits - 1] bits for maximum safe density. - *) + *) assert (List.length field_bits = size_in_bits - 1) else ( (* This field will be comprised of the remaining bits, up to a maximum of [size_in_bits - 1]. It should not be empty. - *) + *) assert (not (List.is_empty field_bits)) ; - assert (List.length field_bits < size_in_bits) ) ) ; + assert (List.length field_bits < size_in_bits) )) ; let rec go input_bitstrings packed_fields = match (input_bitstrings, packed_fields) with | [], [] -> @@ -409,6 +410,6 @@ let%test_module "random_oracle input" = in (* Check that the bits match between the input bitstring and the remaining fields. 
- *) - go (Array.to_list input.bitstrings) bitstring_fields ) + *) + go (Array.to_list input.bitstrings) bitstring_fields) end ) diff --git a/src/lib/rc_pool/rc_pool.ml b/src/lib/rc_pool/rc_pool.ml index 35f68466e41..18e30bb13fa 100644 --- a/src/lib/rc_pool/rc_pool.ml +++ b/src/lib/rc_pool/rc_pool.ml @@ -40,7 +40,7 @@ module Make (Key : Hashable.S) (Data : Data_intf with type key := Key.t) : | None -> Some (Data.copy data, 1) | Some (d, n) -> - Some (d, n) ) + Some (d, n)) let free t key = Key.Table.change t key ~f:(function @@ -49,7 +49,7 @@ module Make (Key : Hashable.S) (Data : Data_intf with type key := Key.t) : | Some (_, 1) -> None | Some (d, n) -> - Some (d, n - 1) ) + Some (d, n - 1)) let find t key = Key.Table.find t key |> Option.map ~f:fst end diff --git a/src/lib/rocksdb/database.ml b/src/lib/rocksdb/database.ml index aa5f707df0c..fc139006e00 100644 --- a/src/lib/rocksdb/database.ml +++ b/src/lib/rocksdb/database.ml @@ -5,7 +5,8 @@ open Core (* Uuid.t deprecates sexp functions; use Uuid.Stable.V1 *) module T = struct - type t = {uuid: Uuid.Stable.V1.t; db: (Rocks.t [@sexp.opaque])} [@@deriving sexp] + type t = { uuid : Uuid.Stable.V1.t; db : (Rocks.t[@sexp.opaque]) } + [@@deriving sexp] end include T @@ -13,7 +14,7 @@ include T let create directory = let opts = Rocks.Options.create () in Rocks.Options.set_create_if_missing opts true ; - {uuid= Uuid_unix.create (); db= Rocks.open_db ~opts directory} + { uuid = Uuid_unix.create (); db = Rocks.open_db ~opts directory } let create_checkpoint t dir = Rocks.checkpoint_create t.db ~dir ?log_size_for_flush:None ; @@ -35,7 +36,7 @@ let set_batch t ?(remove_keys = []) let batch = Rocks.WriteBatch.create () in (* write to batch *) List.iter key_data_pairs ~f:(fun (key, data) -> - Rocks.WriteBatch.put batch key data ) ; + Rocks.WriteBatch.put batch key data) ; (* Delete any key pairs *) List.iter remove_keys ~f:(fun key -> Rocks.WriteBatch.delete batch key) ; (* commit batch *) @@ -106,7 +107,7 @@ let%test_unit 
"to_alist (of_alist l) = l" = [%test_result: (Bigstring.t * Bigstring.t) list] ~expect:sorted alist ; close db ; - Async.Deferred.unit ) ) + Async.Deferred.unit)) let%test_unit "checkpoint read" = let open Async in @@ -128,17 +129,15 @@ let%test_unit "checkpoint read" = in let db = create db_dir in Hashtbl.iteri db_hashtbl ~f:(fun ~key ~data -> - set db ~key:(to_bigstring key) ~data:(to_bigstring data) ) ; + set db ~key:(to_bigstring key) ~data:(to_bigstring data)) ; let cp = create_checkpoint db cp_dir in match ( Hashtbl.add db_hashtbl ~key:"db_key" ~data:"db_data" , Hashtbl.add cp_hashtbl ~key:"cp_key" ~data:"cp_data" ) with | `Ok, `Ok -> - set db ~key:(to_bigstring "db_key") - ~data:(to_bigstring "db_data") ; - set cp ~key:(to_bigstring "cp_key") - ~data:(to_bigstring "cp_data") ; + set db ~key:(to_bigstring "db_key") ~data:(to_bigstring "db_data") ; + set cp ~key:(to_bigstring "cp_key") ~data:(to_bigstring "cp_data") ; let db_sorted = List.sort (Hashtbl.to_alist db_hashtbl) @@ -159,12 +158,12 @@ let%test_unit "checkpoint read" = List.sort (to_alist cp) ~compare:[%compare: Bigstring.t * Bigstring.t] in - [%test_result: (Bigstring.t * Bigstring.t) list] - ~expect:db_sorted db_alist ; - [%test_result: (Bigstring.t * Bigstring.t) list] - ~expect:cp_sorted cp_alist ; + [%test_result: (Bigstring.t * Bigstring.t) list] ~expect:db_sorted + db_alist ; + [%test_result: (Bigstring.t * Bigstring.t) list] ~expect:cp_sorted + cp_alist ; close db ; close cp ; Deferred.unit | _ -> - Deferred.unit ) ) + Deferred.unit )) diff --git a/src/lib/rocksdb/serializable.ml b/src/lib/rocksdb/serializable.ml index ab5c9584a84..c24fa5db8cc 100644 --- a/src/lib/rocksdb/serializable.ml +++ b/src/lib/rocksdb/serializable.ml @@ -2,10 +2,10 @@ open Core_kernel module Make (Key : Binable.S) (Value : Binable.S) : Key_value_database.Intf.S - with module M := Key_value_database.Monad.Ident - and type key := Key.t - and type value := Value.t - and type config := string = struct + with module M := 
Key_value_database.Monad.Ident + and type key := Key.t + and type value := Value.t + and type config := string = struct type t = Database.t let create directory = Database.create directory @@ -28,7 +28,7 @@ module Make (Key : Binable.S) (Value : Binable.S) : let key_data_pairs = List.map update_pairs ~f:(fun (key, data) -> ( Binable.to_bigstring (module Key) key - , Binable.to_bigstring (module Value) data ) ) + , Binable.to_bigstring (module Value) data )) in let remove_keys = List.map remove_keys ~f:(Binable.to_bigstring (module Key)) @@ -41,7 +41,7 @@ module Make (Key : Binable.S) (Value : Binable.S) : let to_alist t = List.map (Database.to_alist t) ~f:(fun (key, value) -> ( Binable.of_bigstring (module Key) key - , Binable.of_bigstring (module Value) value ) ) + , Binable.of_bigstring (module Value) value )) end (** Database Interface for storing heterogeneous key-value pairs. Similar to @@ -114,8 +114,7 @@ module GADT = struct let set t ~(key : 'a Key.t) ~(data : 'a) : unit = set_raw t ~key ~data:(bin_data_dump key data) - let remove t ~(key : 'a Key.t) = - Database.remove t ~key:(bin_key_dump key) + let remove t ~(key : 'a Key.t) = Database.remove t ~key:(bin_key_dump key) end let create directory = Database.create directory diff --git a/src/lib/rose_tree/rose_tree.ml b/src/lib/rose_tree/rose_tree.ml index 925181af8bf..c545f06012b 100644 --- a/src/lib/rose_tree/rose_tree.ml +++ b/src/lib/rose_tree/rose_tree.ml @@ -2,27 +2,24 @@ open Core_kernel type 'a t = T of 'a * 'a t list -type 'a display = {value: 'a; children: 'a display list} [@@deriving yojson] +type 'a display = { value : 'a; children : 'a display list } [@@deriving yojson] let rec to_display (T (value, children)) = - {value; children= List.map ~f:to_display children} + { value; children = List.map ~f:to_display children } -let rec of_display {value; children} = +let rec of_display { value; children } = T (value, List.map ~f:of_display children) let to_yojson conv t = display_to_yojson conv 
(to_display t) -let of_yojson conv json = - Result.map ~f:of_display (display_of_yojson conv json) +let of_yojson conv json = Result.map ~f:of_display (display_of_yojson conv json) let root (T (value, _)) = value let children (T (_, children)) = children let rec print ?(whitespace = 0) ~element_to_string (T (root, branches)) = - Printf.printf "%s- %s\n" - (String.make whitespace ' ') - (element_to_string root) ; + Printf.printf "%s- %s\n" (String.make whitespace ' ') (element_to_string root) ; List.iter branches ~f:(print ~whitespace:(whitespace + 2) ~element_to_string) let rec of_list_exn ?(subtrees = []) = function @@ -30,16 +27,16 @@ let rec of_list_exn ?(subtrees = []) = function raise (Invalid_argument "Rose_tree.of_list_exn: cannot construct rose tree from empty list") - | [h] -> + | [ h ] -> T (h, subtrees) | h :: t -> - T (h, [of_list_exn t ~subtrees]) + T (h, [ of_list_exn t ~subtrees ]) let of_non_empty_list ?(subtrees = []) = Fn.compose (Non_empty_list.fold ~init:(fun x -> T (x, subtrees)) - ~f:(fun acc x -> T (x, [acc]))) + ~f:(fun acc x -> T (x, [ acc ]))) Non_empty_list.rev let rec equal ~f (T (value1, children1)) (T (value2, children2)) = @@ -154,14 +151,14 @@ module Or_error = Make_ops (struct let iter ls ~f = List.fold_left ls ~init:(return ()) ~f:(fun or_error x -> let%bind () = or_error in - f x ) + f x) let map ls ~f = let%map ls' = List.fold_left ls ~init:(return []) ~f:(fun or_error x -> let%bind t = or_error in let%map x' = f x in - x' :: t ) + x' :: t) in List.rev ls' end diff --git a/src/lib/rosetta_coding/coding.ml b/src/lib/rosetta_coding/coding.ml index 8b7546a2040..b1322f22223 100644 --- a/src/lib/rosetta_coding/coding.ml +++ b/src/lib/rosetta_coding/coding.ml @@ -1,12 +1,10 @@ (* coding.ml -- hex encoding/decoding for Rosetta *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Field = Snark_params.Tick.Field module Scalar = 
Snark_params.Tick.Inner_curve.Scalar @@ -24,37 +22,37 @@ open Signature_lib_nonconsensus let hex_char_to_bits4 = function | '0' -> - [false; false; false; false] + [ false; false; false; false ] | '1' -> - [false; false; false; true] + [ false; false; false; true ] | '2' -> - [false; false; true; false] + [ false; false; true; false ] | '3' -> - [false; false; true; true] + [ false; false; true; true ] | '4' -> - [false; true; false; false] + [ false; true; false; false ] | '5' -> - [false; true; false; true] + [ false; true; false; true ] | '6' -> - [false; true; true; false] + [ false; true; true; false ] | '7' -> - [false; true; true; true] + [ false; true; true; true ] | '8' -> - [true; false; false; false] + [ true; false; false; false ] | '9' -> - [true; false; false; true] + [ true; false; false; true ] | 'A' | 'a' -> - [true; false; true; false] + [ true; false; true; false ] | 'B' | 'b' -> - [true; false; true; true] + [ true; false; true; true ] | 'C' | 'c' -> - [true; true; false; false] + [ true; true; false; false ] | 'D' | 'd' -> - [true; true; false; true] + [ true; true; false; true ] | 'E' | 'e' -> - [true; true; true; false] + [ true; true; true; false ] | 'F' | 'f' -> - [true; true; true; true] + [ true; true; true; true ] | _ -> failwith "Expected hex character" @@ -157,19 +155,19 @@ let%test "field_hex round-trip" = field_hex_roundtrip_test () let%test "public key round-trip" = pk_roundtrip_test () -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] (* for running tests from JS *) let unit_tests = [ ("field example", field_example_test) ; ("field-hex round-trip", field_hex_roundtrip_test) - ; ("public key round-trip", pk_roundtrip_test) ] + ; ("public key round-trip", pk_roundtrip_test) + ] let run_unit_tests () = List.iter unit_tests ~f:(fun (name, test) -> printf "Running %s test\n%!" 
name ; - assert (test ()) ) + assert (test ())) [%%endif] diff --git a/src/lib/rosetta_lib/amount_of.ml b/src/lib/rosetta_lib/amount_of.ml index 62063ba58d0..b563e387ec9 100644 --- a/src/lib/rosetta_lib/amount_of.ml +++ b/src/lib/rosetta_lib/amount_of.ml @@ -7,12 +7,12 @@ module Token_id = struct let is_default token_id = Unsigned.UInt64.equal default token_id let encode token_id = - `Assoc [("token_id", `String (Unsigned.UInt64.to_string token_id))] + `Assoc [ ("token_id", `String (Unsigned.UInt64.to_string token_id)) ] module T (M : Monad_fail.S) = struct let decode metadata = match metadata with - | Some (`Assoc [("token_id", `String token_id)]) + | Some (`Assoc [ ("token_id", `String token_id) ]) when try let _ = Unsigned.UInt64.of_string token_id in true @@ -34,20 +34,23 @@ module Token_id = struct end let negated (t : Amount.t) = - {t with value= (Int64.to_string @@ Int64.(neg @@ of_string t.value))} + { t with value = (Int64.to_string @@ Int64.(neg @@ of_string t.value)) } let coda total = - { Amount.value= Unsigned.UInt64.to_string total - ; currency= {Currency.symbol= "CODA"; decimals= 9l; metadata= None} - ; metadata= None } + { Amount.value = Unsigned.UInt64.to_string total + ; currency = { Currency.symbol = "CODA"; decimals = 9l; metadata = None } + ; metadata = None + } let token token_id total = (* TODO: Should we depend on mina_base so we can refer to Token_id.default instead? 
*) if Unsigned.UInt64.equal token_id (Unsigned.UInt64.of_int 1) then coda total else - { Amount.value= Unsigned.UInt64.to_string total - ; currency= - { Currency.symbol= "CODA+" - ; decimals= 9l - ; metadata= Some (Token_id.encode token_id) } - ; metadata= None } + { Amount.value = Unsigned.UInt64.to_string total + ; currency = + { Currency.symbol = "CODA+" + ; decimals = 9l + ; metadata = Some (Token_id.encode token_id) + } + ; metadata = None + } diff --git a/src/lib/rosetta_lib/errors.ml b/src/lib/rosetta_lib/errors.ml index 503db62ea18..b86baf316b1 100644 --- a/src/lib/rosetta_lib/errors.ml +++ b/src/lib/rosetta_lib/errors.ml @@ -56,14 +56,14 @@ module T : sig val sql : ?context:string - -> ('a, [< Caqti_error.t]) Deferred.Result.t + -> ('a, [< Caqti_error.t ]) Deferred.Result.t -> ('a, t) Deferred.Result.t val wrap : - ('a, t) Deferred.Result.t -> ('a, [> `App of t]) Deferred.Result.t + ('a, t) Deferred.Result.t -> ('a, [> `App of t ]) Deferred.Result.t end end = struct - type t = {extra_context: string option; kind: Variant.t} + type t = { extra_context : string option; kind : Variant.t } [@@deriving yojson, show, equal] let code = Fn.compose (fun x -> x + 1) Variant.to_enum @@ -124,9 +124,9 @@ end = struct | `Chain_info_missing -> Some "Could not get chain information. This probably means you are \ - bootstrapping -- bootstrapping is the process of synchronizing \ - with peers that are way ahead of you on the chain. Try again in a \ - few seconds." + bootstrapping -- bootstrapping is the process of synchronizing with \ + peers that are way ahead of you on the chain. Try again in a few \ + seconds." | `Account_not_found addr -> Some (sprintf @@ -145,8 +145,8 @@ end = struct hash) | `Block_missing -> Some - "We couldn't find the block you specified in the archive node. Ask \ - a friend for the missing data." + "We couldn't find the block you specified in the archive node. Ask a \ + friend for the missing data." 
| `Malformed_public_key -> None | `Operations_not_valid reasons -> @@ -257,27 +257,31 @@ end = struct "We encountered an internal exception while processing your request. \ (That means you found a bug!)" - let create ?context kind = {extra_context= context; kind} + let create ?context kind = { extra_context = context; kind } let erase (t : t) = - { Rosetta_models.Error.code= Int32.of_int_exn (code t.kind) - ; message= message t.kind - ; retriable= retriable t.kind - ; details= + { Rosetta_models.Error.code = Int32.of_int_exn (code t.kind) + ; message = message t.kind + ; retriable = retriable t.kind + ; details = ( match (context t.kind, t.extra_context) with | None, None -> - Some (`Assoc [("body", Variant.to_yojson t.kind)]) + Some (`Assoc [ ("body", Variant.to_yojson t.kind) ]) | None, Some context | Some context, None -> Some (`Assoc - [("body", Variant.to_yojson t.kind); ("error", `String context)]) + [ ("body", Variant.to_yojson t.kind) + ; ("error", `String context) + ]) | Some context1, Some context2 -> Some (`Assoc [ ("body", Variant.to_yojson t.kind) ; ("error", `String context1) - ; ("extra", `String context2) ]) ) - ; description= Some (description t.kind) } + ; ("extra", `String context2) + ]) ) + ; description = Some (description t.kind) + } (* The most recent rosetta-cli denies errors that have details in them. 
When * future versions of the spec allow for more detailed descriptions we can @@ -295,9 +299,11 @@ end = struct Variant.to_representatives |> Lazy.map ~f:(fun vs -> List.map vs ~f:(Fn.compose erase create)) |> Lazy.map ~f:(fun es -> - List.map es ~f:(fun e -> {e with Rosetta_models.Error.details= None}) - |> uniq ~eq:(fun {Rosetta_models.Error.code; _} {code= code2; _} -> - Int32.equal code code2 ) ) + List.map es ~f:(fun e -> + { e with Rosetta_models.Error.details = None }) + |> uniq + ~eq:(fun { Rosetta_models.Error.code; _ } { code = code2; _ } -> + Int32.equal code code2)) module Lift = struct let parse ?context res = diff --git a/src/lib/rosetta_lib/operation_statuses.ml b/src/lib/rosetta_lib/operation_statuses.ml index 9ec907f9109..0315f199e08 100644 --- a/src/lib/rosetta_lib/operation_statuses.ml +++ b/src/lib/rosetta_lib/operation_statuses.ml @@ -1,7 +1,7 @@ open Core_kernel open Rosetta_models -type t = [`Success | `Pending | `Failed] [@@deriving to_representatives] +type t = [ `Success | `Pending | `Failed ] [@@deriving to_representatives] let name = function | `Success -> @@ -19,6 +19,7 @@ let successful = function | `Failed -> false -let operation t = {Operation_status.status= name t; successful= successful t} +let operation t = + { Operation_status.status = name t; successful = successful t } let all = to_representatives |> Lazy.map ~f:(List.map ~f:operation) diff --git a/src/lib/rosetta_lib/test/test_encodings.ml b/src/lib/rosetta_lib/test/test_encodings.ml index ed54e460ab4..5e18864d42e 100644 --- a/src/lib/rosetta_lib/test/test_encodings.ml +++ b/src/lib/rosetta_lib/test/test_encodings.ml @@ -1,12 +1,10 @@ (* test_encodings.ml -- print out Rosetta encodings *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Signature_lib open Rosetta_coding diff --git a/src/lib/rosetta_lib/transaction.ml b/src/lib/rosetta_lib/transaction.ml index 
e2afc401caa..59c917639e3 100644 --- a/src/lib/rosetta_lib/transaction.ml +++ b/src/lib/rosetta_lib/transaction.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module Field = Snark_params.Tick.Field @@ -22,23 +20,25 @@ module Token_id = Mina_base.Token_id module Unsigned = struct type t = - { random_oracle_input: (Field.t, bool) Random_oracle_input.t - ; command: User_command_info.Partial.t - ; nonce: Unsigned_extended.UInt32.t } + { random_oracle_input : (Field.t, bool) Random_oracle_input.t + ; command : User_command_info.Partial.t + ; nonce : Unsigned_extended.UInt32.t + } module Rendered = struct type public_key = string [@@deriving yojson] module Payment = struct type t = - { to_: public_key [@key "to"] - ; from: public_key - ; fee: Unsigned_extended.UInt64.t - ; token: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; memo: string option - ; amount: Unsigned_extended.UInt64.t - ; valid_until: Unsigned_extended.UInt32.t option } + { to_ : public_key [@key "to"] + ; from : public_key + ; fee : Unsigned_extended.UInt64.t + ; token : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; memo : string option + ; amount : Unsigned_extended.UInt64.t + ; valid_until : Unsigned_extended.UInt32.t option + } [@@deriving yojson] end @@ -46,12 +46,13 @@ module Unsigned = struct type public_key = string [@@deriving yojson] type t = - { delegator: public_key - ; new_delegate: public_key - ; fee: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; memo: string option - ; valid_until: Unsigned_extended.UInt32.t option } + { delegator : public_key + ; new_delegate : public_key + ; fee : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; memo : string option + ; valid_until : Unsigned_extended.UInt32.t option + } [@@deriving yojson] end @@ -59,12 +60,13 @@ module Unsigned = struct type public_key = 
string [@@deriving yojson] type t = - { receiver: public_key - ; disable_new_accounts: bool - ; fee: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; memo: string option - ; valid_until: Unsigned_extended.UInt32.t option } + { receiver : public_key + ; disable_new_accounts : bool + ; fee : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; memo : string option + ; valid_until : Unsigned_extended.UInt32.t option + } [@@deriving yojson] end @@ -72,14 +74,15 @@ module Unsigned = struct type public_key = string [@@deriving yojson] type t = - { token_owner: public_key - ; receiver: public_key - ; token: Token_id.t - ; account_disabled: bool - ; fee: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; memo: string option - ; valid_until: Unsigned_extended.UInt32.t option } + { token_owner : public_key + ; receiver : public_key + ; token : Token_id.t + ; account_disabled : bool + ; fee : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; memo : string option + ; valid_until : Unsigned_extended.UInt32.t option + } [@@deriving yojson] end @@ -87,32 +90,33 @@ module Unsigned = struct type public_key = string [@@deriving yojson] type t = - { token_owner: public_key - ; receiver: public_key - ; token: Token_id.t - ; amount: Unsigned_extended.UInt64.t - ; fee: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; memo: string option - ; valid_until: Unsigned_extended.UInt32.t option } + { token_owner : public_key + ; receiver : public_key + ; token : Token_id.t + ; amount : Unsigned_extended.UInt64.t + ; fee : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; memo : string option + ; valid_until : Unsigned_extended.UInt32.t option + } [@@deriving yojson] end type t = - { random_oracle_input: string (* hex *) [@key "randomOracleInput"] - ; payment: Payment.t option - ; stake_delegation: Delegation.t option [@key "stakeDelegation"] - ; create_token: Create_token.t option 
[@key "createToken"] - ; create_token_account: Create_token_account.t option + { random_oracle_input : string (* hex *) [@key "randomOracleInput"] + ; payment : Payment.t option + ; stake_delegation : Delegation.t option [@key "stakeDelegation"] + ; create_token : Create_token.t option [@key "createToken"] + ; create_token_account : Create_token_account.t option [@key "createTokenAccount"] - ; mint_tokens: Mint_tokens.t option [@key "mintTokens"] } + ; mint_tokens : Mint_tokens.t option [@key "mintTokens"] + } [@@deriving yojson] end let string_of_field field = assert (Field.size_in_bits = 255) ; - Field.unpack field |> List.rev - |> Random_oracle_input.Coding.string_of_field + Field.unpack field |> List.rev |> Random_oracle_input.Coding.string_of_field let field_of_string s = assert (Field.size_in_bits = 255) ; @@ -130,49 +134,53 @@ module Unsigned = struct ~error: (Errors.create (`Operations_not_valid - [Errors.Partial_reason.Amount_not_some])) + [ Errors.Partial_reason.Amount_not_some ])) in let payment = - { Rendered.Payment.to_= un_pk command.receiver - ; from= un_pk command.source - ; fee= command.fee + { Rendered.Payment.to_ = un_pk command.receiver + ; from = un_pk command.source + ; fee = command.fee ; nonce - ; token= command.token - ; memo= None + ; token = command.token + ; memo = None ; amount - ; valid_until= None } + ; valid_until = None + } in Result.return (`Payment payment) | `Delegation -> let delegation = - { Rendered.Delegation.delegator= un_pk command.source - ; new_delegate= un_pk command.receiver - ; fee= command.fee + { Rendered.Delegation.delegator = un_pk command.source + ; new_delegate = un_pk command.receiver + ; fee = command.fee ; nonce - ; memo= None - ; valid_until= None } + ; memo = None + ; valid_until = None + } in Result.return (`Delegation delegation) | `Create_token -> let create_token = - { Rendered.Create_token.receiver= un_pk command.receiver - ; disable_new_accounts= false - ; fee= command.fee + { 
Rendered.Create_token.receiver = un_pk command.receiver + ; disable_new_accounts = false + ; fee = command.fee ; nonce - ; memo= None - ; valid_until= None } + ; memo = None + ; valid_until = None + } in Result.return (`Create_token create_token) | `Create_token_account -> let create_token_account = - { Rendered.Create_token_account.token_owner= un_pk command.source - ; receiver= un_pk command.receiver - ; token= command.token |> Token_id.of_uint64 - ; account_disabled= false - ; fee= command.fee + { Rendered.Create_token_account.token_owner = un_pk command.source + ; receiver = un_pk command.receiver + ; token = command.token |> Token_id.of_uint64 + ; account_disabled = false + ; fee = command.fee ; nonce - ; memo= None - ; valid_until= None } + ; memo = None + ; valid_until = None + } in Result.return (`Create_token_account create_token_account) | `Mint_tokens -> @@ -181,17 +189,18 @@ module Unsigned = struct ~error: (Errors.create (`Operations_not_valid - [Errors.Partial_reason.Amount_not_some])) + [ Errors.Partial_reason.Amount_not_some ])) in let mint_tokens = - { Rendered.Mint_tokens.token_owner= un_pk command.source - ; receiver= un_pk command.receiver - ; token= command.token |> Token_id.of_uint64 + { Rendered.Mint_tokens.token_owner = un_pk command.source + ; receiver = un_pk command.receiver + ; token = command.token |> Token_id.of_uint64 ; amount - ; fee= command.fee + ; fee = command.fee ; nonce - ; memo= None - ; valid_until= None } + ; memo = None + ; valid_until = None + } in Result.return (`Mint_tokens mint_tokens) @@ -205,94 +214,104 @@ module Unsigned = struct match%map render_command ~nonce:t.nonce t.command with | `Payment payment -> { Rendered.random_oracle_input - ; payment= Some payment - ; stake_delegation= None - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } + ; payment = Some payment + ; stake_delegation = None + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } | `Delegation 
delegation -> { Rendered.random_oracle_input - ; payment= None - ; stake_delegation= Some delegation - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } + ; payment = None + ; stake_delegation = Some delegation + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } | `Create_token create_token -> { Rendered.random_oracle_input - ; payment= None - ; stake_delegation= None - ; create_token= Some create_token - ; create_token_account= None - ; mint_tokens= None } + ; payment = None + ; stake_delegation = None + ; create_token = Some create_token + ; create_token_account = None + ; mint_tokens = None + } | `Create_token_account create_token_account -> { Rendered.random_oracle_input - ; payment= None - ; stake_delegation= None - ; create_token= None - ; create_token_account= Some create_token_account - ; mint_tokens= None } + ; payment = None + ; stake_delegation = None + ; create_token = None + ; create_token_account = Some create_token_account + ; mint_tokens = None + } | `Mint_tokens mint_tokens -> { Rendered.random_oracle_input - ; payment= None - ; stake_delegation= None - ; create_token= None - ; create_token_account= None - ; mint_tokens= Some mint_tokens } - - let of_rendered_payment (r : Rendered.Payment.t) : - User_command_info.Partial.t = - { User_command_info.Partial.receiver= `Pk r.to_ - ; source= `Pk r.from - ; kind= `Payment - ; fee_payer= `Pk r.from - ; fee_token= r.token - ; token= r.token - ; fee= r.fee - ; amount= Some r.amount } + ; payment = None + ; stake_delegation = None + ; create_token = None + ; create_token_account = None + ; mint_tokens = Some mint_tokens + } + + let of_rendered_payment (r : Rendered.Payment.t) : User_command_info.Partial.t + = + { User_command_info.Partial.receiver = `Pk r.to_ + ; source = `Pk r.from + ; kind = `Payment + ; fee_payer = `Pk r.from + ; fee_token = r.token + ; token = r.token + ; fee = r.fee + ; amount = Some r.amount + } let of_rendered_delegation (r : 
Rendered.Delegation.t) : User_command_info.Partial.t = - { User_command_info.Partial.receiver= `Pk r.new_delegate - ; source= `Pk r.delegator - ; kind= `Delegation - ; fee_payer= `Pk r.delegator - ; fee_token= Mina_base.Token_id.(default |> to_uint64) - ; token= Mina_base.Token_id.(default |> to_uint64) - ; fee= r.fee - ; amount= None } + { User_command_info.Partial.receiver = `Pk r.new_delegate + ; source = `Pk r.delegator + ; kind = `Delegation + ; fee_payer = `Pk r.delegator + ; fee_token = Mina_base.Token_id.(default |> to_uint64) + ; token = Mina_base.Token_id.(default |> to_uint64) + ; fee = r.fee + ; amount = None + } let of_rendered_create_token (r : Rendered.Create_token.t) : User_command_info.Partial.t = - { User_command_info.Partial.receiver= `Pk r.receiver - ; source= `Pk r.receiver - ; kind= `Create_token - ; fee_payer= `Pk r.receiver (* TODO: reviewer, please check! *) - ; fee_token= Mina_base.Token_id.(default |> to_uint64) - ; token= Mina_base.Token_id.(default |> to_uint64) - ; fee= r.fee - ; amount= None } + { User_command_info.Partial.receiver = `Pk r.receiver + ; source = `Pk r.receiver + ; kind = `Create_token + ; fee_payer = `Pk r.receiver (* TODO: reviewer, please check! 
*) + ; fee_token = Mina_base.Token_id.(default |> to_uint64) + ; token = Mina_base.Token_id.(default |> to_uint64) + ; fee = r.fee + ; amount = None + } let of_rendered_create_token_account (r : Rendered.Create_token_account.t) : User_command_info.Partial.t = - { User_command_info.Partial.receiver= `Pk r.receiver - ; source= `Pk r.token_owner - ; kind= `Create_token - ; fee_payer= `Pk r.receiver - ; fee_token= Mina_base.Token_id.(default |> to_uint64) - ; token= r.token |> Mina_base.Token_id.to_uint64 - ; fee= r.fee - ; amount= None } + { User_command_info.Partial.receiver = `Pk r.receiver + ; source = `Pk r.token_owner + ; kind = `Create_token + ; fee_payer = `Pk r.receiver + ; fee_token = Mina_base.Token_id.(default |> to_uint64) + ; token = r.token |> Mina_base.Token_id.to_uint64 + ; fee = r.fee + ; amount = None + } let of_rendered_mint_tokens (r : Rendered.Mint_tokens.t) : User_command_info.Partial.t = - { User_command_info.Partial.receiver= `Pk r.receiver - ; source= `Pk r.token_owner - ; kind= `Mint_tokens - ; fee_payer= `Pk r.token_owner - ; fee_token= Mina_base.Token_id.(default |> to_uint64) - ; token= r.token |> Mina_base.Token_id.to_uint64 - ; fee= r.fee - ; amount= Some r.amount } + { User_command_info.Partial.receiver = `Pk r.receiver + ; source = `Pk r.token_owner + ; kind = `Mint_tokens + ; fee_payer = `Pk r.token_owner + ; fee_token = Mina_base.Token_id.(default |> to_uint64) + ; token = r.token |> Mina_base.Token_id.to_uint64 + ; fee = r.fee + ; amount = Some r.amount + } let of_rendered (r : Rendered.t) : (t, Errors.t) Result.t = let open Result.Let_syntax in @@ -312,7 +331,7 @@ module Unsigned = struct ~context: (sprintf "Random oracle input deserialization: %s" parse_context) - (`Json_parse None) ) + (`Json_parse None)) in match ( r.payment @@ -323,29 +342,34 @@ module Unsigned = struct with | Some payment, None, None, None, None -> Result.return - { command= of_rendered_payment payment + { command = of_rendered_payment payment ; 
random_oracle_input - ; nonce= payment.nonce } + ; nonce = payment.nonce + } | None, Some delegation, None, None, None -> Result.return - { command= of_rendered_delegation delegation + { command = of_rendered_delegation delegation ; random_oracle_input - ; nonce= delegation.nonce } + ; nonce = delegation.nonce + } | None, None, Some create_token, None, None -> Result.return - { command= of_rendered_create_token create_token + { command = of_rendered_create_token create_token ; random_oracle_input - ; nonce= create_token.nonce } + ; nonce = create_token.nonce + } | None, None, None, Some create_token_account, None -> Result.return - { command= of_rendered_create_token_account create_token_account + { command = of_rendered_create_token_account create_token_account ; random_oracle_input - ; nonce= create_token_account.nonce } + ; nonce = create_token_account.nonce + } | None, None, None, None, Some mint_tokens -> Result.return - { command= of_rendered_mint_tokens mint_tokens + { command = of_rendered_mint_tokens mint_tokens ; random_oracle_input - ; nonce= mint_tokens.nonce } + ; nonce = mint_tokens.nonce + } | _ -> Result.fail (Errors.create ~context:"Unsigned transaction un-rendering" @@ -354,18 +378,20 @@ end module Signed = struct type t = - { command: User_command_info.Partial.t - ; nonce: Unsigned_extended.UInt32.t - ; signature: string } + { command : User_command_info.Partial.t + ; nonce : Unsigned_extended.UInt32.t + ; signature : string + } module Rendered = struct type t = - { signature: string - ; payment: Unsigned.Rendered.Payment.t option - ; stake_delegation: Unsigned.Rendered.Delegation.t option - ; create_token: Unsigned.Rendered.Create_token.t option - ; create_token_account: Unsigned.Rendered.Create_token_account.t option - ; mint_tokens: Unsigned.Rendered.Mint_tokens.t option } + { signature : string + ; payment : Unsigned.Rendered.Payment.t option + ; stake_delegation : Unsigned.Rendered.Delegation.t option + ; create_token : 
Unsigned.Rendered.Create_token.t option + ; create_token_account : Unsigned.Rendered.Create_token_account.t option + ; mint_tokens : Unsigned.Rendered.Mint_tokens.t option + } [@@deriving yojson] end @@ -373,40 +399,45 @@ module Signed = struct let open Result.Let_syntax in match%map Unsigned.render_command ~nonce:t.nonce t.command with | `Payment payment -> - { Rendered.signature= t.signature - ; payment= Some payment - ; stake_delegation= None - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } + { Rendered.signature = t.signature + ; payment = Some payment + ; stake_delegation = None + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } | `Delegation delegation -> - { Rendered.signature= t.signature - ; payment= None - ; stake_delegation= Some delegation - ; create_token= None - ; create_token_account= None - ; mint_tokens= None } + { Rendered.signature = t.signature + ; payment = None + ; stake_delegation = Some delegation + ; create_token = None + ; create_token_account = None + ; mint_tokens = None + } | `Create_token create_token -> - { Rendered.signature= t.signature - ; payment= None - ; stake_delegation= None - ; create_token= Some create_token - ; create_token_account= None - ; mint_tokens= None } + { Rendered.signature = t.signature + ; payment = None + ; stake_delegation = None + ; create_token = Some create_token + ; create_token_account = None + ; mint_tokens = None + } | `Create_token_account create_token_account -> - { Rendered.signature= t.signature - ; payment= None - ; stake_delegation= None - ; create_token= None - ; create_token_account= Some create_token_account - ; mint_tokens= None } + { Rendered.signature = t.signature + ; payment = None + ; stake_delegation = None + ; create_token = None + ; create_token_account = Some create_token_account + ; mint_tokens = None + } | `Mint_tokens mint_tokens -> - { Rendered.signature= t.signature - ; payment= None - ; stake_delegation= None - ; 
create_token= None - ; create_token_account= None - ; mint_tokens= Some mint_tokens } + { Rendered.signature = t.signature + ; payment = None + ; stake_delegation = None + ; create_token = None + ; create_token_account = None + ; mint_tokens = Some mint_tokens + } let of_rendered (r : Rendered.t) : (t, Errors.t) Result.t = match @@ -418,30 +449,35 @@ module Signed = struct with | Some payment, None, None, None, None -> Result.return - { command= Unsigned.of_rendered_payment payment - ; nonce= payment.nonce - ; signature= r.signature } + { command = Unsigned.of_rendered_payment payment + ; nonce = payment.nonce + ; signature = r.signature + } | None, Some delegation, None, None, None -> Result.return - { command= Unsigned.of_rendered_delegation delegation - ; nonce= delegation.nonce - ; signature= r.signature } + { command = Unsigned.of_rendered_delegation delegation + ; nonce = delegation.nonce + ; signature = r.signature + } | None, None, Some create_token, None, None -> Result.return - { command= Unsigned.of_rendered_create_token create_token - ; nonce= create_token.nonce - ; signature= r.signature } + { command = Unsigned.of_rendered_create_token create_token + ; nonce = create_token.nonce + ; signature = r.signature + } | None, None, None, Some create_token_account, None -> Result.return - { command= + { command = Unsigned.of_rendered_create_token_account create_token_account - ; nonce= create_token_account.nonce - ; signature= r.signature } + ; nonce = create_token_account.nonce + ; signature = r.signature + } | None, None, None, None, Some mint_tokens -> Result.return - { command= Unsigned.of_rendered_mint_tokens mint_tokens - ; nonce= mint_tokens.nonce - ; signature= r.signature } + { command = Unsigned.of_rendered_mint_tokens mint_tokens + ; nonce = mint_tokens.nonce + ; signature = r.signature + } | _ -> Result.fail (Errors.create ~context:"Signed transaction un-rendering" @@ -466,29 +502,29 @@ let to_mina_signed transaction_json = match 
rosetta_transaction.command.kind with | `Payment -> ( Option.bind rosetta_transaction_rendered.payment - ~f:(fun {valid_until; _} -> valid_until) + ~f:(fun { valid_until; _ } -> valid_until) , Option.bind rosetta_transaction_rendered.payment - ~f:(fun {memo; _} -> memo) ) + ~f:(fun { memo; _ } -> memo) ) | `Delegation -> ( Option.bind rosetta_transaction_rendered.stake_delegation - ~f:(fun {valid_until; _} -> valid_until) + ~f:(fun { valid_until; _ } -> valid_until) , Option.bind rosetta_transaction_rendered.stake_delegation - ~f:(fun {memo; _} -> memo) ) + ~f:(fun { memo; _ } -> memo) ) | `Create_token -> ( Option.bind rosetta_transaction_rendered.create_token - ~f:(fun {valid_until; _} -> valid_until) + ~f:(fun { valid_until; _ } -> valid_until) , Option.bind rosetta_transaction_rendered.create_token - ~f:(fun {memo; _} -> memo) ) + ~f:(fun { memo; _ } -> memo) ) | `Create_token_account -> ( Option.bind rosetta_transaction_rendered.create_token_account - ~f:(fun {valid_until; _} -> valid_until) + ~f:(fun { valid_until; _ } -> valid_until) , Option.bind rosetta_transaction_rendered.create_token_account - ~f:(fun {memo; _} -> memo) ) + ~f:(fun { memo; _ } -> memo) ) | `Mint_tokens -> ( Option.bind rosetta_transaction_rendered.mint_tokens - ~f:(fun {valid_until; _} -> valid_until) + ~f:(fun { valid_until; _ } -> valid_until) , Option.bind rosetta_transaction_rendered.mint_tokens - ~f:(fun {memo; _} -> memo) ) + ~f:(fun { memo; _ } -> memo) ) in let pk (`Pk x) = Signature_lib.Public_key.Compressed.of_base58_check_exn x @@ -508,9 +544,10 @@ let to_mina_signed transaction_json = in let command : Mina_base.Signed_command.t = { Mina_base.Signed_command.Poly.signature - ; signer= + ; signer = pk rosetta_transaction.command.fee_payer |> Signature_lib.Public_key.decompress_exn - ; payload } + ; payload + } in - command ) + command) diff --git a/src/lib/rosetta_lib/user_command_info.ml b/src/lib/rosetta_lib/user_command_info.ml index e554ba3c24f..e753d581f9e 100644 --- 
a/src/lib/rosetta_lib/user_command_info.ml +++ b/src/lib/rosetta_lib/user_command_info.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifndef -consensus_mechanism] +[%%ifndef consensus_mechanism] module Mina_base = Mina_base_nonconsensus module Currency = Currency_nonconsensus.Currency @@ -26,12 +24,13 @@ module Stake_delegation = Mina_base.Stake_delegation let pk_to_public_key ~context (`Pk pk) = Public_key.Compressed.of_base58_check pk |> Result.map_error ~f:(fun _ -> - Errors.create ~context `Public_key_format_not_valid ) + Errors.create ~context `Public_key_format_not_valid) let account_id (`Pk pk) token_id = - { Account_identifier.address= pk - ; sub_account= None - ; metadata= Some (Amount_of.Token_id.encode token_id) } + { Account_identifier.address = pk + ; sub_account = None + ; metadata = Some (Amount_of.Token_id.encode token_id) + } let token_id_of_account (account : Account_identifier.t) = let module Decoder = Amount_of.Token_id.T (Result) in @@ -40,7 +39,7 @@ let token_id_of_account (account : Account_identifier.t) = |> Result.ok module Op = struct - type 'a t = {label: 'a; related_to: 'a option} [@@deriving equal] + type 'a t = { label : 'a; related_to : 'a option } [@@deriving equal] module T (M : Monad.S2) = struct let build ~a_eq ~plan ~f = @@ -52,8 +51,9 @@ module Op = struct let open M.Let_syntax in let%bind i, acc = macc in let operation_identifier i = - { Operation_identifier.index= Int64.of_int_exn i - ; network_index= None } + { Operation_identifier.index = Int64.of_int_exn i + ; network_index = None + } in let related_operations = match op.related_to with @@ -68,7 +68,7 @@ module Op = struct f ~related_operations ~operation_identifier:(operation_identifier i) op in - (i + 1, a :: acc) ) + (i + 1, a :: acc)) in List.rev rev_data end @@ -111,34 +111,36 @@ module Account_creation_fees_paid = struct end module Failure_status = struct - type t = [`Applied of Account_creation_fees_paid.t | 
`Failed of string] + type t = [ `Applied of Account_creation_fees_paid.t | `Failed of string ] [@@deriving equal, to_yojson, sexp, compare] end type t = - { kind: Kind.t - ; fee_payer: [`Pk of string] - ; source: [`Pk of string] - ; receiver: [`Pk of string] - ; fee_token: Unsigned_extended.UInt64.t - ; token: Unsigned_extended.UInt64.t - ; fee: Unsigned_extended.UInt64.t - ; nonce: Unsigned_extended.UInt32.t - ; amount: Unsigned_extended.UInt64.t option - ; hash: string - ; failure_status: Failure_status.t option } + { kind : Kind.t + ; fee_payer : [ `Pk of string ] + ; source : [ `Pk of string ] + ; receiver : [ `Pk of string ] + ; fee_token : Unsigned_extended.UInt64.t + ; token : Unsigned_extended.UInt64.t + ; fee : Unsigned_extended.UInt64.t + ; nonce : Unsigned_extended.UInt32.t + ; amount : Unsigned_extended.UInt64.t option + ; hash : string + ; failure_status : Failure_status.t option + } [@@deriving to_yojson, equal, sexp, compare] module Partial = struct type t = - { kind: Kind.t - ; fee_payer: [`Pk of string] - ; source: [`Pk of string] - ; receiver: [`Pk of string] - ; fee_token: Unsigned_extended.UInt64.t - ; token: Unsigned_extended.UInt64.t - ; fee: Unsigned_extended.UInt64.t - ; amount: Unsigned_extended.UInt64.t option } + { kind : Kind.t + ; fee_payer : [ `Pk of string ] + ; source : [ `Pk of string ] + ; receiver : [ `Pk of string ] + ; fee_token : Unsigned_extended.UInt64.t + ; token : Unsigned_extended.UInt64.t + ; fee : Unsigned_extended.UInt64.t + ; amount : Unsigned_extended.UInt64.t option + } [@@deriving to_yojson, sexp, compare] module Reason = Errors.Partial_reason @@ -151,16 +153,14 @@ module Partial = struct -> (Signed_command.Payload.t, Errors.t) Result.t = fun ?memo ?valid_until t ~nonce -> let open Result.Let_syntax in - let%bind fee_payer_pk = - pk_to_public_key ~context:"Fee payer" t.fee_payer - in + let%bind fee_payer_pk = pk_to_public_key ~context:"Fee payer" t.fee_payer in let%bind source_pk = pk_to_public_key ~context:"Source" 
t.source in let%bind receiver_pk = pk_to_public_key ~context:"Receiver" t.receiver in let%bind memo = match memo with | Some memo -> ( - try Ok (Signed_command_memo.create_from_string_exn memo) - with _ -> Error (Errors.create `Memo_invalid) ) + try Ok (Signed_command_memo.create_from_string_exn memo) + with _ -> Error (Errors.create `Memo_invalid) ) | None -> Ok Signed_command_memo.empty in @@ -172,33 +172,37 @@ module Partial = struct ~error: (Errors.create (`Operations_not_valid - [Errors.Partial_reason.Amount_not_some])) + [ Errors.Partial_reason.Amount_not_some ])) in let payload = { Payment_payload.Poly.source_pk ; receiver_pk - ; token_id= Token_id.of_uint64 t.token - ; amount= Amount_currency.of_uint64 amount } + ; token_id = Token_id.of_uint64 t.token + ; amount = Amount_currency.of_uint64 amount + } in Signed_command.Payload.Body.Payment payload | `Delegation -> let payload = Stake_delegation.Set_delegate - {delegator= source_pk; new_delegate= receiver_pk} + { delegator = source_pk; new_delegate = receiver_pk } in Result.return @@ Signed_command.Payload.Body.Stake_delegation payload | `Create_token -> let payload = - { Mina_base.New_token_payload.token_owner_pk= receiver_pk - ; disable_new_accounts= false } + { Mina_base.New_token_payload.token_owner_pk = receiver_pk + ; disable_new_accounts = false + } in Result.return @@ Signed_command.Payload.Body.Create_new_token payload | `Create_token_account -> let payload = - { Mina_base.New_account_payload.token_id= Token_id.of_uint64 t.token - ; token_owner_pk= source_pk + { Mina_base.New_account_payload.token_id = + Token_id.of_uint64 t.token + ; token_owner_pk = source_pk ; receiver_pk - ; account_disabled= false } + ; account_disabled = false + } in Result.return @@ Signed_command.Payload.Body.Create_token_account payload @@ -208,13 +212,14 @@ module Partial = struct ~error: (Errors.create (`Operations_not_valid - [Errors.Partial_reason.Amount_not_some])) + [ Errors.Partial_reason.Amount_not_some ])) in let 
payload = - { Mina_base.Minting_payload.token_id= Token_id.of_uint64 t.token - ; token_owner_pk= source_pk + { Mina_base.Minting_payload.token_id = Token_id.of_uint64 t.token + ; token_owner_pk = source_pk ; receiver_pk - ; amount= Amount_currency.of_uint64 amount } + ; amount = Amount_currency.of_uint64 amount + } in Signed_command.Payload.Body.Mint_tokens payload in @@ -225,27 +230,29 @@ module Partial = struct end let forget (t : t) : Partial.t = - { kind= t.kind - ; fee_payer= t.fee_payer - ; source= t.source - ; receiver= t.receiver - ; fee_token= t.fee_token - ; token= t.token - ; fee= t.fee - ; amount= t.amount } + { kind = t.kind + ; fee_payer = t.fee_payer + ; source = t.source + ; receiver = t.receiver + ; fee_token = t.fee_token + ; token = t.token + ; fee = t.fee + ; amount = t.amount + } let remember ~nonce ~hash t = - { kind= t.kind - ; fee_payer= t.fee_payer - ; source= t.source - ; receiver= t.receiver - ; fee_token= t.fee_token - ; token= t.token - ; fee= t.fee - ; amount= t.amount + { kind = t.kind + ; fee_payer = t.fee_payer + ; source = t.source + ; receiver = t.receiver + ; fee_token = t.fee_token + ; token = t.token + ; fee = t.fee + ; amount = t.amount ; hash ; nonce - ; failure_status= None } + ; failure_status = None + } let of_operations (ops : Operation.t list) : (Partial.t, Partial.Reason.t) Validation.t = @@ -254,18 +261,18 @@ let of_operations (ops : Operation.t list) : let find_kind k (ops : Operation.t list) = let name = Operation_types.name k in List.find ops ~f:(fun op -> String.equal op.Operation._type name) - |> Result.of_option ~error:[Partial.Reason.Can't_find_kind name] + |> Result.of_option ~error:[ Partial.Reason.Can't_find_kind name ] in let module V = Validation in let open V.Let_syntax in let open Partial.Reason in (* For a payment we demand: - * - * ops = length exactly 3 - * - * payment_source_dec with account 'a, some amount 'x, status="Pending" - * fee_payer_dec with account 'a, some amount 'y, status="Pending" - * 
payment_receiver_inc with account 'b, some amount 'x, status="Pending" + * + * ops = length exactly 3 + * + * payment_source_dec with account 'a, some amount 'x, status="Pending" + * fee_payer_dec with account 'a, some amount 'y, status="Pending" + * payment_receiver_inc with account 'b, some amount 'x, status="Pending" *) let payment = let%map () = @@ -273,8 +280,8 @@ let of_operations (ops : Operation.t list) : else V.fail Length_mismatch and account_a = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Payment_source_dec ops - and {account= account'; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Payment_source_dec ops + and { account = account'; _ } = find_kind `Fee_payer_dec ops in match (account, account') with | Some x, Some y when Account_identifier.equal x y -> V.return x @@ -284,42 +291,42 @@ let of_operations (ops : Operation.t list) : V.fail Account_not_some and token = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Payment_source_dec ops in + let%bind { account; _ } = find_kind `Payment_source_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | None -> - V.fail Incorrect_token_id - | Some token -> - V.return token ) + match token_id_of_account account with + | None -> + V.fail Incorrect_token_id + | Some token -> + V.return token ) | None -> V.fail Account_not_some and fee_token = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | Some token_id -> - V.return token_id - | None -> - V.fail Incorrect_token_id ) + match token_id_of_account account with + | Some token_id -> + V.return token_id + | None -> + V.fail Incorrect_token_id ) | None -> V.fail Account_not_some and account_b = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Payment_receiver_inc 
ops in - Result.of_option account ~error:[Account_not_some] + let%bind { account; _ } = find_kind `Payment_receiver_inc ops in + Result.of_option account ~error:[ Account_not_some ] and () = if List.for_all ops ~f:(fun op -> - Option.equal String.equal op.status (Some "Pending") ) + Option.equal String.equal op.status (Some "Pending")) then V.return () else V.fail Status_not_pending and payment_amount_x = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Payment_source_dec ops - and {amount= amount'; _} = find_kind `Payment_receiver_inc ops in + let%bind { amount; _ } = find_kind `Payment_source_dec ops + and { amount = amount'; _ } = find_kind `Payment_receiver_inc ops in match (amount, amount') with | Some x, Some y when Amount.equal (Amount_of.negated x) y -> V.return y @@ -329,28 +336,29 @@ let of_operations (ops : Operation.t list) : V.fail Amount_not_some and payment_amount_y = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Fee_payer_dec ops in + let%bind { amount; _ } = find_kind `Fee_payer_dec ops in match amount with | Some x -> V.return (Amount_of.negated x) | None -> V.fail Amount_not_some in - { Partial.kind= `Payment - ; fee_payer= `Pk account_a.address - ; source= `Pk account_a.address - ; receiver= `Pk account_b.address + { Partial.kind = `Payment + ; fee_payer = `Pk account_a.address + ; source = `Pk account_a.address + ; receiver = `Pk account_b.address ; fee_token ; token (* TODO: Catch exception properly on these uint64 decodes *) - ; fee= Unsigned.UInt64.of_string payment_amount_y.Amount.value - ; amount= Some (Unsigned.UInt64.of_string payment_amount_x.Amount.value) } + ; fee = Unsigned.UInt64.of_string payment_amount_y.Amount.value + ; amount = Some (Unsigned.UInt64.of_string payment_amount_x.Amount.value) + } in (* For a delegation we demand: - * - * ops = length exactly 2 - * - * fee_payer_dec with account 'a, some amount 'y, status="Pending" - * delegate_change with account 'a, 
metadata:{delegate_change_target:'b}, status="Pending" + * + * ops = length exactly 2 + * + * fee_payer_dec with account 'a, some amount 'y, status="Pending" + * delegate_change with account 'a, metadata:{delegate_change_target:'b}, status="Pending" *) let delegation = let%map () = @@ -358,64 +366,65 @@ let of_operations (ops : Operation.t list) : else V.fail Length_mismatch and account_a = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in Option.value_map account ~default:(V.fail Account_not_some) ~f:V.return and fee_token = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | Some token_id -> - V.return token_id - | None -> - V.fail Incorrect_token_id ) + match token_id_of_account account with + | Some token_id -> + V.return token_id + | None -> + V.fail Incorrect_token_id ) | None -> V.fail Account_not_some and account_b = let open Result.Let_syntax in - let%bind {metadata; _} = find_kind `Delegate_change ops in + let%bind { metadata; _ } = find_kind `Delegate_change ops in match metadata with | Some metadata -> ( - match metadata with - | `Assoc [("delegate_change_target", `String s)] -> - return s - | _ -> - V.fail Invalid_metadata ) + match metadata with + | `Assoc [ ("delegate_change_target", `String s) ] -> + return s + | _ -> + V.fail Invalid_metadata ) | None -> V.fail Account_not_some and () = if List.for_all ops ~f:(fun op -> - Option.equal String.equal op.status (Some "Pending") ) + Option.equal String.equal op.status (Some "Pending")) then V.return () else V.fail Status_not_pending and payment_amount_y = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Fee_payer_dec ops in + let%bind { amount; _ } = find_kind `Fee_payer_dec ops in match amount with | Some x -> 
V.return (Amount_of.negated x) | None -> V.fail Amount_not_some in - { Partial.kind= `Delegation - ; fee_payer= `Pk account_a.address - ; source= `Pk account_a.address - ; receiver= `Pk account_b + { Partial.kind = `Delegation + ; fee_payer = `Pk account_a.address + ; source = `Pk account_a.address + ; receiver = `Pk account_b ; fee_token - ; token= + ; token = Token_id.(default |> to_uint64) (* only default token can be delegated *) - ; fee= Unsigned.UInt64.of_string payment_amount_y.Amount.value - ; amount= None } + ; fee = Unsigned.UInt64.of_string payment_amount_y.Amount.value + ; amount = None + } in (* For token creation, we demand: - * - * ops = length exactly 2 - * - * fee_payer_dec with account 'a, some amount 'y, status="Pending" - * create_token with account=None, status="Pending" + * + * ops = length exactly 2 + * + * fee_payer_dec with account 'a, some amount 'y, status="Pending" + * create_token with account=None, status="Pending" *) let create_token = let%map () = @@ -423,29 +432,29 @@ let of_operations (ops : Operation.t list) : else V.fail Length_mismatch and account_a = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in Option.value_map account ~default:(V.fail Account_not_some) ~f:V.return and fee_token = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | Some token_id -> - V.return token_id - | None -> - V.fail Incorrect_token_id ) + match token_id_of_account account with + | Some token_id -> + V.return token_id + | None -> + V.fail Incorrect_token_id ) | None -> V.fail Account_not_some and () = if List.for_all ops ~f:(fun op -> - Option.equal String.equal op.status (Some "Pending") ) + Option.equal String.equal op.status (Some "Pending")) then V.return () else V.fail 
Status_not_pending and payment_amount_y = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Fee_payer_dec ops in + let%bind { amount; _ } = find_kind `Fee_payer_dec ops in match amount with | Some x -> V.return (Amount_of.negated x) @@ -466,20 +475,21 @@ let of_operations (ops : Operation.t list) : | Error _ -> V.return () in - { Partial.kind= `Create_token - ; fee_payer= `Pk account_a.address - ; source= `Pk account_a.address - ; receiver= `Pk account_a.address (* reviewer: is this sane? *) + { Partial.kind = `Create_token + ; fee_payer = `Pk account_a.address + ; source = `Pk account_a.address + ; receiver = `Pk account_a.address (* reviewer: is this sane? *) ; fee_token - ; token= Token_id.(default |> to_uint64) - ; fee= Unsigned.UInt64.of_string payment_amount_y.Amount.value - ; amount= None } + ; token = Token_id.(default |> to_uint64) + ; fee = Unsigned.UInt64.of_string payment_amount_y.Amount.value + ; amount = None + } in (* For token account creation, we demand: - * - * ops = length exactly 1 - * - * fee_payer_dec with account 'a, some amount 'y, status="Pending" + * + * ops = length exactly 1 + * + * fee_payer_dec with account 'a, some amount 'y, status="Pending" *) let create_token_account = let%map () = @@ -487,50 +497,51 @@ let of_operations (ops : Operation.t list) : else V.fail Length_mismatch and account_a = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in Option.value_map account ~default:(V.fail Account_not_some) ~f:V.return and fee_token = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | Some token_id -> - V.return token_id - | None -> - V.fail Incorrect_token_id ) + match token_id_of_account account with + | Some token_id -> + V.return token_id + | None 
-> + V.fail Incorrect_token_id ) | None -> V.fail Account_not_some and () = if List.for_all ops ~f:(fun op -> - Option.equal String.equal op.status (Some "Pending") ) + Option.equal String.equal op.status (Some "Pending")) then V.return () else V.fail Status_not_pending and payment_amount_y = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Fee_payer_dec ops in + let%bind { amount; _ } = find_kind `Fee_payer_dec ops in match amount with | Some x -> V.return (Amount_of.negated x) | None -> V.fail Amount_not_some in - { Partial.kind= `Create_token_account - ; fee_payer= `Pk account_a.address - ; source= `Pk account_a.address - ; receiver= `Pk account_a.address + { Partial.kind = `Create_token_account + ; fee_payer = `Pk account_a.address + ; source = `Pk account_a.address + ; receiver = `Pk account_a.address ; fee_token - ; token= Token_id.(default |> to_uint64) - ; fee= Unsigned.UInt64.of_string payment_amount_y.Amount.value - ; amount= None } + ; token = Token_id.(default |> to_uint64) + ; fee = Unsigned.UInt64.of_string payment_amount_y.Amount.value + ; amount = None + } in (* For token minting, we demand: - * - * ops = length exactly 2 - * - * fee_payer_dec with account 'a, some amount 'y, status="Pending" - * mint_tokens with account 'a, some amount 'y with the minted token id, metadata={token_owner_pk:'b}, status=Pending + * + * ops = length exactly 2 + * + * fee_payer_dec with account 'a, some amount 'y, status="Pending" + * mint_tokens with account 'a, some amount 'y with the minted token id, metadata={token_owner_pk:'b}, status=Pending *) let mint_tokens = let%map () = @@ -538,29 +549,29 @@ let of_operations (ops : Operation.t list) : else V.fail Length_mismatch and account_a = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in Option.value_map account ~default:(V.fail Account_not_some) ~f:V.return and fee_token = let open Result.Let_syntax in - 
let%bind {account; _} = find_kind `Fee_payer_dec ops in + let%bind { account; _ } = find_kind `Fee_payer_dec ops in match account with | Some account -> ( - match token_id_of_account account with - | Some token_id -> - V.return token_id - | None -> - V.fail Incorrect_token_id ) + match token_id_of_account account with + | Some token_id -> + V.return token_id + | None -> + V.fail Incorrect_token_id ) | None -> V.fail Account_not_some and () = if List.for_all ops ~f:(fun op -> - Option.equal String.equal op.status (Some "Pending") ) + Option.equal String.equal op.status (Some "Pending")) then V.return () else V.fail Status_not_pending and payment_amount_y = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Fee_payer_dec ops in + let%bind { amount; _ } = find_kind `Fee_payer_dec ops in match amount with | Some x -> V.return (Amount_of.negated x) @@ -568,50 +579,51 @@ let of_operations (ops : Operation.t list) : V.fail Amount_not_some and account_b = let open Result.Let_syntax in - let%bind {account; _} = find_kind `Mint_tokens ops in + let%bind { account; _ } = find_kind `Mint_tokens ops in Option.value_map account ~default:(V.fail Account_not_some) ~f:V.return and amount_b = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Mint_tokens ops in + let%bind { amount; _ } = find_kind `Mint_tokens ops in Option.value_map amount ~default:(V.fail Amount_not_some) ~f:V.return and account_c = let open Result.Let_syntax in - let%bind {metadata; _} = find_kind `Mint_tokens ops in + let%bind { metadata; _ } = find_kind `Mint_tokens ops in match metadata with | Some metadata -> ( - match metadata with - | `Assoc [("token_owner_pk", `String s)] -> - return s - | _ -> - V.fail Invalid_metadata ) + match metadata with + | `Assoc [ ("token_owner_pk", `String s) ] -> + return s + | _ -> + V.fail Invalid_metadata ) | None -> V.fail Account_not_some and token = let open Result.Let_syntax in - let%bind {amount; _} = find_kind `Mint_tokens ops in + let%bind 
{ amount; _ } = find_kind `Mint_tokens ops in (* check for Amount_not_some already done for amount_b *) - let Amount.{currency= {symbol; _}; _} = Option.value_exn amount in + let Amount.{ currency = { symbol; _ }; _ } = Option.value_exn amount in if String.equal symbol "CODA+" then return (Unsigned.UInt64.of_int 2) else V.fail Incorrect_token_id in - { Partial.kind= `Mint_tokens - ; fee_payer= `Pk account_a.address - ; source= `Pk account_c - ; receiver= `Pk account_b.address + { Partial.kind = `Mint_tokens + ; fee_payer = `Pk account_a.address + ; source = `Pk account_c + ; receiver = `Pk account_b.address ; fee_token ; token - ; fee= Unsigned.UInt64.of_string payment_amount_y.Amount.value - ; amount= Some (amount_b.Amount.value |> Unsigned.UInt64.of_string) } + ; fee = Unsigned.UInt64.of_string payment_amount_y.Amount.value + ; amount = Some (amount_b.Amount.value |> Unsigned.UInt64.of_string) + } in let partials = - [payment; delegation; create_token; create_token_account; mint_tokens] + [ payment; delegation; create_token; create_token_account; mint_tokens ] in let oks, errs = List.partition_map partials ~f:Result.ok_fst in match (oks, errs) with | [], errs -> (* no Oks *) Error (List.concat errs) - | [partial], _ -> + | [ partial ], _ -> (* exactly one Ok *) Ok partial | _, _ -> @@ -629,41 +641,47 @@ let to_operations ~failure_status (t : Partial.t) : Operation.t list = *) let plan : 'a Op.t list = ( if not Unsigned.UInt64.(equal t.fee zero) then - [{Op.label= `Fee_payer_dec; related_to= None}] + [ { Op.label = `Fee_payer_dec; related_to = None } ] else [] ) @ ( match failure_status with | Some (`Applied (Account_creation_fees_paid.By_receiver amount)) -> - [ { Op.label= `Account_creation_fee_via_payment amount - ; related_to= None } ] + [ { Op.label = `Account_creation_fee_via_payment amount + ; related_to = None + } + ] | Some (`Applied (Account_creation_fees_paid.By_fee_payer amount)) -> - [ { Op.label= `Account_creation_fee_via_fee_payer amount - ; 
related_to= None } ] + [ { Op.label = `Account_creation_fee_via_fee_payer amount + ; related_to = None + } + ] | _ -> [] ) @ match t.kind with | `Payment -> ( - (* When amount is not none, we move the amount from source to receiver -- unless it's a failure, we will capture that below *) - match t.amount with - | Some amount -> - [ {Op.label= `Payment_source_dec amount; related_to= None} - ; { Op.label= `Payment_receiver_inc amount - ; related_to= Some (`Payment_source_dec amount) } ] - | None -> - [] ) + (* When amount is not none, we move the amount from source to receiver -- unless it's a failure, we will capture that below *) + match t.amount with + | Some amount -> + [ { Op.label = `Payment_source_dec amount; related_to = None } + ; { Op.label = `Payment_receiver_inc amount + ; related_to = Some (`Payment_source_dec amount) + } + ] + | None -> + [] ) | `Delegation -> - [{Op.label= `Delegate_change; related_to= None}] + [ { Op.label = `Delegate_change; related_to = None } ] | `Create_token -> - [{Op.label= `Create_token; related_to= None}] + [ { Op.label = `Create_token; related_to = None } ] | `Create_token_account -> [] (* Covered by account creation fee *) | `Mint_tokens -> ( - (* When amount is not none, the amount goes to receiver's account *) - match t.amount with - | Some amount -> - [{Op.label= `Mint_tokens amount; related_to= None}] - | None -> - [] ) + (* When amount is not none, the amount goes to receiver's account *) + match t.amount with + | Some amount -> + [ { Op.label = `Mint_tokens amount; related_to = None } ] + | None -> + [] ) in Op.build ~a_eq: @@ -680,7 +698,7 @@ let to_operations ~failure_status (t : Partial.t) : Operation.t list = | _, Some (`Applied _) -> (`Success, None, false) | _, Some (`Failed reason) -> - (`Failed, Some (`Assoc [("reason", `String reason)]), true) + (`Failed, Some (`Assoc [ ("reason", `String reason) ]), true) in let pending_or_success_only = function | `Pending -> @@ -703,111 +721,121 @@ let to_operations 
~failure_status (t : Partial.t) : Operation.t list = | `Fee_payer_dec -> { Operation.operation_identifier ; related_operations - ; status= - Some - (status |> pending_or_success_only |> Operation_statuses.name) - ; account= Some (account_id t.fee_payer t.fee_token) - ; _type= Operation_types.name `Fee_payer_dec - ; amount= Some Amount_of.(negated @@ token t.fee_token t.fee) - ; coin_change= None - ; metadata } + ; status = + Some (status |> pending_or_success_only |> Operation_statuses.name) + ; account = Some (account_id t.fee_payer t.fee_token) + ; _type = Operation_types.name `Fee_payer_dec + ; amount = Some Amount_of.(negated @@ token t.fee_token t.fee) + ; coin_change = None + ; metadata + } | `Payment_source_dec amount -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= Some (account_id t.source t.token) - ; _type= Operation_types.name `Payment_source_dec - ; amount= + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id t.source t.token) + ; _type = Operation_types.name `Payment_source_dec + ; amount = ( if did_fail then None else Some Amount_of.(negated @@ token t.token amount) ) - ; coin_change= None - ; metadata } + ; coin_change = None + ; metadata + } | `Payment_receiver_inc amount -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= Some (account_id t.receiver t.token) - ; _type= Operation_types.name `Payment_receiver_inc - ; amount= + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id t.receiver t.token) + ; _type = Operation_types.name `Payment_receiver_inc + ; amount = (if did_fail then None else Some (Amount_of.token t.token amount)) - ; coin_change= None - ; metadata } + ; coin_change = None + ; metadata + } | `Account_creation_fee_via_payment account_creation_fee -> { Operation.operation_identifier ; related_operations - ; status= Some 
(Operation_statuses.name status) - ; account= Some (account_id t.receiver t.token) - ; _type= Operation_types.name `Account_creation_fee_via_payment - ; amount= Some Amount_of.(negated @@ coda account_creation_fee) - ; coin_change= None - ; metadata } + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id t.receiver t.token) + ; _type = Operation_types.name `Account_creation_fee_via_payment + ; amount = Some Amount_of.(negated @@ coda account_creation_fee) + ; coin_change = None + ; metadata + } | `Account_creation_fee_via_fee_payer account_creation_fee -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= Some (account_id t.fee_payer t.fee_token) - ; _type= Operation_types.name `Account_creation_fee_via_fee_payer - ; amount= Some Amount_of.(negated @@ coda account_creation_fee) - ; coin_change= None - ; metadata } + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id t.fee_payer t.fee_token) + ; _type = Operation_types.name `Account_creation_fee_via_fee_payer + ; amount = Some Amount_of.(negated @@ coda account_creation_fee) + ; coin_change = None + ; metadata + } | `Create_token -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= None - ; _type= Operation_types.name `Create_token - ; amount= None - ; coin_change= None - ; metadata } + ; status = Some (Operation_statuses.name status) + ; account = None + ; _type = Operation_types.name `Create_token + ; amount = None + ; coin_change = None + ; metadata + } | `Delegate_change -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= Some (account_id t.source Amount_of.Token_id.default) - ; _type= Operation_types.name `Delegate_change - ; amount= None - ; coin_change= None - ; metadata= + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id 
t.source Amount_of.Token_id.default) + ; _type = Operation_types.name `Delegate_change + ; amount = None + ; coin_change = None + ; metadata = merge_metadata metadata (Some (`Assoc [ ( "delegate_change_target" , `String (let (`Pk r) = t.receiver in - r) ) ])) } + r) ) + ])) + } | `Mint_tokens amount -> { Operation.operation_identifier ; related_operations - ; status= Some (Operation_statuses.name status) - ; account= Some (account_id t.receiver t.token) - ; _type= Operation_types.name `Mint_tokens - ; amount= Some (Amount_of.token t.token amount) - ; coin_change= None - ; metadata= + ; status = Some (Operation_statuses.name status) + ; account = Some (account_id t.receiver t.token) + ; _type = Operation_types.name `Mint_tokens + ; amount = Some (Amount_of.token t.token amount) + ; coin_change = None + ; metadata = merge_metadata metadata (Some (`Assoc [ ( "token_owner_pk" , `String (let (`Pk r) = t.source in - r) ) ])) } ) + r) ) + ])) + }) let to_operations' (t : t) : Operation.t list = to_operations ~failure_status:t.failure_status (forget t) let%test_unit "payment_round_trip" = let start = - { kind= `Payment (* default token *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= None - ; hash= "TXN_1_HASH" } + { kind = `Payment (* default token *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = None + ; hash = "TXN_1_HASH" + } in let ops = to_operations' start in match of_operations ops with @@ -818,17 +846,18 @@ let%test_unit 
"payment_round_trip" = let%test_unit "delegation_round_trip" = let start = - { kind= `Delegation - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 1_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 42 - ; amount= None - ; failure_status= None - ; hash= "TXN_2_HASH" } + { kind = `Delegation + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 1_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 42 + ; amount = None + ; failure_status = None + ; hash = "TXN_2_HASH" + } in let ops = to_operations' start in match of_operations ops with @@ -838,121 +867,132 @@ let%test_unit "delegation_round_trip" = failwithf !"Mismatch because %{sexp: Partial.Reason.t list}" e () let dummies = - [ { kind= `Payment (* default token *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_1_HASH" } - ; { kind= `Payment (* new account created *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= + [ { kind = `Payment (* default token *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = 
Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_1_HASH" + } + ; { kind = `Payment (* new account created *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = Some (`Applied (Account_creation_fees_paid.By_receiver (Unsigned.UInt64.of_int 1_000_000))) - ; hash= "TXN_1new_HASH" } - ; { kind= `Payment (* failed payment *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= Some (`Failed "Failure") - ; hash= "TXN_1fail_HASH" } - ; { kind= `Payment (* custom token *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 3 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_1a_HASH" } - ; { kind= `Payment (* custom fee-token *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 3 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 2_000_000_000) - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_1b_HASH" } - ; { kind= `Delegation - ; fee_payer= `Pk "Alice" - ; source= 
`Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= None - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_2_HASH" } - ; { kind= `Create_token (* no new account *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= None - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_3a_HASH" } - ; { kind= `Create_token (* new account fee *) - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= None - ; failure_status= + ; hash = "TXN_1new_HASH" + } + ; { kind = `Payment (* failed payment *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = Some (`Failed "Failure") + ; hash = "TXN_1fail_HASH" + } + ; { kind = `Payment (* custom token *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 3 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_1a_HASH" + } + ; { kind = `Payment (* custom fee-token *) + ; fee_payer = `Pk "Alice" + ; source = 
`Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 3 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 2_000_000_000) + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_1b_HASH" + } + ; { kind = `Delegation + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = None + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_2_HASH" + } + ; { kind = `Create_token (* no new account *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = None + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_3a_HASH" + } + ; { kind = `Create_token (* new account fee *) + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = None + ; failure_status = Some (`Applied (Account_creation_fees_paid.By_fee_payer (Unsigned.UInt64.of_int 3_000))) - ; hash= "TXN_3b_HASH" } - ; { kind= `Create_token_account - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 1 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= None - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_4_HASH" } - ; { kind= 
`Mint_tokens - ; fee_payer= `Pk "Alice" - ; source= `Pk "Alice" - ; token= Unsigned.UInt64.of_int 10 - ; fee= Unsigned.UInt64.of_int 2_000_000_000 - ; receiver= `Pk "Bob" - ; fee_token= Unsigned.UInt64.of_int 1 - ; nonce= Unsigned.UInt32.of_int 3 - ; amount= Some (Unsigned.UInt64.of_int 30_000) - ; failure_status= Some (`Applied Account_creation_fees_paid.By_no_one) - ; hash= "TXN_5_HASH" } ] + ; hash = "TXN_3b_HASH" + } + ; { kind = `Create_token_account + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 1 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = None + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_4_HASH" + } + ; { kind = `Mint_tokens + ; fee_payer = `Pk "Alice" + ; source = `Pk "Alice" + ; token = Unsigned.UInt64.of_int 10 + ; fee = Unsigned.UInt64.of_int 2_000_000_000 + ; receiver = `Pk "Bob" + ; fee_token = Unsigned.UInt64.of_int 1 + ; nonce = Unsigned.UInt32.of_int 3 + ; amount = Some (Unsigned.UInt64.of_int 30_000) + ; failure_status = Some (`Applied Account_creation_fees_paid.By_no_one) + ; hash = "TXN_5_HASH" + } + ] diff --git a/src/lib/rosetta_lib/validation.ml b/src/lib/rosetta_lib/validation.ml index dc862a23718..1ec53929446 100644 --- a/src/lib/rosetta_lib/validation.ml +++ b/src/lib/rosetta_lib/validation.ml @@ -8,7 +8,7 @@ module T = struct let return a = Result.return a - let fail e = Result.fail [e] + let fail e = Result.fail [ e ] let apply ft t = match (ft, t) with diff --git a/src/lib/rosetta_models/account_balance_request.ml b/src/lib/rosetta_models/account_balance_request.ml index 031649c62a3..54c5bbea7f7 100644 --- a/src/lib/rosetta_models/account_balance_request.ml +++ b/src/lib/rosetta_models/account_balance_request.ml @@ -7,17 +7,19 @@ *) type t = - { network_identifier: Network_identifier.t - ; account_identifier: Account_identifier.t - ; 
block_identifier: Partial_block_identifier.t option [@default None] + { network_identifier : Network_identifier.t + ; account_identifier : Account_identifier.t + ; block_identifier : Partial_block_identifier.t option [@default None] ; (* In some cases, the caller may not want to retrieve all available balances for an AccountIdentifier. If the currencies field is populated, only balances for the specified currencies will be returned. If not populated, all available balances will be returned. *) - currencies: Currency.t list } -[@@deriving yojson {strict= false}, show] + currencies : Currency.t list + } +[@@deriving yojson { strict = false }, show] (** An AccountBalanceRequest is utilized to make a balance request on the /account/balance endpoint. If the block_identifier is populated, a historical balance query should be performed. *) let create (network_identifier : Network_identifier.t) (account_identifier : Account_identifier.t) : t = { network_identifier ; account_identifier - ; block_identifier= None - ; currencies= [] } + ; block_identifier = None + ; currencies = [] + } diff --git a/src/lib/rosetta_models/account_balance_response.ml b/src/lib/rosetta_models/account_balance_response.ml index f8b2c5e3e9e..c70c1a781ac 100644 --- a/src/lib/rosetta_models/account_balance_response.ml +++ b/src/lib/rosetta_models/account_balance_response.ml @@ -7,14 +7,15 @@ *) type t = - { block_identifier: Block_identifier.t + { block_identifier : Block_identifier.t ; (* A single account may have a balance in multiple currencies. *) - balances: Amount.t list + balances : Amount.t list ; (* Account-based blockchains that utilize a nonce or sequence number should include that number in the metadata. This number could be unique to the identifier or global across the account address. 
*) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** An AccountBalanceResponse is returned on the /account/balance endpoint. If an account has a balance for each AccountIdentifier describing it (ex: an ERC-20 token balance on a few smart contracts), an account balance request must be made with each AccountIdentifier. The `coins` field was removed and replaced by by `/account/coins` in `v1.4.7`. *) let create (block_identifier : Block_identifier.t) (balances : Amount.t list) : t = - {block_identifier; balances; metadata= None} + { block_identifier; balances; metadata = None } diff --git a/src/lib/rosetta_models/account_coins_request.ml b/src/lib/rosetta_models/account_coins_request.ml index d491f0e906d..c8276928aa8 100644 --- a/src/lib/rosetta_models/account_coins_request.ml +++ b/src/lib/rosetta_models/account_coins_request.ml @@ -7,15 +7,16 @@ *) type t = - { network_identifier: Network_identifier.t - ; account_identifier: Account_identifier.t + { network_identifier : Network_identifier.t + ; account_identifier : Account_identifier.t ; (* Include state from the mempool when looking up an account's unspent coins. Note, using this functionality breaks any guarantee of idempotency. *) - include_mempool: bool + include_mempool : bool ; (* In some cases, the caller may not want to retrieve coins for all currencies for an AccountIdentifier. If the currencies field is populated, only coins for the specified currencies will be returned. If not populated, all unspent coins will be returned. *) - currencies: Currency.t list } -[@@deriving yojson {strict= false}, show] + currencies : Currency.t list + } +[@@deriving yojson { strict = false }, show] (** AccountCoinsRequest is utilized to make a request on the /account/coins endpoint. 
*) let create (network_identifier : Network_identifier.t) (account_identifier : Account_identifier.t) (include_mempool : bool) : t = - {network_identifier; account_identifier; include_mempool; currencies= []} + { network_identifier; account_identifier; include_mempool; currencies = [] } diff --git a/src/lib/rosetta_models/account_coins_response.ml b/src/lib/rosetta_models/account_coins_response.ml index edb85e03a71..aa2f7032761 100644 --- a/src/lib/rosetta_models/account_coins_response.ml +++ b/src/lib/rosetta_models/account_coins_response.ml @@ -7,13 +7,14 @@ *) type t = - { block_identifier: Block_identifier.t + { block_identifier : Block_identifier.t ; (* If a blockchain is UTXO-based, all unspent Coins owned by an account_identifier should be returned alongside the balance. It is highly recommended to populate this field so that users of the Rosetta API implementation don't need to maintain their own indexer to track their UTXOs. *) - coins: Coin.t list + coins : Coin.t list ; (* Account-based blockchains that utilize a nonce or sequence number should include that number in the metadata. This number could be unique to the identifier or global across the account address. *) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** AccountCoinsResponse is returned on the /account/coins endpoint and includes all unspent Coins owned by an AccountIdentifier. 
*) let create (block_identifier : Block_identifier.t) (coins : Coin.t list) : t = - {block_identifier; coins; metadata= None} + { block_identifier; coins; metadata = None } diff --git a/src/lib/rosetta_models/account_identifier.ml b/src/lib/rosetta_models/account_identifier.ml index f2f555620e8..c3da01df1f7 100644 --- a/src/lib/rosetta_models/account_identifier.ml +++ b/src/lib/rosetta_models/account_identifier.ml @@ -8,11 +8,13 @@ type t = { (* The address may be a cryptographic public key (or some encoding of it) or a provided username. *) - address: string - ; sub_account: Sub_account_identifier.t option [@default None] + address : string + ; sub_account : Sub_account_identifier.t option [@default None] ; (* Blockchains that utilize a username model (where the address is not a derivative of a cryptographic public key) should specify the public key(s) owned by the address in metadata. *) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** The account_identifier uniquely identifies an account within a network. All fields in the account_identifier are utilized to determine this uniqueness (including the metadata field, if populated). *) -let create (address : string) : t = {address; sub_account= None; metadata= None} +let create (address : string) : t = + { address; sub_account = None; metadata = None } diff --git a/src/lib/rosetta_models/allow.ml b/src/lib/rosetta_models/allow.ml index 160f6932ec3..4abf5b8db73 100644 --- a/src/lib/rosetta_models/allow.ml +++ b/src/lib/rosetta_models/allow.ml @@ -8,34 +8,35 @@ type t = { (* All Operation.Status this implementation supports. Any status that is returned during parsing that is not listed here will cause client validation to error. 
*) - operation_statuses: Operation_status.t list + operation_statuses : Operation_status.t list ; (* All Operation.Type this implementation supports. Any type that is returned during parsing that is not listed here will cause client validation to error. *) - operation_types: string list + operation_types : string list ; (* All Errors that this implementation could return. Any error that is returned during parsing that is not listed here will cause client validation to error. *) - errors: Error.t list + errors : Error.t list ; (* Any Rosetta implementation that supports querying the balance of an account at any height in the past should set this to true. *) - historical_balance_lookup: bool + historical_balance_lookup : bool ; (* If populated, `timestamp_start_index` indicates the first block index where block timestamps are considered valid (i.e. all blocks less than `timestamp_start_index` could have invalid timestamps). This is useful when the genesis block (or blocks) of a network have timestamp 0. If not populated, block timestamps are assumed to be valid for all available blocks. *) - timestamp_start_index: int64 option [@default None] + timestamp_start_index : int64 option [@default None] ; (* All methods that are supported by the /call endpoint. Communicating which parameters should be provided to /call is the responsibility of the implementer (this is en lieu of defining an entire type system and requiring the implementer to define that in Allow). *) - call_methods: string list + call_methods : string list ; (* BalanceExemptions is an array of BalanceExemption indicating which account balances could change without a corresponding Operation. BalanceExemptions should be used sparingly as they may introduce significant complexity for integrators that attempt to reconcile all account balance changes. 
If your implementation relies on any BalanceExemptions, you MUST implement historical balance lookup (the ability to query an account balance at any BlockIdentifier). *) - balance_exemptions: Balance_exemption.t list + balance_exemptions : Balance_exemption.t list ; (* Any Rosetta implementation that can update an AccountIdentifier's unspent coins based on the contents of the mempool should populate this field as true. If false, requests to `/account/coins` that set `include_mempool` as true will be automatically rejected. *) - mempool_coins: bool } -[@@deriving yojson {strict= false}, show] + mempool_coins : bool + } +[@@deriving yojson { strict = false }, show] (** Allow specifies supported Operation status, Operation types, and all possible error statuses. This Allow object is used by clients to validate the correctness of a Rosetta Server implementation. It is expected that these clients will error if they receive some response that contains any of the above information that is not specified here. *) let create (operation_statuses : Operation_status.t list) (operation_types : string list) (errors : Error.t list) (historical_balance_lookup : bool) (call_methods : string list) - (balance_exemptions : Balance_exemption.t list) (mempool_coins : bool) : t - = + (balance_exemptions : Balance_exemption.t list) (mempool_coins : bool) : t = { operation_statuses ; operation_types ; errors ; historical_balance_lookup - ; timestamp_start_index= None + ; timestamp_start_index = None ; call_methods ; balance_exemptions - ; mempool_coins } + ; mempool_coins + } diff --git a/src/lib/rosetta_models/amount.ml b/src/lib/rosetta_models/amount.ml index 0c4ab2a8a18..8c5b7670f7c 100644 --- a/src/lib/rosetta_models/amount.ml +++ b/src/lib/rosetta_models/amount.ml @@ -8,11 +8,12 @@ type t = { (* Value of the transaction in atomic units represented as an arbitrary-sized signed integer. For example, 1 BTC would be represented by a value of 100000000. 
*) - value: string - ; currency: Currency.t - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + value : string + ; currency : Currency.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** Amount is some Value of a Currency. It is considered invalid to specify a Value without a Currency. *) let create (value : string) (currency : Currency.t) : t = - {value; currency; metadata= None} + { value; currency; metadata = None } diff --git a/src/lib/rosetta_models/balance_exemption.ml b/src/lib/rosetta_models/balance_exemption.ml index b56500b3e70..9630c9472d8 100644 --- a/src/lib/rosetta_models/balance_exemption.ml +++ b/src/lib/rosetta_models/balance_exemption.ml @@ -8,11 +8,12 @@ type t = { (* SubAccountAddress is the SubAccountIdentifier.Address that the BalanceExemption applies to (regardless of the value of SubAccountIdentifier.Metadata). *) - sub_account_address: string option [@default None] - ; currency: Currency.t option [@default None] - ; exemption_type: Enums.exemptiontype option [@default None] } -[@@deriving yojson {strict= false}, show] + sub_account_address : string option [@default None] + ; currency : Currency.t option [@default None] + ; exemption_type : Enums.exemptiontype option [@default None] + } +[@@deriving yojson { strict = false }, show] (** BalanceExemption indicates that the balance for an exempt account could change without a corresponding Operation. This typically occurs with staking rewards, vesting balances, and Currencies with a dynamic supply. Currently, it is possible to exempt an account from strict reconciliation by SubAccountIdentifier.Address or by Currency. This means that any account with SubAccountIdentifier.Address would be exempt or any balance of a particular Currency would be exempt, respectively. 
BalanceExemptions should be used sparingly as they may introduce significant complexity for integrators that attempt to reconcile all account balance changes. If your implementation relies on any BalanceExemptions, you MUST implement historical balance lookup (the ability to query an account balance at any BlockIdentifier). *) let create () : t = - {sub_account_address= None; currency= None; exemption_type= None} + { sub_account_address = None; currency = None; exemption_type = None } diff --git a/src/lib/rosetta_models/block.ml b/src/lib/rosetta_models/block.ml index 5fe0d5e4e4c..fa49b35bdd0 100644 --- a/src/lib/rosetta_models/block.ml +++ b/src/lib/rosetta_models/block.ml @@ -7,12 +7,13 @@ *) type t = - { block_identifier: Block_identifier.t - ; parent_block_identifier: Block_identifier.t - ; timestamp: Timestamp.t - ; transactions: Transaction.t list - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + { block_identifier : Block_identifier.t + ; parent_block_identifier : Block_identifier.t + ; timestamp : Timestamp.t + ; transactions : Transaction.t list + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** Blocks contain an array of Transactions that occurred at a particular BlockIdentifier. A hard requirement for blocks returned by Rosetta implementations is that they MUST be _inalterable_: once a client has requested and received a block identified by a specific BlockIndentifier, all future calls for that same BlockIdentifier must return the same block contents. 
*) let create (block_identifier : Block_identifier.t) @@ -22,4 +23,5 @@ let create (block_identifier : Block_identifier.t) ; parent_block_identifier ; timestamp ; transactions - ; metadata= None } + ; metadata = None + } diff --git a/src/lib/rosetta_models/block_event.ml b/src/lib/rosetta_models/block_event.ml index 2ffe2c28d9f..944f2911377 100644 --- a/src/lib/rosetta_models/block_event.ml +++ b/src/lib/rosetta_models/block_event.ml @@ -8,12 +8,13 @@ type t = { (* sequence is the unique identifier of a BlockEvent within the context of a NetworkIdentifier. *) - sequence: int64 - ; block_identifier: Block_identifier.t - ; _type: Enums.blockeventtype } -[@@deriving yojson {strict= false}, show] + sequence : int64 + ; block_identifier : Block_identifier.t + ; _type : Enums.blockeventtype + } +[@@deriving yojson { strict = false }, show] (** BlockEvent represents the addition or removal of a BlockIdentifier from storage. Streaming BlockEvents allows lightweight clients to update their own state without needing to implement their own syncing logic. *) let create (sequence : int64) (block_identifier : Block_identifier.t) (_type : Enums.blockeventtype) : t = - {sequence; block_identifier; _type} + { sequence; block_identifier; _type } diff --git a/src/lib/rosetta_models/block_identifier.ml b/src/lib/rosetta_models/block_identifier.ml index d16c2642372..e9fc31e6e83 100644 --- a/src/lib/rosetta_models/block_identifier.ml +++ b/src/lib/rosetta_models/block_identifier.ml @@ -7,8 +7,8 @@ *) type t = - {(* This is also known as the block height. *) index: int64; hash: string} -[@@deriving yojson {strict= false}, show] + { (* This is also known as the block height. *) index : int64; hash : string } +[@@deriving yojson { strict = false }, show] (** The block_identifier uniquely identifies a block in a particular network. 
*) -let create (index : int64) (hash : string) : t = {index; hash} +let create (index : int64) (hash : string) : t = { index; hash } diff --git a/src/lib/rosetta_models/block_request.ml b/src/lib/rosetta_models/block_request.ml index a9964f805e9..7ee2efb0cca 100644 --- a/src/lib/rosetta_models/block_request.ml +++ b/src/lib/rosetta_models/block_request.ml @@ -7,11 +7,12 @@ *) type t = - { network_identifier: Network_identifier.t - ; block_identifier: Partial_block_identifier.t } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; block_identifier : Partial_block_identifier.t + } +[@@deriving yojson { strict = false }, show] (** A BlockRequest is utilized to make a block request on the /block endpoint. *) let create (network_identifier : Network_identifier.t) (block_identifier : Partial_block_identifier.t) : t = - {network_identifier; block_identifier} + { network_identifier; block_identifier } diff --git a/src/lib/rosetta_models/block_response.ml b/src/lib/rosetta_models/block_response.ml index 8b17929504f..9ff9986ce71 100644 --- a/src/lib/rosetta_models/block_response.ml +++ b/src/lib/rosetta_models/block_response.ml @@ -7,10 +7,11 @@ *) type t = - { block: Block.t option [@default None] + { block : Block.t option [@default None] ; (* Some blockchains may require additional transactions to be fetched that weren't returned in the block response (ex: block only returns transaction hashes). For blockchains with a lot of transactions in each block, this can be very useful as consumers can concurrently fetch all transactions returned. *) - other_transactions: Transaction_identifier.t list } -[@@deriving yojson {strict= false}, show] + other_transactions : Transaction_identifier.t list + } +[@@deriving yojson { strict = false }, show] (** A BlockResponse includes a fully-populated block or a partially-populated block with a list of other transactions to fetch (other_transactions). 
As a result of the consensus algorithm of some blockchains, blocks can be omitted (i.e. certain block indexes can be skipped). If a query for one of these omitted indexes is made, the response should not include a `Block` object. It is VERY important to note that blocks MUST still form a canonical, connected chain of blocks where each block has a unique index. In other words, the `PartialBlockIdentifier` of a block after an omitted block should reference the last non-omitted block. *) -let create () : t = {block= None; other_transactions= []} +let create () : t = { block = None; other_transactions = [] } diff --git a/src/lib/rosetta_models/block_transaction.ml b/src/lib/rosetta_models/block_transaction.ml index 083d9707a8d..22bf3ee93e8 100644 --- a/src/lib/rosetta_models/block_transaction.ml +++ b/src/lib/rosetta_models/block_transaction.ml @@ -6,10 +6,10 @@ * Schema Block_transaction.t : BlockTransaction contains a populated Transaction and the BlockIdentifier that contains it. *) -type t = {block_identifier: Block_identifier.t; transaction: Transaction.t} -[@@deriving yojson {strict= false}, show] +type t = { block_identifier : Block_identifier.t; transaction : Transaction.t } +[@@deriving yojson { strict = false }, show] (** BlockTransaction contains a populated Transaction and the BlockIdentifier that contains it. 
*) -let create (block_identifier : Block_identifier.t) - (transaction : Transaction.t) : t = - {block_identifier; transaction} +let create (block_identifier : Block_identifier.t) (transaction : Transaction.t) + : t = + { block_identifier; transaction } diff --git a/src/lib/rosetta_models/block_transaction_request.ml b/src/lib/rosetta_models/block_transaction_request.ml index c1366dcd4b3..450df7210db 100644 --- a/src/lib/rosetta_models/block_transaction_request.ml +++ b/src/lib/rosetta_models/block_transaction_request.ml @@ -7,13 +7,14 @@ *) type t = - { network_identifier: Network_identifier.t - ; block_identifier: Block_identifier.t - ; transaction_identifier: Transaction_identifier.t } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; block_identifier : Block_identifier.t + ; transaction_identifier : Transaction_identifier.t + } +[@@deriving yojson { strict = false }, show] (** A BlockTransactionRequest is used to fetch a Transaction included in a block that is not returned in a BlockResponse. *) let create (network_identifier : Network_identifier.t) (block_identifier : Block_identifier.t) (transaction_identifier : Transaction_identifier.t) : t = - {network_identifier; block_identifier; transaction_identifier} + { network_identifier; block_identifier; transaction_identifier } diff --git a/src/lib/rosetta_models/block_transaction_response.ml b/src/lib/rosetta_models/block_transaction_response.ml index 79a4baf57ce..1a1065af8b8 100644 --- a/src/lib/rosetta_models/block_transaction_response.ml +++ b/src/lib/rosetta_models/block_transaction_response.ml @@ -6,7 +6,8 @@ * Schema Block_transaction_response.t : A BlockTransactionResponse contains information about a block transaction. 
*) -type t = {transaction: Transaction.t} [@@deriving yojson {strict= false}, show] +type t = { transaction : Transaction.t } +[@@deriving yojson { strict = false }, show] (** A BlockTransactionResponse contains information about a block transaction. *) -let create (transaction : Transaction.t) : t = {transaction} +let create (transaction : Transaction.t) : t = { transaction } diff --git a/src/lib/rosetta_models/call_request.ml b/src/lib/rosetta_models/call_request.ml index a8d8fb79933..b9699198eaa 100644 --- a/src/lib/rosetta_models/call_request.ml +++ b/src/lib/rosetta_models/call_request.ml @@ -7,14 +7,15 @@ *) type t = - { network_identifier: Network_identifier.t + { network_identifier : Network_identifier.t ; (* Method is some network-specific procedure call. This method could map to a network-specific RPC endpoint, a method in an SDK generated from a smart contract, or some hybrid of the two. The implementation must define all available methods in the Allow object. However, it is up to the caller to determine which parameters to provide when invoking `/call`. *) - _method: string + _method : string ; (* Parameters is some network-specific argument for a method. It is up to the caller to determine which parameters to provide when invoking `/call`. *) - parameters: Yojson.Safe.t } -[@@deriving yojson {strict= false}, show] + parameters : Yojson.Safe.t + } +[@@deriving yojson { strict = false }, show] (** CallRequest is the input to the `/call` endpoint. *) let create (network_identifier : Network_identifier.t) (_method : string) (parameters : Yojson.Safe.t) : t = - {network_identifier; _method; parameters} + { network_identifier; _method; parameters } diff --git a/src/lib/rosetta_models/call_response.ml b/src/lib/rosetta_models/call_response.ml index a31ef347b93..3e9afa488d3 100644 --- a/src/lib/rosetta_models/call_response.ml +++ b/src/lib/rosetta_models/call_response.ml @@ -8,11 +8,12 @@ type t = { (* Result contains the result of the `/call` invocation. 
This result will not be inspected or interpreted by Rosetta tooling and is left to the caller to decode. *) - _result: Yojson.Safe.t + _result : Yojson.Safe.t ; (* Idempotent indicates that if `/call` is invoked with the same CallRequest again, at any point in time, it will return the same CallResponse. Integrators may cache the CallResponse if this is set to true to avoid making unnecessary calls to the Rosetta implementation. For this reason, implementers should be very conservative about returning true here or they could cause issues for the caller. *) - idempotent: bool } -[@@deriving yojson {strict= false}, show] + idempotent : bool + } +[@@deriving yojson { strict = false }, show] (** CallResponse contains the result of a `/call` invocation. *) let create (_result : Yojson.Safe.t) (idempotent : bool) : t = - {_result; idempotent} + { _result; idempotent } diff --git a/src/lib/rosetta_models/coin.ml b/src/lib/rosetta_models/coin.ml index 5cbf8d12de0..e3644ec2d4c 100644 --- a/src/lib/rosetta_models/coin.ml +++ b/src/lib/rosetta_models/coin.ml @@ -6,9 +6,9 @@ * Schema Coin.t : Coin contains its unique identifier and the amount it represents. *) -type t = {coin_identifier: Coin_identifier.t; amount: Amount.t} -[@@deriving yojson {strict= false}, show] +type t = { coin_identifier : Coin_identifier.t; amount : Amount.t } +[@@deriving yojson { strict = false }, show] (** Coin contains its unique identifier and the amount it represents. *) let create (coin_identifier : Coin_identifier.t) (amount : Amount.t) : t = - {coin_identifier; amount} + { coin_identifier; amount } diff --git a/src/lib/rosetta_models/coin_change.ml b/src/lib/rosetta_models/coin_change.ml index a8f21df600b..423fab472fd 100644 --- a/src/lib/rosetta_models/coin_change.ml +++ b/src/lib/rosetta_models/coin_change.ml @@ -6,10 +6,10 @@ * Schema Coin_change.t : CoinChange is used to represent a change in state of a some coin identified by a coin_identifier. 
This object is part of the Operation model and must be populated for UTXO-based blockchains. Coincidentally, this abstraction of UTXOs allows for supporting both account-based transfers and UTXO-based transfers on the same blockchain (when a transfer is account-based, don't populate this model). *) -type t = {coin_identifier: Coin_identifier.t; coin_action: Enums.coinaction} -[@@deriving yojson {strict= false}, show, eq] +type t = { coin_identifier : Coin_identifier.t; coin_action : Enums.coinaction } +[@@deriving yojson { strict = false }, show, eq] (** CoinChange is used to represent a change in state of a some coin identified by a coin_identifier. This object is part of the Operation model and must be populated for UTXO-based blockchains. Coincidentally, this abstraction of UTXOs allows for supporting both account-based transfers and UTXO-based transfers on the same blockchain (when a transfer is account-based, don't populate this model). *) let create (coin_identifier : Coin_identifier.t) (coin_action : Enums.coinaction) : t = - {coin_identifier; coin_action} + { coin_identifier; coin_action } diff --git a/src/lib/rosetta_models/coin_identifier.ml b/src/lib/rosetta_models/coin_identifier.ml index 607c6f43ebe..07de2e5d854 100644 --- a/src/lib/rosetta_models/coin_identifier.ml +++ b/src/lib/rosetta_models/coin_identifier.ml @@ -8,8 +8,9 @@ type t = { (* Identifier should be populated with a globally unique identifier of a Coin. In Bitcoin, this identifier would be transaction_hash:index. *) - identifier: string } -[@@deriving yojson {strict= false}, show, eq] + identifier : string + } +[@@deriving yojson { strict = false }, show, eq] (** CoinIdentifier uniquely identifies a Coin. 
*) -let create (identifier : string) : t = {identifier} +let create (identifier : string) : t = { identifier } diff --git a/src/lib/rosetta_models/construction_combine_request.ml b/src/lib/rosetta_models/construction_combine_request.ml index 69451ce12c2..22a09b18f1a 100644 --- a/src/lib/rosetta_models/construction_combine_request.ml +++ b/src/lib/rosetta_models/construction_combine_request.ml @@ -7,12 +7,13 @@ *) type t = - { network_identifier: Network_identifier.t - ; unsigned_transaction: string - ; signatures: Signature.t list } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; unsigned_transaction : string + ; signatures : Signature.t list + } +[@@deriving yojson { strict = false }, show] (** ConstructionCombineRequest is the input to the `/construction/combine` endpoint. It contains the unsigned transaction blob returned by `/construction/payloads` and all required signatures to create a network transaction. *) let create (network_identifier : Network_identifier.t) (unsigned_transaction : string) (signatures : Signature.t list) : t = - {network_identifier; unsigned_transaction; signatures} + { network_identifier; unsigned_transaction; signatures } diff --git a/src/lib/rosetta_models/construction_combine_response.ml b/src/lib/rosetta_models/construction_combine_response.ml index 0a15d2d533a..32f81c1c9c7 100644 --- a/src/lib/rosetta_models/construction_combine_response.ml +++ b/src/lib/rosetta_models/construction_combine_response.ml @@ -6,7 +6,8 @@ * Schema Construction_combine_response.t : ConstructionCombineResponse is returned by `/construction/combine`. The network payload will be sent directly to the `construction/submit` endpoint. *) -type t = {signed_transaction: string} [@@deriving yojson {strict= false}, show] +type t = { signed_transaction : string } +[@@deriving yojson { strict = false }, show] (** ConstructionCombineResponse is returned by `/construction/combine`. 
The network payload will be sent directly to the `construction/submit` endpoint. *) -let create (signed_transaction : string) : t = {signed_transaction} +let create (signed_transaction : string) : t = { signed_transaction } diff --git a/src/lib/rosetta_models/construction_derive_request.ml b/src/lib/rosetta_models/construction_derive_request.ml index 99c3270cc62..f4708e8969d 100644 --- a/src/lib/rosetta_models/construction_derive_request.ml +++ b/src/lib/rosetta_models/construction_derive_request.ml @@ -7,12 +7,13 @@ *) type t = - { network_identifier: Network_identifier.t - ; public_key: Public_key.t - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; public_key : Public_key.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** ConstructionDeriveRequest is passed to the `/construction/derive` endpoint. Network is provided in the request because some blockchains have different address formats for different networks. Metadata is provided in the request because some blockchains allow for multiple address types (i.e. different address for validators vs normal accounts). *) let create (network_identifier : Network_identifier.t) (public_key : Public_key.t) : t = - {network_identifier; public_key; metadata= None} + { network_identifier; public_key; metadata = None } diff --git a/src/lib/rosetta_models/construction_derive_response.ml b/src/lib/rosetta_models/construction_derive_response.ml index c16f34cb157..10034612bb8 100644 --- a/src/lib/rosetta_models/construction_derive_response.ml +++ b/src/lib/rosetta_models/construction_derive_response.ml @@ -8,10 +8,12 @@ type t = { (* [DEPRECATED by `account_identifier` in `v1.4.4`] Address in network-specific format. 
*) - address: string option [@default None] - ; account_identifier: Account_identifier.t option [@default None] - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + address : string option [@default None] + ; account_identifier : Account_identifier.t option [@default None] + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** ConstructionDeriveResponse is returned by the `/construction/derive` endpoint. *) -let create () : t = {address= None; account_identifier= None; metadata= None} +let create () : t = + { address = None; account_identifier = None; metadata = None } diff --git a/src/lib/rosetta_models/construction_hash_request.ml b/src/lib/rosetta_models/construction_hash_request.ml index dad5e932be5..053945127df 100644 --- a/src/lib/rosetta_models/construction_hash_request.ml +++ b/src/lib/rosetta_models/construction_hash_request.ml @@ -6,10 +6,11 @@ * Schema Construction_hash_request.t : ConstructionHashRequest is the input to the `/construction/hash` endpoint. *) -type t = {network_identifier: Network_identifier.t; signed_transaction: string} -[@@deriving yojson {strict= false}, show] +type t = + { network_identifier : Network_identifier.t; signed_transaction : string } +[@@deriving yojson { strict = false }, show] (** ConstructionHashRequest is the input to the `/construction/hash` endpoint. 
*) let create (network_identifier : Network_identifier.t) (signed_transaction : string) : t = - {network_identifier; signed_transaction} + { network_identifier; signed_transaction } diff --git a/src/lib/rosetta_models/construction_hash_response.ml b/src/lib/rosetta_models/construction_hash_response.ml index ec676f0d23d..a0af553a81e 100644 --- a/src/lib/rosetta_models/construction_hash_response.ml +++ b/src/lib/rosetta_models/construction_hash_response.ml @@ -6,7 +6,8 @@ * Schema Construction_hash_response.t : ConstructionHashResponse is the output of the `/construction/hash` endpoint. *) -type t = {transaction_hash: string} [@@deriving yojson {strict= false}, show] +type t = { transaction_hash : string } +[@@deriving yojson { strict = false }, show] (** ConstructionHashResponse is the output of the `/construction/hash` endpoint. *) -let create (transaction_hash : string) : t = {transaction_hash} +let create (transaction_hash : string) : t = { transaction_hash } diff --git a/src/lib/rosetta_models/construction_metadata_request.ml b/src/lib/rosetta_models/construction_metadata_request.ml index 615ed385a3a..4e04e71f36d 100644 --- a/src/lib/rosetta_models/construction_metadata_request.ml +++ b/src/lib/rosetta_models/construction_metadata_request.ml @@ -7,12 +7,13 @@ *) type t = - { network_identifier: Network_identifier.t + { network_identifier : Network_identifier.t ; (* Some blockchains require different metadata for different types of transaction construction (ex: delegation versus a transfer). Instead of requiring a blockchain node to return all possible types of metadata for construction (which may require multiple node fetches), the client can populate an options object to limit the metadata returned to only the subset required. 
*) - options: Yojson.Safe.t option [@default None] - ; public_keys: Public_key.t list } -[@@deriving yojson {strict= false}, show] + options : Yojson.Safe.t option [@default None] + ; public_keys : Public_key.t list + } +[@@deriving yojson { strict = false }, show] (** A ConstructionMetadataRequest is utilized to get information required to construct a transaction. The Options object used to specify which metadata to return is left purposely unstructured to allow flexibility for implementers. Options is not required in the case that there is network-wide metadata of interest. Optionally, the request can also include an array of PublicKeys associated with the AccountIdentifiers returned in ConstructionPreprocessResponse. *) let create (network_identifier : Network_identifier.t) : t = - {network_identifier; options= None; public_keys= []} + { network_identifier; options = None; public_keys = [] } diff --git a/src/lib/rosetta_models/construction_metadata_response.ml b/src/lib/rosetta_models/construction_metadata_response.ml index ab0075e0bd8..623396f9e92 100644 --- a/src/lib/rosetta_models/construction_metadata_response.ml +++ b/src/lib/rosetta_models/construction_metadata_response.ml @@ -6,8 +6,8 @@ * Schema Construction_metadata_response.t : The ConstructionMetadataResponse returns network-specific metadata used for transaction construction. Optionally, the implementer can return the suggested fee associated with the transaction being constructed. The caller may use this info to adjust the intent of the transaction or to create a transaction with a different account that can pay the suggested fee. Suggested fee is an array in case fee payment must occur in multiple currencies. 
*) -type t = {metadata: Yojson.Safe.t; suggested_fee: Amount.t list} -[@@deriving yojson {strict= false}, show] +type t = { metadata : Yojson.Safe.t; suggested_fee : Amount.t list } +[@@deriving yojson { strict = false }, show] (** The ConstructionMetadataResponse returns network-specific metadata used for transaction construction. Optionally, the implementer can return the suggested fee associated with the transaction being constructed. The caller may use this info to adjust the intent of the transaction or to create a transaction with a different account that can pay the suggested fee. Suggested fee is an array in case fee payment must occur in multiple currencies. *) -let create (metadata : Yojson.Safe.t) : t = {metadata; suggested_fee= []} +let create (metadata : Yojson.Safe.t) : t = { metadata; suggested_fee = [] } diff --git a/src/lib/rosetta_models/construction_parse_request.ml b/src/lib/rosetta_models/construction_parse_request.ml index f26f42b2c12..f51519cc163 100644 --- a/src/lib/rosetta_models/construction_parse_request.ml +++ b/src/lib/rosetta_models/construction_parse_request.ml @@ -7,14 +7,15 @@ *) type t = - { network_identifier: Network_identifier.t + { network_identifier : Network_identifier.t ; (* Signed is a boolean indicating whether the transaction is signed. *) - signed: bool + signed : bool ; (* This must be either the unsigned transaction blob returned by `/construction/payloads` or the signed transaction blob returned by `/construction/combine`. *) - transaction: string } -[@@deriving yojson {strict= false}, show] + transaction : string + } +[@@deriving yojson { strict = false }, show] (** ConstructionParseRequest is the input to the `/construction/parse` endpoint. It allows the caller to parse either an unsigned or signed transaction. 
*) let create (network_identifier : Network_identifier.t) (signed : bool) (transaction : string) : t = - {network_identifier; signed; transaction} + { network_identifier; signed; transaction } diff --git a/src/lib/rosetta_models/construction_parse_response.ml b/src/lib/rosetta_models/construction_parse_response.ml index 1222170b79d..d5be4c274f9 100644 --- a/src/lib/rosetta_models/construction_parse_response.ml +++ b/src/lib/rosetta_models/construction_parse_response.ml @@ -7,13 +7,14 @@ *) type t = - { operations: Operation.t list + { operations : Operation.t list ; (* [DEPRECATED by `account_identifier_signers` in `v1.4.4`] All signers (addresses) of a particular transaction. If the transaction is unsigned, it should be empty. *) - signers: string list - ; account_identifier_signers: Account_identifier.t list - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + signers : string list + ; account_identifier_signers : Account_identifier.t list + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** ConstructionParseResponse contains an array of operations that occur in a transaction blob. This should match the array of operations provided to `/construction/preprocess` and `/construction/payloads`. 
*) let create (operations : Operation.t list) : t = - {operations; signers= []; account_identifier_signers= []; metadata= None} + { operations; signers = []; account_identifier_signers = []; metadata = None } diff --git a/src/lib/rosetta_models/construction_payloads_request.ml b/src/lib/rosetta_models/construction_payloads_request.ml index e707df360e2..f17a48bb349 100644 --- a/src/lib/rosetta_models/construction_payloads_request.ml +++ b/src/lib/rosetta_models/construction_payloads_request.ml @@ -7,13 +7,14 @@ *) type t = - { network_identifier: Network_identifier.t - ; operations: Operation.t list - ; metadata: Yojson.Safe.t option [@default None] - ; public_keys: Public_key.t list } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; operations : Operation.t list + ; metadata : Yojson.Safe.t option [@default None] + ; public_keys : Public_key.t list + } +[@@deriving yojson { strict = false }, show] (** ConstructionPayloadsRequest is the request to `/construction/payloads`. It contains the network, a slice of operations, and arbitrary metadata that was returned by the call to `/construction/metadata`. Optionally, the request can also include an array of PublicKeys associated with the AccountIdentifiers returned in ConstructionPreprocessResponse. *) let create (network_identifier : Network_identifier.t) (operations : Operation.t list) : t = - {network_identifier; operations; metadata= None; public_keys= []} + { network_identifier; operations; metadata = None; public_keys = [] } diff --git a/src/lib/rosetta_models/construction_payloads_response.ml b/src/lib/rosetta_models/construction_payloads_response.ml index 47dd47bbd1a..7e299aa39b3 100644 --- a/src/lib/rosetta_models/construction_payloads_response.ml +++ b/src/lib/rosetta_models/construction_payloads_response.ml @@ -6,10 +6,10 @@ * Schema Construction_payloads_response.t : ConstructionTransactionResponse is returned by `/construction/payloads`. 
It contains an unsigned transaction blob (that is usually needed to construct the a network transaction from a collection of signatures) and an array of payloads that must be signed by the caller. *) -type t = {unsigned_transaction: string; payloads: Signing_payload.t list} -[@@deriving yojson {strict= false}, show] +type t = { unsigned_transaction : string; payloads : Signing_payload.t list } +[@@deriving yojson { strict = false }, show] (** ConstructionTransactionResponse is returned by `/construction/payloads`. It contains an unsigned transaction blob (that is usually needed to construct the a network transaction from a collection of signatures) and an array of payloads that must be signed by the caller. *) -let create (unsigned_transaction : string) (payloads : Signing_payload.t list) - : t = - {unsigned_transaction; payloads} +let create (unsigned_transaction : string) (payloads : Signing_payload.t list) : + t = + { unsigned_transaction; payloads } diff --git a/src/lib/rosetta_models/construction_preprocess_request.ml b/src/lib/rosetta_models/construction_preprocess_request.ml index 0e42d91c971..d8ab5f3643d 100644 --- a/src/lib/rosetta_models/construction_preprocess_request.ml +++ b/src/lib/rosetta_models/construction_preprocess_request.ml @@ -7,18 +7,20 @@ *) type t = - { network_identifier: Network_identifier.t - ; operations: Operation.t list - ; metadata: Yojson.Safe.t option [@default None] - ; max_fee: Amount.t list - ; suggested_fee_multiplier: float option [@default None] } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; operations : Operation.t list + ; metadata : Yojson.Safe.t option [@default None] + ; max_fee : Amount.t list + ; suggested_fee_multiplier : float option [@default None] + } +[@@deriving yojson { strict = false }, show] (** ConstructionPreprocessRequest is passed to the `/construction/preprocess` endpoint so that a Rosetta implementation can determine which metadata it needs to request for 
construction. Metadata provided in this object should NEVER be a product of live data (i.e. the caller must follow some network-specific data fetching strategy outside of the Construction API to populate required Metadata). If live data is required for construction, it MUST be fetched in the call to `/construction/metadata`. The caller can provide a max fee they are willing to pay for a transaction. This is an array in the case fees must be paid in multiple currencies. The caller can also provide a suggested fee multiplier to indicate that the suggested fee should be scaled. This may be used to set higher fees for urgent transactions or to pay lower fees when there is less urgency. It is assumed that providing a very low multiplier (like 0.0001) will never lead to a transaction being created with a fee less than the minimum network fee (if applicable). In the case that the caller provides both a max fee and a suggested fee multiplier, the max fee will set an upper bound on the suggested fee (regardless of the multiplier provided). *) let create (network_identifier : Network_identifier.t) (operations : Operation.t list) : t = { network_identifier ; operations - ; metadata= None - ; max_fee= [] - ; suggested_fee_multiplier= None } + ; metadata = None + ; max_fee = [] + ; suggested_fee_multiplier = None + } diff --git a/src/lib/rosetta_models/construction_preprocess_response.ml b/src/lib/rosetta_models/construction_preprocess_response.ml index 999e7edd93c..61c6b01bd72 100644 --- a/src/lib/rosetta_models/construction_preprocess_response.ml +++ b/src/lib/rosetta_models/construction_preprocess_response.ml @@ -8,9 +8,10 @@ type t = { (* The options that will be sent directly to `/construction/metadata` by the caller. 
*) - options: Yojson.Safe.t option [@default None] - ; required_public_keys: Account_identifier.t list } -[@@deriving yojson {strict= false}, show] + options : Yojson.Safe.t option [@default None] + ; required_public_keys : Account_identifier.t list + } +[@@deriving yojson { strict = false }, show] (** ConstructionPreprocessResponse contains `options` that will be sent unmodified to `/construction/metadata`. If it is not necessary to make a request to `/construction/metadata`, `options` should be omitted. Some blockchains require the PublicKey of particular AccountIdentifiers to construct a valid transaction. To fetch these PublicKeys, populate `required_public_keys` with the AccountIdentifiers associated with the desired PublicKeys. If it is not necessary to retrieve any PublicKeys for construction, `required_public_keys` should be omitted. *) -let create () : t = {options= None; required_public_keys= []} +let create () : t = { options = None; required_public_keys = [] } diff --git a/src/lib/rosetta_models/construction_submit_request.ml b/src/lib/rosetta_models/construction_submit_request.ml index 1b18c1c6e8c..163caf9ebd2 100644 --- a/src/lib/rosetta_models/construction_submit_request.ml +++ b/src/lib/rosetta_models/construction_submit_request.ml @@ -6,10 +6,11 @@ * Schema Construction_submit_request.t : The transaction submission request includes a signed transaction. *) -type t = {network_identifier: Network_identifier.t; signed_transaction: string} -[@@deriving yojson {strict= false}, show] +type t = + { network_identifier : Network_identifier.t; signed_transaction : string } +[@@deriving yojson { strict = false }, show] (** The transaction submission request includes a signed transaction. 
*) let create (network_identifier : Network_identifier.t) (signed_transaction : string) : t = - {network_identifier; signed_transaction} + { network_identifier; signed_transaction } diff --git a/src/lib/rosetta_models/construction_submit_response.ml b/src/lib/rosetta_models/construction_submit_response.ml index 61539cde581..086e41e7544 100644 --- a/src/lib/rosetta_models/construction_submit_response.ml +++ b/src/lib/rosetta_models/construction_submit_response.ml @@ -7,10 +7,11 @@ *) type t = - { transaction_identifier: Transaction_identifier.t - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + { transaction_identifier : Transaction_identifier.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** A TransactionSubmitResponse contains the transaction_identifier of a submitted transaction that was accepted into the mempool. *) let create (transaction_identifier : Transaction_identifier.t) : t = - {transaction_identifier; metadata= None} + { transaction_identifier; metadata = None } diff --git a/src/lib/rosetta_models/currency.ml b/src/lib/rosetta_models/currency.ml index 8b2cfcf8b3b..7e3321a9319 100644 --- a/src/lib/rosetta_models/currency.ml +++ b/src/lib/rosetta_models/currency.ml @@ -8,13 +8,14 @@ type t = { (* Canonical symbol associated with a currency. *) - symbol: string + symbol : string ; (* Number of decimal places in the standard unit representation of the amount. For example, BTC has 8 decimals. Note that it is not possible to represent the value of some currency in atomic units that is not base 10. *) - decimals: int32 + decimals : int32 ; (* Any additional information related to the currency itself. For example, it would be useful to populate this object with the contract address of an ERC-20 token. 
*) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** Currency is composed of a canonical Symbol and Decimals. This Decimals value is used to convert an Amount.Value from atomic units (Satoshis) to standard units (Bitcoins). *) let create (symbol : string) (decimals : int32) : t = - {symbol; decimals; metadata= None} + { symbol; decimals; metadata = None } diff --git a/src/lib/rosetta_models/error.ml b/src/lib/rosetta_models/error.ml index 3d00e51d1a6..33752e60ec3 100644 --- a/src/lib/rosetta_models/error.ml +++ b/src/lib/rosetta_models/error.ml @@ -8,17 +8,18 @@ type t = { (* Code is a network-specific error code. If desired, this code can be equivalent to an HTTP status code. *) - code: int32 + code : int32 ; (* Message is a network-specific error message. The message MUST NOT change for a given code. In particular, this means that any contextual information should be included in the details field. *) - message: string + message : string ; (* Description allows the implementer to optionally provide additional information about an error. In many cases, the content of this field will be a copy-and-paste from existing developer documentation. Description can ONLY be populated with generic information about a particular type of error. It MUST NOT be populated with information about a particular instantiation of an error (use `details` for this). Whereas the content of Error.Message should stay stable across releases, the content of Error.Description will likely change across releases (as implementers improve error documentation). For this reason, the content in this field is not part of any type assertion (unlike Error.Message). *) - description: string option [@default None] + description : string option [@default None] ; (* An error is retriable if the same request may succeed if submitted again. 
*) - retriable: bool + retriable : bool ; (* Often times it is useful to return context specific to the request that caused the error (i.e. a sample of the stack trace or impacted account) in addition to the standard error message. *) - details: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + details : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** Instead of utilizing HTTP status codes to describe node errors (which often do not have a good analog), rich errors are returned using this object. Both the code and message fields can be individually used to correctly identify an error. Implementations MUST use unique values for both fields. *) let create (code : int32) (message : string) (retriable : bool) : t = - {code; message; description= None; retriable; details= None} + { code; message; description = None; retriable; details = None } diff --git a/src/lib/rosetta_models/events_blocks_request.ml b/src/lib/rosetta_models/events_blocks_request.ml index 14feb3e0475..6d60d3eae61 100644 --- a/src/lib/rosetta_models/events_blocks_request.ml +++ b/src/lib/rosetta_models/events_blocks_request.ml @@ -7,13 +7,14 @@ *) type t = - { network_identifier: Network_identifier.t + { network_identifier : Network_identifier.t ; (* offset is the offset into the event stream to sync events from. If this field is not populated, we return the limit events backwards from tip. If this is set to 0, we start from the beginning. *) - offset: int64 option [@default None] + offset : int64 option [@default None] ; (* limit is the maximum number of events to fetch in one call. The implementation may return <= limit events. 
*) - limit: int64 option [@default None] } -[@@deriving yojson {strict= false}, show] + limit : int64 option [@default None] + } +[@@deriving yojson { strict = false }, show] (** EventsBlocksRequest is utilized to fetch a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. *) let create (network_identifier : Network_identifier.t) : t = - {network_identifier; offset= None; limit= None} + { network_identifier; offset = None; limit = None } diff --git a/src/lib/rosetta_models/events_blocks_response.ml b/src/lib/rosetta_models/events_blocks_response.ml index 589f5030533..841f7abbb70 100644 --- a/src/lib/rosetta_models/events_blocks_response.ml +++ b/src/lib/rosetta_models/events_blocks_response.ml @@ -8,11 +8,12 @@ type t = { (* max_sequence is the maximum available sequence number to fetch. *) - max_sequence: int64 + max_sequence : int64 ; (* events is an array of BlockEvents indicating the order to add and remove blocks to maintain a canonical view of blockchain state. Lightweight clients can use this event stream to update state without implementing their own block syncing logic. *) - events: Block_event.t list } -[@@deriving yojson {strict= false}, show] + events : Block_event.t list + } +[@@deriving yojson { strict = false }, show] (** EventsBlocksResponse contains an ordered collection of BlockEvents and the max retrievable sequence. *) let create (max_sequence : int64) (events : Block_event.t list) : t = - {max_sequence; events} + { max_sequence; events } diff --git a/src/lib/rosetta_models/mempool_response.ml b/src/lib/rosetta_models/mempool_response.ml index 06c49eb14a9..54b5c7c1398 100644 --- a/src/lib/rosetta_models/mempool_response.ml +++ b/src/lib/rosetta_models/mempool_response.ml @@ -6,9 +6,9 @@ * Schema Mempool_response.t : A MempoolResponse contains all transaction identifiers in the mempool for a particular network_identifier. 
*) -type t = {transaction_identifiers: Transaction_identifier.t list} -[@@deriving yojson {strict= false}, show] +type t = { transaction_identifiers : Transaction_identifier.t list } +[@@deriving yojson { strict = false }, show] (** A MempoolResponse contains all transaction identifiers in the mempool for a particular network_identifier. *) let create (transaction_identifiers : Transaction_identifier.t list) : t = - {transaction_identifiers} + { transaction_identifiers } diff --git a/src/lib/rosetta_models/mempool_transaction_request.ml b/src/lib/rosetta_models/mempool_transaction_request.ml index e23e887c005..b3d6f0004cb 100644 --- a/src/lib/rosetta_models/mempool_transaction_request.ml +++ b/src/lib/rosetta_models/mempool_transaction_request.ml @@ -7,11 +7,12 @@ *) type t = - { network_identifier: Network_identifier.t - ; transaction_identifier: Transaction_identifier.t } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; transaction_identifier : Transaction_identifier.t + } +[@@deriving yojson { strict = false }, show] (** A MempoolTransactionRequest is utilized to retrieve a transaction from the mempool. 
*) let create (network_identifier : Network_identifier.t) (transaction_identifier : Transaction_identifier.t) : t = - {network_identifier; transaction_identifier} + { network_identifier; transaction_identifier } diff --git a/src/lib/rosetta_models/mempool_transaction_response.ml b/src/lib/rosetta_models/mempool_transaction_response.ml index 2293e08a582..89d42097996 100644 --- a/src/lib/rosetta_models/mempool_transaction_response.ml +++ b/src/lib/rosetta_models/mempool_transaction_response.ml @@ -7,8 +7,10 @@ *) type t = - {transaction: Transaction.t; metadata: Yojson.Safe.t option [@default None]} -[@@deriving yojson {strict= false}, show] + { transaction : Transaction.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** A MempoolTransactionResponse contains an estimate of a mempool transaction. It may not be possible to know the full impact of a transaction in the mempool (ex: fee paid). *) -let create (transaction : Transaction.t) : t = {transaction; metadata= None} +let create (transaction : Transaction.t) : t = { transaction; metadata = None } diff --git a/src/lib/rosetta_models/metadata_request.ml b/src/lib/rosetta_models/metadata_request.ml index e80c9963086..524101d277b 100644 --- a/src/lib/rosetta_models/metadata_request.ml +++ b/src/lib/rosetta_models/metadata_request.ml @@ -6,8 +6,8 @@ * Schema Metadata_request.t : A MetadataRequest is utilized in any request where the only argument is optional metadata. *) -type t = {metadata: Yojson.Safe.t option [@default None]} -[@@deriving yojson {strict= false}, show] +type t = { metadata : Yojson.Safe.t option [@default None] } +[@@deriving yojson { strict = false }, show] (** A MetadataRequest is utilized in any request where the only argument is optional metadata. 
*) -let create () : t = {metadata= None} +let create () : t = { metadata = None } diff --git a/src/lib/rosetta_models/network_identifier.ml b/src/lib/rosetta_models/network_identifier.ml index 8b54385c511..732fb0c6758 100644 --- a/src/lib/rosetta_models/network_identifier.ml +++ b/src/lib/rosetta_models/network_identifier.ml @@ -7,12 +7,13 @@ *) type t = - { blockchain: string + { blockchain : string ; (* If a blockchain has a specific chain-id or network identifier, it should go in this field. It is up to the client to determine which network-specific identifier is mainnet or testnet. *) - network: string - ; sub_network_identifier: Sub_network_identifier.t option [@default None] } -[@@deriving yojson {strict= false}, show] + network : string + ; sub_network_identifier : Sub_network_identifier.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** The network_identifier specifies which network a particular object is associated with. *) let create (blockchain : string) (network : string) : t = - {blockchain; network; sub_network_identifier= None} + { blockchain; network; sub_network_identifier = None } diff --git a/src/lib/rosetta_models/network_list_response.ml b/src/lib/rosetta_models/network_list_response.ml index 7f300a3cbb5..3c02d8250f8 100644 --- a/src/lib/rosetta_models/network_list_response.ml +++ b/src/lib/rosetta_models/network_list_response.ml @@ -6,9 +6,9 @@ * Schema Network_list_response.t : A NetworkListResponse contains all NetworkIdentifiers that the node can serve information for. *) -type t = {network_identifiers: Network_identifier.t list} -[@@deriving yojson {strict= false}, show] +type t = { network_identifiers : Network_identifier.t list } +[@@deriving yojson { strict = false }, show] (** A NetworkListResponse contains all NetworkIdentifiers that the node can serve information for. 
*) let create (network_identifiers : Network_identifier.t list) : t = - {network_identifiers} + { network_identifiers } diff --git a/src/lib/rosetta_models/network_options_response.ml b/src/lib/rosetta_models/network_options_response.ml index 2e1e919f985..8abb347564f 100644 --- a/src/lib/rosetta_models/network_options_response.ml +++ b/src/lib/rosetta_models/network_options_response.ml @@ -6,8 +6,8 @@ * Schema Network_options_response.t : NetworkOptionsResponse contains information about the versioning of the node and the allowed operation statuses, operation types, and errors. *) -type t = {version: Version.t; allow: Allow.t} -[@@deriving yojson {strict= false}, show] +type t = { version : Version.t; allow : Allow.t } +[@@deriving yojson { strict = false }, show] (** NetworkOptionsResponse contains information about the versioning of the node and the allowed operation statuses, operation types, and errors. *) -let create (version : Version.t) (allow : Allow.t) : t = {version; allow} +let create (version : Version.t) (allow : Allow.t) : t = { version; allow } diff --git a/src/lib/rosetta_models/network_request.ml b/src/lib/rosetta_models/network_request.ml index c9654c173b7..4c0771a0ca1 100644 --- a/src/lib/rosetta_models/network_request.ml +++ b/src/lib/rosetta_models/network_request.ml @@ -7,10 +7,11 @@ *) type t = - { network_identifier: Network_identifier.t - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + { network_identifier : Network_identifier.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** A NetworkRequest is utilized to retrieve some data specific exclusively to a NetworkIdentifier. 
*) let create (network_identifier : Network_identifier.t) : t = - {network_identifier; metadata= None} + { network_identifier; metadata = None } diff --git a/src/lib/rosetta_models/network_status_response.ml b/src/lib/rosetta_models/network_status_response.ml index ee7eab6eefb..33423a8cf20 100644 --- a/src/lib/rosetta_models/network_status_response.ml +++ b/src/lib/rosetta_models/network_status_response.ml @@ -7,13 +7,14 @@ *) type t = - { current_block_identifier: Block_identifier.t - ; current_block_timestamp: Timestamp.t - ; genesis_block_identifier: Block_identifier.t - ; oldest_block_identifier: Block_identifier.t option [@default None] - ; sync_status: Sync_status.t option [@default None] - ; peers: Peer.t list } -[@@deriving yojson {strict= false}, show] + { current_block_identifier : Block_identifier.t + ; current_block_timestamp : Timestamp.t + ; genesis_block_identifier : Block_identifier.t + ; oldest_block_identifier : Block_identifier.t option [@default None] + ; sync_status : Sync_status.t option [@default None] + ; peers : Peer.t list + } +[@@deriving yojson { strict = false }, show] (** NetworkStatusResponse contains basic information about the node's view of a blockchain network. It is assumed that any BlockIdentifier.Index less than or equal to CurrentBlockIdentifier.Index can be queried. If a Rosetta implementation prunes historical state, it should populate the optional `oldest_block_identifier` field with the oldest block available to query. If this is not populated, it is assumed that the `genesis_block_identifier` is the oldest queryable block. If a Rosetta implementation performs some pre-sync before it is possible to query blocks, sync_status should be populated so that clients can still monitor healthiness. Without this field, it may appear that the implementation is stuck syncing and needs to be terminated. 
*) let create (current_block_identifier : Block_identifier.t) @@ -22,6 +23,7 @@ let create (current_block_identifier : Block_identifier.t) { current_block_identifier ; current_block_timestamp ; genesis_block_identifier - ; oldest_block_identifier= None - ; sync_status= None - ; peers } + ; oldest_block_identifier = None + ; sync_status = None + ; peers + } diff --git a/src/lib/rosetta_models/operation.ml b/src/lib/rosetta_models/operation.ml index 6170064aa02..155d8354d37 100644 --- a/src/lib/rosetta_models/operation.ml +++ b/src/lib/rosetta_models/operation.ml @@ -7,27 +7,29 @@ *) type t = - { operation_identifier: Operation_identifier.t + { operation_identifier : Operation_identifier.t ; (* Restrict referenced related_operations to identifier indexes < the current operation_identifier.index. This ensures there exists a clear DAG-structure of relations. Since operations are one-sided, one could imagine relating operations in a single transfer or linking operations in a call tree. *) - related_operations: Operation_identifier.t list + related_operations : Operation_identifier.t list ; (* Type is the network-specific type of the operation. Ensure that any type that can be returned here is also specified in the NetworkOptionsResponse. This can be very useful to downstream consumers that parse all block data. *) - _type: string [@key "type"] + _type : string [@key "type"] ; (* Status is the network-specific status of the operation. Status is not defined on the transaction object because blockchains with smart contracts may have transactions that partially apply (some operations are successful and some are not). Blockchains with atomic transactions (all operations succeed or all operations fail) will have the same status for each operation. On-chain operations (operations retrieved in the `/block` and `/block/transaction` endpoints) MUST have a populated status field (anything on-chain must have succeeded or failed). 
However, operations provided during transaction construction (often times called \''intent\'' in the documentation) MUST NOT have a populated status field (operations yet to be included on-chain have not yet succeeded or failed). *) - status: string option [@default None] - ; account: Account_identifier.t option [@default None] - ; amount: Amount.t option [@default None] - ; coin_change: Coin_change.t option [@default None] - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + status : string option [@default None] + ; account : Account_identifier.t option [@default None] + ; amount : Amount.t option [@default None] + ; coin_change : Coin_change.t option [@default None] + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** Operations contain all balance-changing information within a transaction. They are always one-sided (only affect 1 AccountIdentifier) and can succeed or fail independently from a Transaction. Operations are used both to represent on-chain data (Data API) and to construct new transactions (Construction API), creating a standard interface for reading and writing to blockchains. *) let create (operation_identifier : Operation_identifier.t) (_type : string) : t = { operation_identifier - ; related_operations= [] + ; related_operations = [] ; _type - ; status= None - ; account= None - ; amount= None - ; coin_change= None - ; metadata= None } + ; status = None + ; account = None + ; amount = None + ; coin_change = None + ; metadata = None + } diff --git a/src/lib/rosetta_models/operation_identifier.ml b/src/lib/rosetta_models/operation_identifier.ml index 912acfe2400..06d416db8c7 100644 --- a/src/lib/rosetta_models/operation_identifier.ml +++ b/src/lib/rosetta_models/operation_identifier.ml @@ -8,10 +8,11 @@ type t = { (* The operation index is used to ensure each operation has a unique identifier within a transaction. 
This index is only relative to the transaction and NOT GLOBAL. The operations in each transaction should start from index 0. To clarify, there may not be any notion of an operation index in the blockchain being described. *) - index: int64 + index : int64 ; (* Some blockchains specify an operation index that is essential for client use. For example, Bitcoin uses a network_index to identify which UTXO was used in a transaction. network_index should not be populated if there is no notion of an operation index in a blockchain (typically most account-based blockchains). *) - network_index: int64 option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + network_index : int64 option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** The operation_identifier uniquely identifies an operation within a transaction. *) -let create (index : int64) : t = {index; network_index= None} +let create (index : int64) : t = { index; network_index = None } diff --git a/src/lib/rosetta_models/operation_status.ml b/src/lib/rosetta_models/operation_status.ml index 16402929f4e..3ea8add9d74 100644 --- a/src/lib/rosetta_models/operation_status.ml +++ b/src/lib/rosetta_models/operation_status.ml @@ -8,10 +8,11 @@ type t = { (* The status is the network-specific status of the operation. *) - status: string + status : string ; (* An Operation is considered successful if the Operation.Amount should affect the Operation.Account. Some blockchains (like Bitcoin) only include successful operations in blocks but other blockchains (like Ethereum) include unsuccessful operations that incur a fee. To reconcile the computed balance from the stream of Operations, it is critical to understand which Operation.Status indicate an Operation is successful and should affect an Account. 
*) - successful: bool } -[@@deriving yojson {strict= false}, show] + successful : bool + } +[@@deriving yojson { strict = false }, show] (** OperationStatus is utilized to indicate which Operation status are considered successful. *) -let create (status : string) (successful : bool) : t = {status; successful} +let create (status : string) (successful : bool) : t = { status; successful } diff --git a/src/lib/rosetta_models/partial_block_identifier.ml b/src/lib/rosetta_models/partial_block_identifier.ml index 9682239327d..0f8286bf353 100644 --- a/src/lib/rosetta_models/partial_block_identifier.ml +++ b/src/lib/rosetta_models/partial_block_identifier.ml @@ -7,8 +7,8 @@ *) type t = - {index: int64 option [@default None]; hash: string option [@default None]} -[@@deriving yojson {strict= false}, show] + { index : int64 option [@default None]; hash : string option [@default None] } +[@@deriving yojson { strict = false }, show] (** When fetching data by BlockIdentifier, it may be possible to only specify the index or hash. If neither property is specified, it is assumed that the client is making a request at the current block. *) -let create () : t = {index= None; hash= None} +let create () : t = { index = None; hash = None } diff --git a/src/lib/rosetta_models/peer.ml b/src/lib/rosetta_models/peer.ml index 43219a9ec20..4f325d68cc1 100644 --- a/src/lib/rosetta_models/peer.ml +++ b/src/lib/rosetta_models/peer.ml @@ -6,8 +6,8 @@ * Schema Peer.t : A Peer is a representation of a node's peer. *) -type t = {peer_id: string; metadata: Yojson.Safe.t option [@default None]} -[@@deriving yojson {strict= false}, show] +type t = { peer_id : string; metadata : Yojson.Safe.t option [@default None] } +[@@deriving yojson { strict = false }, show] (** A Peer is a representation of a node's peer. 
*) -let create (peer_id : string) : t = {peer_id; metadata= None} +let create (peer_id : string) : t = { peer_id; metadata = None } diff --git a/src/lib/rosetta_models/public_key.ml b/src/lib/rosetta_models/public_key.ml index 5f632550c1f..d73c2a4799f 100644 --- a/src/lib/rosetta_models/public_key.ml +++ b/src/lib/rosetta_models/public_key.ml @@ -8,10 +8,11 @@ type t = { (* Hex-encoded public key bytes in the format specified by the CurveType. *) - hex_bytes: string - ; curve_type: Enums.curvetype } -[@@deriving yojson {strict= false}, show] + hex_bytes : string + ; curve_type : Enums.curvetype + } +[@@deriving yojson { strict = false }, show] (** PublicKey contains a public key byte array for a particular CurveType encoded in hex. Note that there is no PrivateKey struct as this is NEVER the concern of an implementation. *) let create (hex_bytes : string) (curve_type : Enums.curvetype) : t = - {hex_bytes; curve_type} + { hex_bytes; curve_type } diff --git a/src/lib/rosetta_models/search_transactions_request.ml b/src/lib/rosetta_models/search_transactions_request.ml index 270cb6cba95..333f1d8cba5 100644 --- a/src/lib/rosetta_models/search_transactions_request.ml +++ b/src/lib/rosetta_models/search_transactions_request.ml @@ -7,40 +7,42 @@ *) type t = - { network_identifier: Network_identifier.t - ; operator: Enums.operator option [@default None] + { network_identifier : Network_identifier.t + ; operator : Enums.operator option [@default None] ; (* max_block is the largest block index to consider when searching for transactions. If this field is not populated, the current block is considered the max_block. If you do not specify a max_block, it is possible a newly synced block will interfere with paginated transaction queries (as the offset could become invalid with newly added rows). *) - max_block: int64 option [@default None] + max_block : int64 option [@default None] ; (* offset is the offset into the query result to start returning transactions. 
If any search conditions are changed, the query offset will change and you must restart your search iteration. *) - offset: int64 option [@default None] + offset : int64 option [@default None] ; (* limit is the maximum number of transactions to return in one call. The implementation may return <= limit transactions. *) - limit: int64 option [@default None] - ; transaction_identifier: Transaction_identifier.t option [@default None] - ; account_identifier: Account_identifier.t option [@default None] - ; coin_identifier: Coin_identifier.t option [@default None] - ; currency: Currency.t option [@default None] + limit : int64 option [@default None] + ; transaction_identifier : Transaction_identifier.t option [@default None] + ; account_identifier : Account_identifier.t option [@default None] + ; coin_identifier : Coin_identifier.t option [@default None] + ; currency : Currency.t option [@default None] ; (* status is the network-specific operation type. *) - status: string option [@default None] + status : string option [@default None] ; (* type is the network-specific operation type. *) - _type: string option [@default None] + _type : string option [@default None] ; (* address is AccountIdentifier.Address. This is used to get all transactions related to an AccountIdentifier.Address, regardless of SubAccountIdentifier. *) - address: string option [@default None] + address : string option [@default None] ; (* success is a synthetic condition populated by parsing network-specific operation statuses (using the mapping provided in `/network/options`). *) - success: bool option [@default None] } -[@@deriving yojson {strict= false}, show] + success : bool option [@default None] + } +[@@deriving yojson { strict = false }, show] (** SearchTransactionsRequest is used to search for transactions matching a set of provided conditions in canonical blocks. 
*) let create (network_identifier : Network_identifier.t) : t = { network_identifier - ; operator= None - ; max_block= None - ; offset= None - ; limit= None - ; transaction_identifier= None - ; account_identifier= None - ; coin_identifier= None - ; currency= None - ; status= None - ; _type= None - ; address= None - ; success= None } + ; operator = None + ; max_block = None + ; offset = None + ; limit = None + ; transaction_identifier = None + ; account_identifier = None + ; coin_identifier = None + ; currency = None + ; status = None + ; _type = None + ; address = None + ; success = None + } diff --git a/src/lib/rosetta_models/search_transactions_response.ml b/src/lib/rosetta_models/search_transactions_response.ml index 085ae1ded1c..a90104221ed 100644 --- a/src/lib/rosetta_models/search_transactions_response.ml +++ b/src/lib/rosetta_models/search_transactions_response.ml @@ -8,11 +8,12 @@ type t = { (* next_offset is the next offset to use when paginating through transaction results. If this field is not populated, there are no more transactions to query. *) - next_offset: int64 option [@default None] + next_offset : int64 option [@default None] ; (* transactions is an array of BlockTransactions sorted by most recent BlockIdentifier (meaning that transactions in recent blocks appear first). If there are many transactions for a particular search, transactions may not contain all matching transactions. It is up to the caller to paginate these transactions using the max_block field. *) - transactions: Block_transaction.t list } -[@@deriving yojson {strict= false}, show] + transactions : Block_transaction.t list + } +[@@deriving yojson { strict = false }, show] (** SearchTransactionsResponse contains an ordered collection of BlockTransactions that match the query in SearchTransactionsRequest. These BlockTransactions are sorted from most recent block to oldest block. 
*) let create (transactions : Block_transaction.t list) : t = - {next_offset= None; transactions} + { next_offset = None; transactions } diff --git a/src/lib/rosetta_models/signature.ml b/src/lib/rosetta_models/signature.ml index f99796f741c..3d5909f0d41 100644 --- a/src/lib/rosetta_models/signature.ml +++ b/src/lib/rosetta_models/signature.ml @@ -7,13 +7,14 @@ *) type t = - { signing_payload: Signing_payload.t - ; public_key: Public_key.t - ; signature_type: Enums.signaturetype - ; hex_bytes: string } -[@@deriving yojson {strict= false}, show] + { signing_payload : Signing_payload.t + ; public_key : Public_key.t + ; signature_type : Enums.signaturetype + ; hex_bytes : string + } +[@@deriving yojson { strict = false }, show] (** Signature contains the payload that was signed, the public keys of the keypairs used to produce the signature, the signature (encoded in hex), and the SignatureType. PublicKey is often times not known during construction of the signing payloads but may be needed to combine signatures properly. *) let create (signing_payload : Signing_payload.t) (public_key : Public_key.t) (signature_type : Enums.signaturetype) (hex_bytes : string) : t = - {signing_payload; public_key; signature_type; hex_bytes} + { signing_payload; public_key; signature_type; hex_bytes } diff --git a/src/lib/rosetta_models/signing_payload.ml b/src/lib/rosetta_models/signing_payload.ml index 8d5f6c31897..68dd4d82d0e 100644 --- a/src/lib/rosetta_models/signing_payload.ml +++ b/src/lib/rosetta_models/signing_payload.ml @@ -8,12 +8,17 @@ type t = { (* [DEPRECATED by `account_identifier` in `v1.4.4`] The network-specific address of the account that should sign the payload. 
*) - address: string option [@default None] - ; account_identifier: Account_identifier.t option [@default None] - ; hex_bytes: string - ; signature_type: Enums.signaturetype option [@default None] } -[@@deriving yojson {strict= false}, show] + address : string option [@default None] + ; account_identifier : Account_identifier.t option [@default None] + ; hex_bytes : string + ; signature_type : Enums.signaturetype option [@default None] + } +[@@deriving yojson { strict = false }, show] (** SigningPayload is signed by the client with the keypair associated with an AccountIdentifier using the specified SignatureType. SignatureType can be optionally populated if there is a restriction on the signature scheme that can be used to sign the payload. *) let create (hex_bytes : string) : t = - {address= None; account_identifier= None; hex_bytes; signature_type= None} + { address = None + ; account_identifier = None + ; hex_bytes + ; signature_type = None + } diff --git a/src/lib/rosetta_models/sub_account_identifier.ml b/src/lib/rosetta_models/sub_account_identifier.ml index 692e6b4c639..3986c333788 100644 --- a/src/lib/rosetta_models/sub_account_identifier.ml +++ b/src/lib/rosetta_models/sub_account_identifier.ml @@ -8,10 +8,11 @@ type t = { (* The SubAccount address may be a cryptographic value or some other identifier (ex: bonded) that uniquely specifies a SubAccount. *) - address: string + address : string ; (* If the SubAccount address is not sufficient to uniquely specify a SubAccount, any other identifying information can be stored here. It is important to note that two SubAccounts with identical addresses but differing metadata will not be considered equal by clients. 
*) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show, eq] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show, eq] (** An account may have state specific to a contract address (ERC-20 token) and/or a stake (delegated balance). The sub_account_identifier should specify which state (if applicable) an account instantiation refers to. *) -let create (address : string) : t = {address; metadata= None} +let create (address : string) : t = { address; metadata = None } diff --git a/src/lib/rosetta_models/sub_network_identifier.ml b/src/lib/rosetta_models/sub_network_identifier.ml index 02455099398..c15aca67e6d 100644 --- a/src/lib/rosetta_models/sub_network_identifier.ml +++ b/src/lib/rosetta_models/sub_network_identifier.ml @@ -6,8 +6,8 @@ * Schema Sub_network_identifier.t : In blockchains with sharded state, the SubNetworkIdentifier is required to query some object on a specific shard. This identifier is optional for all non-sharded blockchains. *) -type t = {network: string; metadata: Yojson.Safe.t option [@default None]} -[@@deriving yojson {strict= false}, show] +type t = { network : string; metadata : Yojson.Safe.t option [@default None] } +[@@deriving yojson { strict = false }, show] (** In blockchains with sharded state, the SubNetworkIdentifier is required to query some object on a specific shard. This identifier is optional for all non-sharded blockchains. *) -let create (network : string) : t = {network; metadata= None} +let create (network : string) : t = { network; metadata = None } diff --git a/src/lib/rosetta_models/sync_status.ml b/src/lib/rosetta_models/sync_status.ml index 816a84396c1..7f3ab7d8859 100644 --- a/src/lib/rosetta_models/sync_status.ml +++ b/src/lib/rosetta_models/sync_status.ml @@ -8,13 +8,14 @@ type t = { (* CurrentIndex is the index of the last synced block in the current stage. 
*) - current_index: int64 + current_index : int64 ; (* TargetIndex is the index of the block that the implementation is attempting to sync to in the current stage. *) - target_index: int64 option [@default None] + target_index : int64 option [@default None] ; (* Stage is the phase of the sync process. *) - stage: string option [@default None] } -[@@deriving yojson {strict= false}, show] + stage : string option [@default None] + } +[@@deriving yojson { strict = false }, show] (** SyncStatus is used to provide additional context about an implementation's sync status. It is often used to indicate that an implementation is healthy when it cannot be queried until some sync phase occurs. If an implementation is immediately queryable, this model is often not populated. *) let create (current_index : int64) : t = - {current_index; target_index= None; stage= None} + { current_index; target_index = None; stage = None } diff --git a/src/lib/rosetta_models/timestamp.ml b/src/lib/rosetta_models/timestamp.ml index 9da4fa0a6bb..344b022e34a 100644 --- a/src/lib/rosetta_models/timestamp.ml +++ b/src/lib/rosetta_models/timestamp.ml @@ -1 +1 @@ -type t = int64 [@@deriving yojson {strict= false}, show] +type t = int64 [@@deriving yojson { strict = false }, show] diff --git a/src/lib/rosetta_models/transaction.ml b/src/lib/rosetta_models/transaction.ml index 7699aafdebc..2ee18be197c 100644 --- a/src/lib/rosetta_models/transaction.ml +++ b/src/lib/rosetta_models/transaction.ml @@ -7,13 +7,14 @@ *) type t = - { transaction_identifier: Transaction_identifier.t - ; operations: Operation.t list + { transaction_identifier : Transaction_identifier.t + ; operations : Operation.t list ; (* Transactions that are related to other transactions (like a cross-shard transaction) should include the tranaction_identifier of these transactions in the metadata. 
*) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** Transactions contain an array of Operations that are attributable to the same TransactionIdentifier. *) let create (transaction_identifier : Transaction_identifier.t) (operations : Operation.t list) : t = - {transaction_identifier; operations; metadata= None} + { transaction_identifier; operations; metadata = None } diff --git a/src/lib/rosetta_models/transaction_identifier.ml b/src/lib/rosetta_models/transaction_identifier.ml index 12877e3ebfa..b7c1aca7965 100644 --- a/src/lib/rosetta_models/transaction_identifier.ml +++ b/src/lib/rosetta_models/transaction_identifier.ml @@ -8,8 +8,9 @@ type t = { (* Any transactions that are attributable only to a block (ex: a block event) should use the hash of the block as the identifier. *) - hash: string } -[@@deriving yojson {strict= false}, show] + hash : string + } +[@@deriving yojson { strict = false }, show] (** The transaction_identifier uniquely identifies a transaction in a particular network and block or in the mempool. 
*) -let create (hash : string) : t = {hash} +let create (hash : string) : t = { hash } diff --git a/src/lib/rosetta_models/transaction_identifier_response.ml b/src/lib/rosetta_models/transaction_identifier_response.ml index 735e8d11a83..f3aa5aee9b0 100644 --- a/src/lib/rosetta_models/transaction_identifier_response.ml +++ b/src/lib/rosetta_models/transaction_identifier_response.ml @@ -7,10 +7,11 @@ *) type t = - { transaction_identifier: Transaction_identifier.t - ; metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + { transaction_identifier : Transaction_identifier.t + ; metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** TransactionIdentifierResponse contains the transaction_identifier of a transaction that was submitted to either `/construction/hash` or `/construction/submit`. *) let create (transaction_identifier : Transaction_identifier.t) : t = - {transaction_identifier; metadata= None} + { transaction_identifier; metadata = None } diff --git a/src/lib/rosetta_models/version.ml b/src/lib/rosetta_models/version.ml index 766c4fd7bbf..508aaae7630 100644 --- a/src/lib/rosetta_models/version.ml +++ b/src/lib/rosetta_models/version.ml @@ -8,15 +8,16 @@ type t = { (* The rosetta_version is the version of the Rosetta interface the implementation adheres to. This can be useful for clients looking to reliably parse responses. *) - rosetta_version: string + rosetta_version : string ; (* The node_version is the canonical version of the node runtime. This can help clients manage deployments. *) - node_version: string + node_version : string ; (* When a middleware server is used to adhere to the Rosetta interface, it should return its version here. This can help clients manage deployments. 
*) - middleware_version: string option [@default None] + middleware_version : string option [@default None] ; (* Any other information that may be useful about versioning of dependent services should be returned here. *) - metadata: Yojson.Safe.t option [@default None] } -[@@deriving yojson {strict= false}, show] + metadata : Yojson.Safe.t option [@default None] + } +[@@deriving yojson { strict = false }, show] (** The Version object is utilized to inform the client of the versions of different components of the Rosetta implementation. *) let create (rosetta_version : string) (node_version : string) : t = - {rosetta_version; node_version; middleware_version= None; metadata= None} + { rosetta_version; node_version; middleware_version = None; metadata = None } diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml index 4cc0550c242..d3d01658b41 100644 --- a/src/lib/runtime_config/runtime_config.ml +++ b/src/lib/runtime_config/runtime_config.ml @@ -2,9 +2,10 @@ open Core_kernel module Fork_config = struct type t = - { previous_state_hash: string - ; previous_length: int - ; previous_global_slot: int } + { previous_state_hash : string + ; previous_length : int + ; previous_global_slot : int + } [@@deriving yojson, dhall_type, bin_io_unversioned] end @@ -12,7 +13,7 @@ let yojson_strip_fields ~keep_fields = function | `Assoc l -> `Assoc (List.filter l ~f:(fun (fld, _) -> - Array.mem ~equal:String.equal keep_fields fld )) + Array.mem ~equal:String.equal keep_fields fld)) | json -> json @@ -23,9 +24,9 @@ let yojson_rename_fields ~alternates = function let fld = Option.value ~default:fld (Array.find_map alternates ~f:(fun (alt, orig) -> - if String.equal fld alt then Some orig else None )) + if String.equal fld alt then Some orig else None)) in - (fld, json) )) + (fld, json))) | json -> json @@ -41,8 +42,7 @@ let result_opt ~f x = let dump_on_error yojson x = Result.map_error x ~f:(fun str -> - str ^ "\n\nCould not parse JSON:\n" ^ 
Yojson.Safe.pretty_to_string yojson - ) + str ^ "\n\nCould not parse JSON:\n" ^ Yojson.Safe.pretty_to_string yojson) let of_yojson_generic ~fields of_yojson json = dump_on_error json @@ of_yojson @@ -53,11 +53,12 @@ module Json_layout = struct module Single = struct module Timed = struct type t = - { initial_minimum_balance: Currency.Balance.t - ; cliff_time: Mina_numbers.Global_slot.t - ; cliff_amount: Currency.Amount.t - ; vesting_period: Mina_numbers.Global_slot.t - ; vesting_increment: Currency.Amount.t } + { initial_minimum_balance : Currency.Balance.t + ; cliff_time : Mina_numbers.Global_slot.t + ; cliff_amount : Currency.Amount.t + ; vesting_period : Mina_numbers.Global_slot.t + ; vesting_increment : Currency.Amount.t + } [@@deriving yojson, dhall_type, sexp] let fields = @@ -65,7 +66,8 @@ module Json_layout = struct ; "cliff_time" ; "cliff_amount" ; "vesting_period" - ; "vesting_increment" |] + ; "vesting_increment" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json end @@ -91,34 +93,35 @@ module Json_layout = struct let of_yojson = function | `String s -> ( - match String.lowercase s with - | "none" -> - Ok None - | "either" -> - Ok Either - | "proof" -> - Ok Proof - | "signature" -> - Ok Signature - | "both" -> - Ok Both - | "impossible" -> - Ok Impossible - | _ -> - Error (sprintf "Invalid Auth_required.t value: %s" s) ) + match String.lowercase s with + | "none" -> + Ok None + | "either" -> + Ok Either + | "proof" -> + Ok Proof + | "signature" -> + Ok Signature + | "both" -> + Ok Both + | "impossible" -> + Ok Impossible + | _ -> + Error (sprintf "Invalid Auth_required.t value: %s" s) ) | _ -> Error "Runtime_config.Json_Account.Single.Permissions.Auth_Required.t" end type t = - { stake: bool [@default false] - ; edit_state: Auth_required.t [@default None] - ; send: Auth_required.t [@default None] - ; receive: Auth_required.t [@default None] - ; set_delegate: Auth_required.t [@default None] - ; set_permissions: Auth_required.t [@default 
None] - ; set_verification_key: Auth_required.t [@default None] } + { stake : bool [@default false] + ; edit_state : Auth_required.t [@default None] + ; send : Auth_required.t [@default None] + ; receive : Auth_required.t [@default None] + ; set_delegate : Auth_required.t [@default None] + ; set_permissions : Auth_required.t [@default None] + ; set_verification_key : Auth_required.t [@default None] + } [@@deriving yojson, dhall_type, sexp, bin_io_unversioned] let fields = @@ -128,20 +131,22 @@ module Json_layout = struct ; "receive" ; "set_delegate" ; "set_permissions" - ; "set_verification_key" |] + ; "set_verification_key" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json end module Token_permissions = struct type t = - { token_owned: bool [@default false] - ; account_disabled: bool [@default false] - ; disable_new_accounts: bool [@default false] } + { token_owned : bool [@default false] + ; account_disabled : bool [@default false] + ; disable_new_accounts : bool [@default false] + } [@@deriving yojson, dhall_type, sexp, bin_io_unversioned] let fields = - [|"token_owned"; "account_disabled"; "disable_new_accounts"|] + [| "token_owned"; "account_disabled"; "disable_new_accounts" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end @@ -163,28 +168,29 @@ module Json_layout = struct Error "Invalid Field.t runtime config Snapp_account.state" end - type t = {state: Field.t list; verification_key: string option} + type t = { state : Field.t list; verification_key : string option } [@@deriving sexp, dhall_type, yojson, bin_io_unversioned] - let fields = [|"state"; "verification_key"|] + let fields = [| "state"; "verification_key" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end type t = - { pk: string option [@default None] - ; sk: string option [@default None] - ; balance: Currency.Balance.t - ; delegate: string option [@default None] - ; timing: Timed.t option [@default None] - ; token: Unsigned_extended.UInt64.t 
option [@default None] - ; token_permissions: Token_permissions.t option [@default None] - ; nonce: Mina_numbers.Account_nonce.t + { pk : string option [@default None] + ; sk : string option [@default None] + ; balance : Currency.Balance.t + ; delegate : string option [@default None] + ; timing : Timed.t option [@default None] + ; token : Unsigned_extended.UInt64.t option [@default None] + ; token_permissions : Token_permissions.t option [@default None] + ; nonce : Mina_numbers.Account_nonce.t [@default Mina_numbers.Account_nonce.zero] - ; receipt_chain_hash: string option [@default None] - ; voting_for: string option [@default None] - ; snapp: Snapp_account.t option [@default None] - ; permissions: Permissions.t option [@default None] } + ; receipt_chain_hash : string option [@default None] + ; voting_for : string option [@default None] + ; snapp : Snapp_account.t option [@default None] + ; permissions : Permissions.t option [@default None] + } [@@deriving sexp, yojson, dhall_type] let fields = @@ -199,23 +205,25 @@ module Json_layout = struct ; "receipt_chain_hash" ; "voting_for" ; "snapp" - ; "permissions" |] + ; "permissions" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json let default : t = - { pk= None - ; sk= None - ; balance= Currency.Balance.zero - ; delegate= None - ; timing= None - ; token= None - ; token_permissions= None - ; nonce= Mina_numbers.Account_nonce.zero - ; receipt_chain_hash= None - ; voting_for= None - ; snapp= None - ; permissions= None } + { pk = None + ; sk = None + ; balance = Currency.Balance.zero + ; delegate = None + ; timing = None + ; token = None + ; token_permissions = None + ; nonce = Mina_numbers.Account_nonce.zero + ; receipt_chain_hash = None + ; voting_for = None + ; snapp = None + ; permissions = None + } end type t = Single.t list [@@deriving yojson, dhall_type] @@ -223,17 +231,18 @@ module Json_layout = struct module Ledger = struct module Balance_spec = struct - type t = {number: int; balance: 
Currency.Balance.t} + type t = { number : int; balance : Currency.Balance.t } [@@deriving yojson, dhall_type] end type t = - { accounts: Accounts.t option [@default None] - ; num_accounts: int option [@default None] - ; balances: Balance_spec.t list [@default []] - ; hash: string option [@default None] - ; name: string option [@default None] - ; add_genesis_winner: bool option [@default None] } + { accounts : Accounts.t option [@default None] + ; num_accounts : int option [@default None] + ; balances : Balance_spec.t list [@default []] + ; hash : string option [@default None] + ; name : string option [@default None] + ; add_genesis_winner : bool option [@default None] + } [@@deriving yojson, dhall_type] let fields = @@ -242,7 +251,8 @@ module Json_layout = struct ; "balances" ; "hash" ; "name" - ; "add_genesis_winner" |] + ; "add_genesis_winner" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json end @@ -250,14 +260,15 @@ module Json_layout = struct module Proof_keys = struct module Transaction_capacity = struct type t = - { log_2: int option + { log_2 : int option [@default None] [@key "2_to_the"] [@dhall_type.key "two_to_the"] - ; txns_per_second_x10: int option [@default None] } + ; txns_per_second_x10 : int option [@default None] + } [@@deriving yojson, dhall_type] - let fields = [|"2_to_the"; "txns_per_second_x10"|] + let fields = [| "2_to_the"; "txns_per_second_x10" |] - let alternates = [|("two_to_the", "2_to_the"); ("log_2", "2_to_the")|] + let alternates = [| ("two_to_the", "2_to_the"); ("log_2", "2_to_the") |] let of_yojson json = json @@ -267,16 +278,17 @@ module Json_layout = struct end type t = - { level: string option [@default None] - ; sub_windows_per_window: int option [@default None] - ; ledger_depth: int option [@default None] - ; work_delay: int option [@default None] - ; block_window_duration_ms: int option [@default None] - ; transaction_capacity: Transaction_capacity.t option [@default None] - ; coinbase_amount: 
Currency.Amount.t option [@default None] - ; supercharged_coinbase_factor: int option [@default None] - ; account_creation_fee: Currency.Fee.t option [@default None] - ; fork: Fork_config.t option [@default None] } + { level : string option [@default None] + ; sub_windows_per_window : int option [@default None] + ; ledger_depth : int option [@default None] + ; work_delay : int option [@default None] + ; block_window_duration_ms : int option [@default None] + ; transaction_capacity : Transaction_capacity.t option [@default None] + ; coinbase_amount : Currency.Amount.t option [@default None] + ; supercharged_coinbase_factor : int option [@default None] + ; account_creation_fee : Currency.Fee.t option [@default None] + ; fork : Fork_config.t option [@default None] + } [@@deriving yojson, dhall_type] let fields = @@ -288,18 +300,20 @@ module Json_layout = struct ; "transaction_capacity" ; "coinbase_amount" ; "supercharged_coinbase_factor" - ; "account_creation_fee" |] + ; "account_creation_fee" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json end module Genesis = struct type t = - { k: (int option[@default None]) - ; delta: (int option[@default None]) - ; slots_per_epoch: (int option[@default None]) - ; slots_per_sub_window: (int option[@default None]) - ; genesis_state_timestamp: (string option[@default None]) } + { k : int option [@default None] + ; delta : int option [@default None] + ; slots_per_epoch : int option [@default None] + ; slots_per_sub_window : int option [@default None] + ; genesis_state_timestamp : string option [@default None] + } [@@deriving yojson, dhall_type] let fields = @@ -308,51 +322,55 @@ module Json_layout = struct ; "slots_per_epoch" ; "slots_per_sub_window" ; "sub_window_per_window" - ; "genesis_state_timestamp" |] + ; "genesis_state_timestamp" + |] let of_yojson json = of_yojson_generic ~fields of_yojson json end module Daemon = struct type t = - { txpool_max_size: int option [@default None] - ; peer_list_url: string 
option [@default None] } + { txpool_max_size : int option [@default None] + ; peer_list_url : string option [@default None] + } [@@deriving yojson, dhall_type] - let fields = [|"txpool_max_size"; "peer_list_url"|] + let fields = [| "txpool_max_size"; "peer_list_url" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end module Epoch_data = struct module Data = struct - type t = {accounts: Accounts.t; seed: string} + type t = { accounts : Accounts.t; seed : string } [@@deriving yojson, dhall_type] - let fields = [|"accounts"; "seed"|] + let fields = [| "accounts"; "seed" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end type t = - { staking: Data.t - ; next: (Data.t option[@default None]) (*If None then next = staking*) } + { staking : Data.t + ; next : (Data.t option[@default None]) (*If None then next = staking*) + } [@@deriving yojson, dhall_type] - let fields = [|"staking"; "next"|] + let fields = [| "staking"; "next" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end type t = - { daemon: Daemon.t option [@default None] - ; genesis: Genesis.t option [@default None] - ; proof: Proof_keys.t option [@default None] - ; ledger: Ledger.t option [@default None] - ; epoch_data: Epoch_data.t option [@default None] } + { daemon : Daemon.t option [@default None] + ; genesis : Genesis.t option [@default None] + ; proof : Proof_keys.t option [@default None] + ; ledger : Ledger.t option [@default None] + ; epoch_data : Epoch_data.t option [@default None] + } [@@deriving yojson, dhall_type] - let fields = [|"daemon"; "ledger"; "genesis"; "proof"; "epoch_data"|] + let fields = [| "daemon"; "ledger"; "genesis"; "proof"; "epoch_data" |] let of_yojson json = of_yojson_generic ~fields of_yojson json end @@ -398,11 +416,12 @@ module Accounts = struct module Single = struct module Timed = struct type t = Json_layout.Accounts.Single.Timed.t = - { initial_minimum_balance: Currency.Balance.Stable.Latest.t - ; cliff_time: 
Mina_numbers.Global_slot.Stable.Latest.t - ; cliff_amount: Currency.Amount.Stable.Latest.t - ; vesting_period: Mina_numbers.Global_slot.Stable.Latest.t - ; vesting_increment: Currency.Amount.Stable.Latest.t } + { initial_minimum_balance : Currency.Balance.Stable.Latest.t + ; cliff_time : Mina_numbers.Global_slot.Stable.Latest.t + ; cliff_amount : Currency.Amount.Stable.Latest.t + ; vesting_period : Mina_numbers.Global_slot.Stable.Latest.t + ; vesting_increment : Currency.Amount.Stable.Latest.t + } [@@deriving bin_io_unversioned, sexp] end @@ -411,48 +430,48 @@ module Accounts = struct module Snapp_account = Json_layout.Accounts.Single.Snapp_account type t = Json_layout.Accounts.Single.t = - { pk: string option - ; sk: string option - ; balance: Currency.Balance.Stable.Latest.t - ; delegate: string option - ; timing: Timed.t option - ; token: Unsigned_extended.UInt64.Stable.Latest.t option - ; token_permissions: Token_permissions.t option - ; nonce: Mina_numbers.Account_nonce.Stable.Latest.t - ; receipt_chain_hash: string option - ; voting_for: string option - ; snapp: Snapp_account.t option - ; permissions: Permissions.t option } + { pk : string option + ; sk : string option + ; balance : Currency.Balance.Stable.Latest.t + ; delegate : string option + ; timing : Timed.t option + ; token : Unsigned_extended.UInt64.Stable.Latest.t option + ; token_permissions : Token_permissions.t option + ; nonce : Mina_numbers.Account_nonce.Stable.Latest.t + ; receipt_chain_hash : string option + ; voting_for : string option + ; snapp : Snapp_account.t option + ; permissions : Permissions.t option + } [@@deriving bin_io_unversioned, sexp] let to_json_layout : t -> Json_layout.Accounts.Single.t = Fn.id - let of_json_layout : Json_layout.Accounts.Single.t -> (t, string) Result.t - = + let of_json_layout : Json_layout.Accounts.Single.t -> (t, string) Result.t = Result.return let to_yojson x = Json_layout.Accounts.Single.to_yojson (to_json_layout x) let of_yojson json = - Result.bind 
~f:of_json_layout - (Json_layout.Accounts.Single.of_yojson json) + Result.bind ~f:of_json_layout (Json_layout.Accounts.Single.of_yojson json) let default = Json_layout.Accounts.Single.default end type single = Single.t = - { pk: string option - ; sk: string option - ; balance: Currency.Balance.t - ; delegate: string option - ; timing: Single.Timed.t option - ; token: Unsigned_extended.UInt64.t option - ; token_permissions: Single.Token_permissions.t option - ; nonce: Mina_numbers.Account_nonce.t - ; receipt_chain_hash: string option - ; voting_for: string option - ; snapp: Single.Snapp_account.t option - ; permissions: Single.Permissions.t option } + { pk : string option + ; sk : string option + ; balance : Currency.Balance.t + ; delegate : string option + ; timing : Single.Timed.t option + ; token : Unsigned_extended.UInt64.t option + ; token_permissions : Single.Token_permissions.t option + ; nonce : Mina_numbers.Account_nonce.t + ; receipt_chain_hash : string option + ; voting_for : string option + ; snapp : Single.Snapp_account.t option + ; permissions : Single.Permissions.t option + } type t = Single.t list [@@deriving bin_io_unversioned] @@ -468,7 +487,7 @@ module Accounts = struct | Ok x -> x | Error err -> - raise (Stop err) ) + raise (Stop err)) with Stop err -> Error err let to_yojson x = Json_layout.Accounts.to_yojson (to_json_layout x) @@ -485,34 +504,41 @@ module Ledger = struct [@@deriving bin_io_unversioned] type t = - { base: base - ; num_accounts: int option - ; balances: (int * Currency.Balance.Stable.Latest.t) list - ; hash: string option - ; name: string option - ; add_genesis_winner: bool option } + { base : base + ; num_accounts : int option + ; balances : (int * Currency.Balance.Stable.Latest.t) list + ; hash : string option + ; name : string option + ; add_genesis_winner : bool option + } [@@deriving bin_io_unversioned] let to_json_layout - {base; num_accounts; balances; hash; name; add_genesis_winner} : + { base; num_accounts; balances; 
hash; name; add_genesis_winner } : Json_layout.Ledger.t = let balances = List.map balances ~f:(fun (number, balance) -> - {Json_layout.Ledger.Balance_spec.number; balance} ) + { Json_layout.Ledger.Balance_spec.number; balance }) in let without_base : Json_layout.Ledger.t = - {accounts= None; num_accounts; balances; hash; name; add_genesis_winner} + { accounts = None + ; num_accounts + ; balances + ; hash + ; name + ; add_genesis_winner + } in match base with | Named name -> - {without_base with name= Some name} + { without_base with name = Some name } | Accounts accounts -> - {without_base with accounts= Some (Accounts.to_json_layout accounts)} + { without_base with accounts = Some (Accounts.to_json_layout accounts) } | Hash hash -> - {without_base with hash= Some hash} + { without_base with hash = Some hash } let of_json_layout - ({accounts; num_accounts; balances; hash; name; add_genesis_winner} : + ({ accounts; num_accounts; balances; hash; name; add_genesis_winner } : Json_layout.Ledger.t) : (t, string) Result.t = let open Result.Let_syntax in let%map base = @@ -521,24 +547,24 @@ module Ledger = struct let%map accounts = Accounts.of_json_layout accounts in Accounts accounts | None -> ( - match name with - | Some name -> - return (Named name) - | None -> ( - match hash with - | Some hash -> - return (Hash hash) - | None -> - Error - "Runtime_config.Ledger.of_json_layout: Expected a field \ - 'accounts', 'name' or 'hash'" ) ) + match name with + | Some name -> + return (Named name) + | None -> ( + match hash with + | Some hash -> + return (Hash hash) + | None -> + Error + "Runtime_config.Ledger.of_json_layout: Expected a field \ + 'accounts', 'name' or 'hash'" ) ) in let balances = List.map balances - ~f:(fun {Json_layout.Ledger.Balance_spec.number; balance} -> - (number, balance) ) + ~f:(fun { Json_layout.Ledger.Balance_spec.number; balance } -> + (number, balance)) in - {base; num_accounts; balances; hash; name; add_genesis_winner} + { base; num_accounts; 
balances; hash; name; add_genesis_winner } let to_yojson x = Json_layout.Ledger.to_yojson (to_json_layout x) @@ -574,7 +600,7 @@ module Proof_keys = struct let of_json_layout str = Result.map_error (of_string str) ~f:(fun err -> "Runtime_config.Proof_keys.Level.of_json_layout: Could not decode \ - field 'level'. " ^ err ) + field 'level'. " ^ err) let to_yojson x = `String (to_json_layout x) @@ -594,16 +620,16 @@ module Proof_keys = struct let to_json_layout : t -> Json_layout.Proof_keys.Transaction_capacity.t = function | Log_2 i -> - {log_2= Some i; txns_per_second_x10= None} + { log_2 = Some i; txns_per_second_x10 = None } | Txns_per_second_x10 i -> - {log_2= None; txns_per_second_x10= Some i} + { log_2 = None; txns_per_second_x10 = Some i } let of_json_layout : Json_layout.Proof_keys.Transaction_capacity.t -> (t, string) Result.t = function - | {log_2= Some i; txns_per_second_x10= None} -> + | { log_2 = Some i; txns_per_second_x10 = None } -> Ok (Log_2 i) - | {txns_per_second_x10= Some i; log_2= None} -> + | { txns_per_second_x10 = Some i; log_2 = None } -> Ok (Txns_per_second_x10 i) | _ -> Error @@ -620,16 +646,17 @@ module Proof_keys = struct end type t = - { level: Level.t option - ; sub_windows_per_window: int option - ; ledger_depth: int option - ; work_delay: int option - ; block_window_duration_ms: int option - ; transaction_capacity: Transaction_capacity.t option - ; coinbase_amount: Currency.Amount.Stable.Latest.t option - ; supercharged_coinbase_factor: int option - ; account_creation_fee: Currency.Fee.Stable.Latest.t option - ; fork: Fork_config.t option } + { level : Level.t option + ; sub_windows_per_window : int option + ; ledger_depth : int option + ; work_delay : int option + ; block_window_duration_ms : int option + ; transaction_capacity : Transaction_capacity.t option + ; coinbase_amount : Currency.Amount.Stable.Latest.t option + ; supercharged_coinbase_factor : int option + ; account_creation_fee : Currency.Fee.Stable.Latest.t option + ; 
fork : Fork_config.t option + } [@@deriving bin_io_unversioned] let to_json_layout @@ -642,18 +669,20 @@ module Proof_keys = struct ; coinbase_amount ; supercharged_coinbase_factor ; account_creation_fee - ; fork } = - { Json_layout.Proof_keys.level= Option.map ~f:Level.to_json_layout level + ; fork + } = + { Json_layout.Proof_keys.level = Option.map ~f:Level.to_json_layout level ; sub_windows_per_window ; ledger_depth ; work_delay ; block_window_duration_ms - ; transaction_capacity= + ; transaction_capacity = Option.map ~f:Transaction_capacity.to_json_layout transaction_capacity ; coinbase_amount ; supercharged_coinbase_factor ; account_creation_fee - ; fork } + ; fork + } let of_json_layout { Json_layout.Proof_keys.level @@ -665,7 +694,8 @@ module Proof_keys = struct ; coinbase_amount ; supercharged_coinbase_factor ; account_creation_fee - ; fork } = + ; fork + } = let open Result.Let_syntax in let%map level = result_opt ~f:Level.of_json_layout level and transaction_capacity = @@ -680,7 +710,8 @@ module Proof_keys = struct ; coinbase_amount ; supercharged_coinbase_factor ; account_creation_fee - ; fork } + ; fork + } let to_yojson x = Json_layout.Proof_keys.to_yojson (to_json_layout x) @@ -688,36 +719,36 @@ module Proof_keys = struct Result.bind ~f:of_json_layout (Json_layout.Proof_keys.of_yojson json) let combine t1 t2 = - { level= opt_fallthrough ~default:t1.level t2.level - ; sub_windows_per_window= + { level = opt_fallthrough ~default:t1.level t2.level + ; sub_windows_per_window = opt_fallthrough ~default:t1.sub_windows_per_window t2.sub_windows_per_window - ; ledger_depth= opt_fallthrough ~default:t1.ledger_depth t2.ledger_depth - ; work_delay= opt_fallthrough ~default:t1.work_delay t2.work_delay - ; block_window_duration_ms= + ; ledger_depth = opt_fallthrough ~default:t1.ledger_depth t2.ledger_depth + ; work_delay = opt_fallthrough ~default:t1.work_delay t2.work_delay + ; block_window_duration_ms = opt_fallthrough ~default:t1.block_window_duration_ms 
t2.block_window_duration_ms - ; transaction_capacity= - opt_fallthrough ~default:t1.transaction_capacity - t2.transaction_capacity - ; coinbase_amount= + ; transaction_capacity = + opt_fallthrough ~default:t1.transaction_capacity t2.transaction_capacity + ; coinbase_amount = opt_fallthrough ~default:t1.coinbase_amount t2.coinbase_amount - ; supercharged_coinbase_factor= + ; supercharged_coinbase_factor = opt_fallthrough ~default:t1.supercharged_coinbase_factor t2.supercharged_coinbase_factor - ; account_creation_fee= - opt_fallthrough ~default:t1.account_creation_fee - t2.account_creation_fee - ; fork= opt_fallthrough ~default:t1.fork t2.fork } + ; account_creation_fee = + opt_fallthrough ~default:t1.account_creation_fee t2.account_creation_fee + ; fork = opt_fallthrough ~default:t1.fork t2.fork + } end module Genesis = struct type t = Json_layout.Genesis.t = - { k: int option - ; delta: int option - ; slots_per_epoch: int option - ; slots_per_sub_window: int option - ; genesis_state_timestamp: string option } + { k : int option + ; delta : int option + ; slots_per_epoch : int option + ; slots_per_sub_window : int option + ; genesis_state_timestamp : string option + } [@@deriving bin_io_unversioned] let to_json_layout : t -> Json_layout.Genesis.t = Fn.id @@ -731,21 +762,21 @@ module Genesis = struct Result.bind ~f:of_json_layout (Json_layout.Genesis.of_yojson json) let combine t1 t2 = - { k= opt_fallthrough ~default:t1.k t2.k - ; delta= opt_fallthrough ~default:t1.delta t2.delta - ; slots_per_epoch= + { k = opt_fallthrough ~default:t1.k t2.k + ; delta = opt_fallthrough ~default:t1.delta t2.delta + ; slots_per_epoch = opt_fallthrough ~default:t1.slots_per_epoch t2.slots_per_epoch - ; slots_per_sub_window= - opt_fallthrough ~default:t1.slots_per_sub_window - t2.slots_per_sub_window - ; genesis_state_timestamp= + ; slots_per_sub_window = + opt_fallthrough ~default:t1.slots_per_sub_window t2.slots_per_sub_window + ; genesis_state_timestamp = opt_fallthrough 
~default:t1.genesis_state_timestamp - t2.genesis_state_timestamp } + t2.genesis_state_timestamp + } end module Daemon = struct type t = Json_layout.Daemon.t = - {txpool_max_size: int option; peer_list_url: string option} + { txpool_max_size : int option; peer_list_url : string option } [@@deriving bin_io_unversioned] let to_json_layout : t -> Json_layout.Daemon.t = Fn.id @@ -759,54 +790,57 @@ module Daemon = struct Result.bind ~f:of_json_layout (Json_layout.Daemon.of_yojson json) let combine t1 t2 = - { txpool_max_size= + { txpool_max_size = opt_fallthrough ~default:t1.txpool_max_size t2.txpool_max_size - ; peer_list_url= opt_fallthrough ~default:t1.peer_list_url t2.peer_list_url + ; peer_list_url = opt_fallthrough ~default:t1.peer_list_url t2.peer_list_url } end module Epoch_data = struct module Data = struct - type t = {ledger: Ledger.t; seed: string} + type t = { ledger : Ledger.t; seed : string } [@@deriving bin_io_unversioned, yojson] end type t = - {staking: Data.t; next: Data.t option (*If None, then next = staking*)} + { staking : Data.t; next : Data.t option (*If None, then next = staking*) } [@@deriving bin_io_unversioned, yojson] let to_json_layout : t -> Json_layout.Epoch_data.t = - fun {staking; next} -> + fun { staking; next } -> let accounts (ledger : Ledger.t) = match ledger.base with Accounts acc -> acc | _ -> assert false in let staking = - { Json_layout.Epoch_data.Data.accounts= accounts staking.ledger - ; seed= staking.seed } + { Json_layout.Epoch_data.Data.accounts = accounts staking.ledger + ; seed = staking.seed + } in let next = Option.map next ~f:(fun n -> - { Json_layout.Epoch_data.Data.accounts= accounts n.ledger - ; seed= n.seed } ) + { Json_layout.Epoch_data.Data.accounts = accounts n.ledger + ; seed = n.seed + }) in - {Json_layout.Epoch_data.staking; next} + { Json_layout.Epoch_data.staking; next } let of_json_layout : Json_layout.Epoch_data.t -> (t, string) Result.t = - fun {staking; next} -> + fun { staking; next } -> let data 
accounts seed = let ledger = - { Ledger.base= Accounts accounts - ; num_accounts= None - ; balances= [] - ; hash= None - ; name= None - ; add_genesis_winner= Some false } + { Ledger.base = Accounts accounts + ; num_accounts = None + ; balances = [] + ; hash = None + ; name = None + ; add_genesis_winner = Some false + } in - {Data.ledger; seed} + { Data.ledger; seed } in let staking = data staking.accounts staking.seed in let next = Option.map next ~f:(fun n -> data n.accounts n.seed) in - Ok {staking; next} + Ok { staking; next } let to_yojson x = Json_layout.Epoch_data.to_yojson (to_json_layout x) @@ -815,35 +849,42 @@ module Epoch_data = struct end type t = - { daemon: Daemon.t option - ; genesis: Genesis.t option - ; proof: Proof_keys.t option - ; ledger: Ledger.t option - ; epoch_data: Epoch_data.t option } + { daemon : Daemon.t option + ; genesis : Genesis.t option + ; proof : Proof_keys.t option + ; ledger : Ledger.t option + ; epoch_data : Epoch_data.t option + } [@@deriving bin_io_unversioned] -let to_json_layout {daemon; genesis; proof; ledger; epoch_data} = - { Json_layout.daemon= Option.map ~f:Daemon.to_json_layout daemon - ; genesis= Option.map ~f:Genesis.to_json_layout genesis - ; proof= Option.map ~f:Proof_keys.to_json_layout proof - ; ledger= Option.map ~f:Ledger.to_json_layout ledger - ; epoch_data= Option.map ~f:Epoch_data.to_json_layout epoch_data } +let to_json_layout { daemon; genesis; proof; ledger; epoch_data } = + { Json_layout.daemon = Option.map ~f:Daemon.to_json_layout daemon + ; genesis = Option.map ~f:Genesis.to_json_layout genesis + ; proof = Option.map ~f:Proof_keys.to_json_layout proof + ; ledger = Option.map ~f:Ledger.to_json_layout ledger + ; epoch_data = Option.map ~f:Epoch_data.to_json_layout epoch_data + } -let of_json_layout {Json_layout.daemon; genesis; proof; ledger; epoch_data} = +let of_json_layout { Json_layout.daemon; genesis; proof; ledger; epoch_data } = let open Result.Let_syntax in let%map daemon = result_opt 
~f:Daemon.of_json_layout daemon and genesis = result_opt ~f:Genesis.of_json_layout genesis and proof = result_opt ~f:Proof_keys.of_json_layout proof and ledger = result_opt ~f:Ledger.of_json_layout ledger and epoch_data = result_opt ~f:Epoch_data.of_json_layout epoch_data in - {daemon; genesis; proof; ledger; epoch_data} + { daemon; genesis; proof; ledger; epoch_data } let to_yojson x = Json_layout.to_yojson (to_json_layout x) let of_yojson json = Result.bind ~f:of_json_layout (Json_layout.of_yojson json) let default = - {daemon= None; genesis= None; proof= None; ledger= None; epoch_data= None} + { daemon = None + ; genesis = None + ; proof = None + ; ledger = None + ; epoch_data = None + } let combine t1 t2 = let merge ~combine t1 t2 = @@ -855,11 +896,12 @@ let combine t1 t2 = | None, None -> None in - { daemon= merge ~combine:Daemon.combine t1.daemon t2.daemon - ; genesis= merge ~combine:Genesis.combine t1.genesis t2.genesis - ; proof= merge ~combine:Proof_keys.combine t1.proof t2.proof - ; ledger= opt_fallthrough ~default:t1.ledger t2.ledger - ; epoch_data= opt_fallthrough ~default:t1.epoch_data t2.epoch_data } + { daemon = merge ~combine:Daemon.combine t1.daemon t2.daemon + ; genesis = merge ~combine:Genesis.combine t1.genesis t2.genesis + ; proof = merge ~combine:Proof_keys.combine t1.proof t2.proof + ; ledger = opt_fallthrough ~default:t1.ledger t2.ledger + ; epoch_data = opt_fallthrough ~default:t1.epoch_data t2.epoch_data + } module Test_configs = struct let bootstrap = diff --git a/src/lib/secrets/hardware_wallets.ml b/src/lib/secrets/hardware_wallets.ml index cea44317b8a..ab509191269 100644 --- a/src/lib/secrets/hardware_wallets.ml +++ b/src/lib/secrets/hardware_wallets.ml @@ -32,17 +32,17 @@ let decode_field (type field) (module Tick : Tick_intf with type field = field) |> B58.decode Base58_check.mina_alphabet |> Bytes.to_list |> List.rev |> Bytes.of_char_list |> Bytes.to_string |> String.foldi ~init:Bigint.zero ~f:(fun i acc byte -> - Bigint.(acc lor 
(of_int (Char.to_int byte) lsl Int.( * ) 8 i)) ) + Bigint.(acc lor (of_int (Char.to_int byte) lsl Int.( * ) 8 i))) |> Tick.Bigint.of_bignum_bigint |> Tick.Bigint.to_field -type public_key = {status: string; x: string; y: string} [@@deriving yojson] +type public_key = { status : string; x : string; y : string } +[@@deriving yojson] let decode_status_code ~f = function | "Ok" -> Ok (f ()) | "Hardware_wallet_not_found" -> - Error - "Hardware wallet not found. Is the device plugged in and activated?" + Error "Hardware wallet not found. Is the device plugged in and activated?" | "Computation_aborted" -> Error "An internal error happens in hardware wallet." | s -> @@ -63,12 +63,12 @@ let decode_public_key : string -> (Public_key.t, string) Result.t = Yojson.Safe.from_string public_key |> public_key_of_yojson |> Result.map_error ~f:report_json_error - |> Result.bind ~f:(fun {status; x; y} -> + |> Result.bind ~f:(fun { status; x; y } -> decode_status_code status ~f:(fun () -> ( decode_field (module Snark_params.Tick) x - , decode_field (module Snark_params.Tick) y ) ) ) + , decode_field (module Snark_params.Tick) y ))) -type signature = {status: string; field: string; scalar: string} +type signature = { status : string; field : string; scalar : string } [@@deriving yojson] let decode_signature : string -> (Signature.t, string) Result.t = @@ -76,17 +76,18 @@ let decode_signature : string -> (Signature.t, string) Result.t = Yojson.Safe.from_string signature |> signature_of_yojson |> Result.map_error ~f:report_json_error - |> Result.bind ~f:(fun {status; field; scalar} -> + |> Result.bind ~f:(fun { status; field; scalar } -> decode_status_code status ~f:(fun () -> ( decode_field (module Snark_params.Tick) field - , decode_field (module Snark_params.Tock) scalar ) ) ) + , decode_field (module Snark_params.Tock) scalar ))) let compute_public_key ~hd_index = let prog, args = ( "python3" , [ "-m" ^ hardware_wallet_script ; "--request=publickey" - ; "--nonce=" ^ 
Mina_numbers.Hd_index.to_string hd_index ] ) + ; "--nonce=" ^ Mina_numbers.Hd_index.to_string hd_index + ] ) in Process.run ~prog ~args () |> Deferred.Result.map_error ~f:report_process_error @@ -112,7 +113,8 @@ let sign ~hd_index ~public_key ~user_command_payload : ; "--request=sign" ; "--msgx=" ^ messages.(0) ; "--msgm=" ^ messages.(1) - ; "--nonce=" ^ Mina_numbers.Hd_index.to_string hd_index ] ) + ; "--nonce=" ^ Mina_numbers.Hd_index.to_string hd_index + ] ) in let%bind signature_str = Process.run ~prog ~args () diff --git a/src/lib/secrets/keypair.ml b/src/lib/secrets/keypair.ml index f52eaa754c1..38327eab45d 100644 --- a/src/lib/secrets/keypair.ml +++ b/src/lib/secrets/keypair.ml @@ -12,7 +12,7 @@ module T = struct let which = "coda keypair" (** Writes a keypair to [privkey_path] and [privkey_path ^ ".pub"] using [Secret_file] *) - let write_exn {Keypair.private_key; public_key} ~(privkey_path : string) + let write_exn { Keypair.private_key; public_key } ~(privkey_path : string) ~(password : Secret_file.password) : unit Deferred.t = let privkey_bytes = Private_key.to_bigstring private_key |> Bigstring.to_bytes @@ -26,9 +26,9 @@ module T = struct with | Ok () -> (* The hope is that if [Secret_file.write] succeeded then this ought to - as well, letting [handle_open] stay inside [Secret_file]. It might not - if the environment changes underneath us, and we won't have nice errors - in that case. *) + as well, letting [handle_open] stay inside [Secret_file]. It might not + if the environment changes underneath us, and we won't have nice errors + in that case. 
*) let%bind pubkey_f = Writer.open_file (privkey_path ^ ".pub") in Writer.write_line pubkey_f pubkey_string ; Writer.close pubkey_f @@ -70,8 +70,7 @@ module T = struct let read_exn' path = read_exn ~privkey_path:path - ~password: - (lazy (Password.hidden_line_or_env "Secret key password: " ~env)) + ~password:(lazy (Password.hidden_line_or_env "Secret key password: " ~env)) end include T diff --git a/src/lib/secrets/keypair_common.ml b/src/lib/secrets/keypair_common.ml index d3602e5498b..486ddb37469 100644 --- a/src/lib/secrets/keypair_common.ml +++ b/src/lib/secrets/keypair_common.ml @@ -15,10 +15,7 @@ module Make_terminal_stdin (KP : sig -> (t, Privkey_error.t) Deferred.Result.t val write_exn : - t - -> privkey_path:string - -> password:Secret_file.password - -> unit Deferred.t + t -> privkey_path:string -> password:Secret_file.password -> unit Deferred.t end) = struct open KP diff --git a/src/lib/secrets/libp2p_keypair.ml b/src/lib/secrets/libp2p_keypair.ml index 9fafc028ae5..fb6d5b4c153 100644 --- a/src/lib/secrets/libp2p_keypair.ml +++ b/src/lib/secrets/libp2p_keypair.ml @@ -11,8 +11,8 @@ module T = struct let which = "libp2p keypair" (** Writes a keypair to [privkey_path] and [privkey_path ^ ".pub"] using [Secret_file] *) - let write_exn kp ~(privkey_path : string) ~(password : Secret_file.password) - : unit Deferred.t = + let write_exn kp ~(privkey_path : string) ~(password : Secret_file.password) : + unit Deferred.t = let str = Mina_net2.Keypair.to_string kp in match%bind Secret_file.write ~path:privkey_path ~mkdir:true @@ -20,9 +20,9 @@ module T = struct with | Ok () -> (* The hope is that if [Secret_file.write] succeeded then this ought to - as well, letting [handle_open] stay inside [Secret_file]. It might not - if the environment changes underneath us, and we won't have nice errors - in that case. *) + as well, letting [handle_open] stay inside [Secret_file]. 
It might not + if the environment changes underneath us, and we won't have nice errors + in that case. *) let%bind pubkey_f = Writer.open_file (privkey_path ^ ".peerid") in Writer.write_line pubkey_f (Mina_net2.Keypair.to_peer_id kp) ; Writer.close pubkey_f diff --git a/src/lib/secrets/password.ml b/src/lib/secrets/password.ml index a9654c5c19d..505e298ecfa 100644 --- a/src/lib/secrets/password.ml +++ b/src/lib/secrets/password.ml @@ -11,7 +11,7 @@ let read_hidden_line ~error_help_message prompt : Bytes.t Async.Deferred.t = let () = if isatty then Terminal_io.tcsetattr ~mode:Terminal_io.TCSANOW - {(Option.value_exn old_termios) with c_echo= false; c_echonl= true} + { (Option.value_exn old_termios) with c_echo = false; c_echonl = true } stdin in Writer.write (Lazy.force Writer.stdout) prompt ; diff --git a/src/lib/secrets/password.mli b/src/lib/secrets/password.mli index 49577d78fdd..8f5f2875310 100644 --- a/src/lib/secrets/password.mli +++ b/src/lib/secrets/password.mli @@ -1,7 +1,6 @@ open Async -val read_hidden_line : - error_help_message:string -> string -> Bytes.t Deferred.t +val read_hidden_line : error_help_message:string -> string -> Bytes.t Deferred.t val hidden_line_or_env : ?error_help_message:string -> string -> env:string -> Bytes.t Deferred.t diff --git a/src/lib/secrets/privkey_error.ml b/src/lib/secrets/privkey_error.ml index af82df85b00..721d8e88b19 100644 --- a/src/lib/secrets/privkey_error.ml +++ b/src/lib/secrets/privkey_error.ml @@ -26,5 +26,4 @@ let raise ~which t = let where = sprintf "loading %s" which in Mina_user_error.raise ~where (to_string t) -let corrupted_privkey error : (_, t) Result.t = - Error (`Corrupted_privkey error) +let corrupted_privkey error : (_, t) Result.t = Error (`Corrupted_privkey error) diff --git a/src/lib/secrets/secret_box.ml b/src/lib/secrets/secret_box.ml index 8ffc807721e..201507f2a44 100644 --- a/src/lib/secrets/secret_box.ml +++ b/src/lib/secrets/secret_box.ml @@ -14,25 +14,26 @@ module BytesWr = struct let 
of_yojson = function | `String s -> ( - match Base58_check.decode s with - | Error e -> - Error - (sprintf "Bytes.of_yojson, bad Base58Check: %s" - (Error.to_string_hum e)) - | Ok x -> - Ok (Bytes.of_string x) ) + match Base58_check.decode s with + | Error e -> + Error + (sprintf "Bytes.of_yojson, bad Base58Check: %s" + (Error.to_string_hum e)) + | Ok x -> + Ok (Bytes.of_string x) ) | _ -> Error "Bytes.of_yojson needs a string" end module T = struct type t = - { box_primitive: string - ; pw_primitive: string - ; nonce: Bytes.t - ; pwsalt: Bytes.t - ; pwdiff: Int64.t * int - ; ciphertext: Bytes.t } + { box_primitive : string + ; pw_primitive : string + ; nonce : Bytes.t + ; pwsalt : Bytes.t + ; pwdiff : Int64.t * int + ; ciphertext : Bytes.t + } [@@deriving sexp] end @@ -44,30 +45,32 @@ module Json : sig val to_stable : t -> T.t end = struct type t = - { box_primitive: string - ; pw_primitive: string - ; nonce: BytesWr.t - ; pwsalt: BytesWr.t - ; pwdiff: Int64.t * int - ; ciphertext: BytesWr.t } + { box_primitive : string + ; pw_primitive : string + ; nonce : BytesWr.t + ; pwsalt : BytesWr.t + ; pwdiff : Int64.t * int + ; ciphertext : BytesWr.t + } [@@deriving yojson] let of_stable - {T.box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext} = - {box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext} + { T.box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext } = + { box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext } - let to_stable {box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext} - = - {T.box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext} + let to_stable + { box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext } = + { T.box_primitive; pw_primitive; nonce; pwsalt; pwdiff; ciphertext } end type t = T.t = - { box_primitive: string - ; pw_primitive: string - ; nonce: Bytes.t - ; pwsalt: Bytes.t - ; pwdiff: Int64.t * int - ; ciphertext: Bytes.t } + { box_primitive : string + ; pw_primitive : 
string + ; nonce : Bytes.t + ; pwsalt : Bytes.t + ; pwdiff : Int64.t * int + ; ciphertext : Bytes.t + } [@@deriving sexp] let to_yojson t : Yojson.Safe.t = Json.to_yojson (Json.of_stable t) @@ -79,18 +82,19 @@ let of_yojson (t : Yojson.Safe.t) = let encrypt ~(password : Bytes.t) ~(plaintext : Bytes.t) = let nonce = Secret_box.random_nonce () in let salt = Password_hash.random_salt () in - let ({Password_hash.mem_limit; ops_limit} as diff) = + let ({ Password_hash.mem_limit; ops_limit } as diff) = Password_hash.moderate in let pw = Password_hash.Bytes.wipe_to_password password in let key = Secret_box.derive_key diff pw salt in let ciphertext = Secret_box.Bytes.secret_box key plaintext nonce in - { box_primitive= Secret_box.primitive - ; pw_primitive= Password_hash.primitive - ; nonce= Secret_box.Bytes.of_nonce nonce - ; pwsalt= Password_hash.Bytes.of_salt salt - ; pwdiff= (mem_limit, ops_limit) - ; ciphertext } + { box_primitive = Secret_box.primitive + ; pw_primitive = Password_hash.primitive + ; nonce = Secret_box.Bytes.of_nonce nonce + ; pwsalt = Password_hash.Bytes.of_salt salt + ; pwdiff = (mem_limit, ops_limit) + ; ciphertext + } (** warning: this will zero [password] *) let decrypt ~(password : Bytes.t) @@ -98,8 +102,9 @@ let decrypt ~(password : Bytes.t) ; pw_primitive ; nonce ; pwsalt - ; pwdiff= mem_limit, ops_limit - ; ciphertext } = + ; pwdiff = mem_limit, ops_limit + ; ciphertext + } = if not (String.equal box_primitive Secret_box.primitive) then Error (`Corrupted_privkey @@ -115,7 +120,7 @@ let decrypt ~(password : Bytes.t) else let nonce = Secret_box.Bytes.to_nonce nonce in let salt = Password_hash.Bytes.to_salt pwsalt in - let diff = {Password_hash.mem_limit; ops_limit} in + let diff = { Password_hash.mem_limit; ops_limit } in let pw = Password_hash.Bytes.wipe_to_password password in let key = Secret_box.derive_key diff pw salt in try Result.return @@ Secret_box.Bytes.secret_box_open key ciphertext nonce @@ -131,7 +136,7 @@ let%test_unit "successful 
roundtrip" = ~f:(fun (password, plaintext) -> let enc = encrypt ~password:(Bytes.copy password) ~plaintext in let dec = Option.value_exn (decrypt enc ~password |> Result.ok) in - [%test_eq: Bytes.t] dec plaintext ) + [%test_eq: Bytes.t] dec plaintext) let%test "bad password fails" = let enc = diff --git a/src/lib/secrets/secret_file.ml b/src/lib/secrets/secret_file.ml index 5ecfbb5801b..eebb6c36c16 100644 --- a/src/lib/secrets/secret_file.ml +++ b/src/lib/secrets/secret_file.ml @@ -15,12 +15,12 @@ let handle_open ~mkdir ~(f : string -> 'a Deferred.t) path = let%bind stat = Unix.stat dn in Deferred.return @@ - if (Unix.File_kind.equal stat.kind `Directory) then Ok true + if Unix.File_kind.equal stat.kind `Directory then Ok true else corrupted_privkey (Error.createf "%s exists and it is not a directory, can't store files there" - dn) ) + dn)) with | Ok x -> return x @@ -44,7 +44,7 @@ let handle_open ~mkdir ~(f : string -> 'a Deferred.t) path = Deferred.Result.return () else if not parent_exists then Deferred.return (Error (`Parent_directory_does_not_exist dn)) - else Deferred.Result.return () ) + else Deferred.Result.return ()) with | Ok x -> Deferred.return x @@ -57,16 +57,16 @@ let handle_open ~mkdir ~(f : string -> 'a Deferred.t) path = let open Deferred.Let_syntax in match%bind Deferred.Or_error.try_with ~here:[%here] ~extract_exn:true (fun () -> - f path ) + f path) with | Ok x -> Deferred.Result.return x | Error e -> ( - match Error.to_exn e with - | Unix.Unix_error (_, _, _) -> - Deferred.return (Error (`Cannot_open_file path)) - | e -> - Deferred.return @@ corrupted_privkey (Error.of_exn e) ) + match Error.to_exn e with + | Unix.Unix_error (_, _, _) -> + Deferred.return (Error (`Cannot_open_file path)) + | e -> + Deferred.return @@ corrupted_privkey (Error.of_exn e) ) let lift (t : 'a Deferred.t) : ('a, 'b) Deferred.Result.t = t >>| fun x -> Ok x diff --git a/src/lib/secrets/wallets.ml b/src/lib/secrets/wallets.ml index 99263ce7420..df982479209 100644 --- 
a/src/lib/secrets/wallets.ml +++ b/src/lib/secrets/wallets.ml @@ -10,12 +10,12 @@ type locked_key = | Hd_account of Mina_numbers.Hd_index.t (* A simple cache on top of the fs *) -type t = {cache: locked_key Public_key.Compressed.Table.t; path: string} +type t = { cache : locked_key Public_key.Compressed.Table.t; path : string } let get_privkey_filename public_key = Public_key.Compressed.to_base58_check public_key -let get_path {path; cache} public_key = +let get_path { path; cache } public_key = (* TODO: Do we need to version this? *) let filename = Public_key.Compressed.Table.find cache public_key @@ -24,7 +24,7 @@ let get_path {path; cache} public_key = Option.return file | Hd_account _ -> Option.return - (Public_key.Compressed.to_base58_check public_key ^ ".index") ) + (Public_key.Compressed.to_base58_check public_key ^ ".index")) |> Option.value ~default:(get_privkey_filename public_key) in path ^/ filename @@ -40,12 +40,13 @@ let decode_public_key key file path logger = ~metadata: [ ("file", `String file) ; ("path", `String path) - ; ("error", Error_json.error_to_yojson e) ] ; + ; ("error", Error_json.error_to_yojson e) + ] ; None -let reload ~logger {cache; path} : unit Deferred.t = +let reload ~logger { cache; path } : unit Deferred.t = let logger = - Logger.extend logger [("wallets_context", `String "Wallets.get")] + Logger.extend logger [ ("wallets_context", `String "Wallets.get") ] in Public_key.Compressed.Table.clear cache ; let%bind () = File_system.create_dir path in @@ -61,34 +62,34 @@ let reload ~logger {cache; path} : unit Deferred.t = |> Option.iter ~f:(fun pk -> ignore @@ Public_key.Compressed.Table.add cache ~key:pk - ~data:(Locked sk_filename) ) + ~data:(Locked sk_filename)) | _ -> () ) | None -> ( - match String.chop_suffix file ~suffix:".index" with - | Some public_key -> ( - let%map lines = Reader.file_lines (path ^/ file) in - match lines with - | hd_index :: _ -> - decode_public_key public_key file path logger - |> Option.iter ~f:(fun pk -> 
- ignore - @@ Public_key.Compressed.Table.add cache ~key:pk - ~data: - (Hd_account - (Mina_numbers.Hd_index.of_string hd_index)) - ) - | _ -> - () ) - | None -> - return () ) ) + match String.chop_suffix file ~suffix:".index" with + | Some public_key -> ( + let%map lines = Reader.file_lines (path ^/ file) in + match lines with + | hd_index :: _ -> + decode_public_key public_key file path logger + |> Option.iter ~f:(fun pk -> + ignore + @@ Public_key.Compressed.Table.add cache ~key:pk + ~data: + (Hd_account + (Mina_numbers.Hd_index.of_string hd_index))) + | _ -> + () ) + | None -> + return () )) in Unix.chmod path ~perm:0o700 let load ~logger ~disk_location = let t = - { cache= Public_key.Compressed.Table.create () - ; path= disk_location ^/ "store" } + { cache = Public_key.Compressed.Table.create () + ; path = disk_location ^/ "store" + } in let%map () = reload ~logger t in t @@ -101,16 +102,16 @@ let import_keypair_helper t keypair write_keypair = ignore ( Public_key.Compressed.Table.add t.cache ~key:compressed_pk ~data:(Unlocked (get_privkey_filename compressed_pk, keypair)) - : [`Duplicate | `Ok] ) ; + : [ `Duplicate | `Ok ] ) ; compressed_pk let import_keypair t keypair ~password = import_keypair_helper t keypair (fun privkey_path -> - Secret_keypair.write_exn keypair ~privkey_path ~password ) + Secret_keypair.write_exn keypair ~privkey_path ~password) let import_keypair_terminal_stdin t keypair = import_keypair_helper t keypair (fun privkey_path -> - Secret_keypair.Terminal_stdin.write_exn keypair ~privkey_path ) + Secret_keypair.Terminal_stdin.write_exn keypair ~privkey_path) (** Generates a new private key file and a keypair *) let generate_new t ~password : Public_key.Compressed.t Deferred.t = @@ -135,19 +136,19 @@ let create_hd_account t ~hd_index : ignore ( Public_key.Compressed.Table.add t.cache ~key:compressed_pk ~data:(Hd_account hd_index) - : [`Duplicate | `Ok] ) ; + : [ `Duplicate | `Ok ] ) ; compressed_pk -let delete ({cache; _} as t : t) (pk : 
Public_key.Compressed.t) : - (unit, [`Not_found]) Deferred.Result.t = +let delete ({ cache; _ } as t : t) (pk : Public_key.Compressed.t) : + (unit, [ `Not_found ]) Deferred.Result.t = Hashtbl.remove cache pk ; Deferred.Or_error.try_with ~here:[%here] (fun () -> - Unix.remove (get_path t pk) ) + Unix.remove (get_path t pk)) |> Deferred.Result.map_error ~f:(fun _ -> `Not_found) -let pks ({cache; _} : t) = Public_key.Compressed.Table.keys cache +let pks ({ cache; _ } : t) = Public_key.Compressed.Table.keys cache -let find_unlocked ({cache; _} : t) ~needle = +let find_unlocked ({ cache; _ } : t) ~needle = Public_key.Compressed.Table.find cache needle |> Option.bind ~f:(function | Locked _ -> @@ -155,9 +156,9 @@ let find_unlocked ({cache; _} : t) ~needle = | Unlocked (_, kp) -> Some kp | Hd_account _ -> - None ) + None) -let find_identity ({cache; _} : t) ~needle = +let find_identity ({ cache; _ } : t) ~needle = Public_key.Compressed.Table.find cache needle |> Option.bind ~f:(function | Locked _ -> @@ -165,9 +166,9 @@ let find_identity ({cache; _} : t) ~needle = | Unlocked (_, kp) -> Some (`Keypair kp) | Hd_account index -> - Some (`Hd_index index) ) + Some (`Hd_index index)) -let check_locked {cache; _} ~needle = +let check_locked { cache; _ } ~needle = Public_key.Compressed.Table.find cache needle |> Option.map ~f:(function | Locked _ -> @@ -175,16 +176,16 @@ let check_locked {cache; _} ~needle = | Unlocked _ -> false | Hd_account _ -> - true ) + true) -let unlock {cache; path} ~needle ~password = +let unlock { cache; path } ~needle ~password = let unlock_keypair = function | Locked file -> Secret_keypair.read ~privkey_path:(path ^/ file) ~password |> Deferred.Result.map_error ~f:(fun _ -> `Bad_password) |> Deferred.Result.map ~f:(fun kp -> Public_key.Compressed.Table.set cache ~key:needle - ~data:(Unlocked (file, kp)) ) + ~data:(Unlocked (file, kp))) |> Deferred.Result.ignore_m | Unlocked _ -> Deferred.Result.return () @@ -196,12 +197,12 @@ let unlock {cache; path} 
~needle ~password = |> Deferred.return |> Deferred.Result.bind ~f:unlock_keypair -let lock {cache; _} ~needle = +let lock { cache; _ } ~needle = Public_key.Compressed.Table.change cache needle ~f:(function | Some (Unlocked (file, _)) -> Some (Locked file) | k -> - k ) + k) let%test_module "wallets" = ( module struct @@ -218,7 +219,7 @@ let%test_module "wallets" = let%map pk = generate_new wallets ~password in let keys = Set.of_list (pks wallets) in assert (Set.mem keys pk) ; - assert (find_unlocked wallets ~needle:pk |> Option.is_some) ) ) + assert (find_unlocked wallets ~needle:pk |> Option.is_some))) let%test_unit "get from existing file system not-scratch" = Backtrace.elide := false ; @@ -232,7 +233,7 @@ let%test_module "wallets" = (* Get wallets again from scratch *) let%map wallets = load ~logger ~disk_location:path in let keys = Set.of_list (pks wallets) in - assert (Set.mem keys pk1 && Set.mem keys pk2) ) ) + assert (Set.mem keys pk1 && Set.mem keys pk2))) let%test_unit "create wallet then delete it" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -247,7 +248,7 @@ let%test_module "wallets" = Option.is_none @@ Public_key.Compressed.Table.find wallets.cache pk ) | Error _ -> - failwith "unexpected" ) ) + failwith "unexpected")) let%test_unit "Unable to find wallet" = Async.Thread_safe.block_on_async_exn (fun () -> @@ -257,5 +258,5 @@ let%test_module "wallets" = let%map result = delete wallets (Public_key.compress @@ keypair.public_key) in - assert (Result.is_error result) ) ) + assert (Result.is_error result))) end ) diff --git a/src/lib/secrets/wallets.mli b/src/lib/secrets/wallets.mli index 44e4a27679b..af6edd43c89 100644 --- a/src/lib/secrets/wallets.mli +++ b/src/lib/secrets/wallets.mli @@ -31,7 +31,7 @@ val find_unlocked : t -> needle:Public_key.Compressed.t -> Keypair.t option val find_identity : t -> needle:Public_key.Compressed.t - -> [`Keypair of Keypair.t | `Hd_index of Mina_numbers.Hd_index.t] option + -> [ `Keypair of Keypair.t | `Hd_index of 
Mina_numbers.Hd_index.t ] option val check_locked : t -> needle:Public_key.Compressed.t -> bool option @@ -39,11 +39,11 @@ val unlock : t -> needle:Public_key.Compressed.t -> password:Secret_file.password - -> (unit, [`Not_found | `Bad_password]) Deferred.Result.t + -> (unit, [ `Not_found | `Bad_password ]) Deferred.Result.t val lock : t -> needle:Public_key.Compressed.t -> unit val get_path : t -> Public_key.Compressed.t -> string val delete : - t -> Public_key.Compressed.t -> (unit, [`Not_found]) Deferred.Result.t + t -> Public_key.Compressed.t -> (unit, [ `Not_found ]) Deferred.Result.t diff --git a/src/lib/sgn/sgn.ml b/src/lib/sgn/sgn.ml index d704ea42b87..87843562f95 100644 --- a/src/lib/sgn/sgn.ml +++ b/src/lib/sgn/sgn.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -26,7 +24,7 @@ end] let gen = Quickcheck.Generator.map Bool.quickcheck_generator ~f:(fun b -> - if b then Pos else Neg ) + if b then Pos else Neg) let negate = function Pos -> Neg | Neg -> Pos @@ -39,17 +37,17 @@ let of_field_exn x = else if Field.equal x neg_one then Neg else failwith "Sgn.of_field: Expected positive or negative 1" -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] type var = Field.Var.t let typ : (var, t) Typ.t = let open Typ in - { check= (fun x -> assert_r1cs x x (Field.Var.constant Field.one)) - ; store= (fun t -> Store.store (to_field t)) - ; read= (fun x -> Read.(read x >>| of_field_exn)) - ; alloc= Alloc.alloc } + { check = (fun x -> assert_r1cs x x (Field.Var.constant Field.one)) + ; store = (fun t -> Store.store (to_field t)) + ; read = (fun x -> Read.(read x >>| of_field_exn)) + ; alloc = Alloc.alloc + } module Checked = struct let two = Field.of_int 2 diff --git a/src/lib/signature_kind/compile_config/mina_signature_kind.ml b/src/lib/signature_kind/compile_config/mina_signature_kind.ml index 95536dd9508..7e204ae39de 
100644 --- a/src/lib/signature_kind/compile_config/mina_signature_kind.ml +++ b/src/lib/signature_kind/compile_config/mina_signature_kind.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] type t = Testnet | Mainnet -[%%if -mainnet] +[%%if mainnet] let t = Mainnet diff --git a/src/lib/signature_lib/keypair.ml b/src/lib/signature_lib/keypair.ml index aa8387389ca..a0517b6725f 100644 --- a/src/lib/signature_lib/keypair.ml +++ b/src/lib/signature_lib/keypair.ml @@ -6,8 +6,9 @@ module Stable = struct module V1 = struct type t = - { public_key: Public_key.Stable.V1.t - ; private_key: Private_key.Stable.V1.t [@sexp.opaque] } + { public_key : Public_key.Stable.V1.t + ; private_key : Private_key.Stable.V1.t [@sexp.opaque] + } [@@deriving sexp] let to_latest = Fn.id @@ -18,11 +19,11 @@ end] module T = struct type t = Stable.Latest.t = - {public_key: Public_key.t; private_key: Private_key.t [@sexp.opaque]} + { public_key : Public_key.t; private_key : Private_key.t [@sexp.opaque] } [@@deriving sexp] - let compare {public_key= pk1; private_key= _} - {public_key= pk2; private_key= _} = + let compare { public_key = pk1; private_key = _ } + { public_key = pk2; private_key = _ } = Public_key.compare pk1 pk2 let to_yojson = Stable.Latest.to_yojson @@ -33,7 +34,7 @@ include Comparable.Make (T) let of_private_key_exn private_key = let public_key = Public_key.of_private_key_exn private_key in - {public_key; private_key} + { public_key; private_key } let create () = of_private_key_exn (Private_key.create ()) @@ -43,8 +44,8 @@ module And_compressed_pk = struct module T = struct type t = T.t * Public_key.Compressed.t [@@deriving sexp] - let compare ({public_key= pk1; private_key= _}, _) - ({public_key= pk2; private_key= _}, _) = + let compare ({ public_key = pk1; private_key = _ }, _) + ({ public_key = pk2; private_key = _ }, _) = Public_key.compare pk1 pk2 end diff --git a/src/lib/signature_lib/keypair.mli b/src/lib/signature_lib/keypair.mli index 
e7a85a2da4b..8ec4ac9847f 100644 --- a/src/lib/signature_lib/keypair.mli +++ b/src/lib/signature_lib/keypair.mli @@ -3,8 +3,9 @@ open Core_kernel module Stable : sig module V1 : sig type t = - { public_key: Public_key.Stable.V1.t - ; private_key: Private_key.Stable.V1.t [@sexp.opaque] } + { public_key : Public_key.Stable.V1.t + ; private_key : Private_key.Stable.V1.t [@sexp.opaque] + } [@@deriving sexp, bin_io, version, to_yojson] end @@ -12,7 +13,7 @@ module Stable : sig end type t = Stable.Latest.t = - {public_key: Public_key.t; private_key: Private_key.t [@sexp.opaque] } + { public_key : Public_key.t; private_key : Private_key.t [@sexp.opaque] } [@@deriving sexp, compare, to_yojson] include Comparable.S with type t := t diff --git a/src/lib/signature_lib/private_key.ml b/src/lib/signature_lib/private_key.ml index 71515858139..165a545bd82 100644 --- a/src/lib/signature_lib/private_key.ml +++ b/src/lib/signature_lib/private_key.ml @@ -1,10 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params.Tick @@ -26,8 +24,7 @@ module Stable = struct let to_latest = Fn.id - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] let gen = let open Snark_params.Tick.Inner_curve.Scalar in @@ -46,8 +43,7 @@ module Stable = struct but also whether the serializations for the consensus and nonconsensus code are identical *) - [%%if - curve_size = 255] + [%%if curve_size = 255] let%test "private key serialization v1" = let pk = @@ -68,11 +64,9 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(gen)] +[%%define_locally Stable.Latest.(gen)] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let create () = (* This calls into libsnark which uses /dev/urandom *) @@ -117,8 +111,7 @@ let create () : t = Snarkette.Pasta.Fq.of_bigint (Snarkette.Nat.of_bytes (String.init 32 ~f:(fun i -> - Char.of_int_exn (Js.Optdef.get (Js.array_get x i) 
byte_undefined) - ))) + Char.of_int_exn (Js.Optdef.get (Js.array_get x i) byte_undefined)))) [%%endif] @@ -147,10 +140,10 @@ let to_yojson t = `String (to_base58_check t) let of_yojson = function | `String x -> ( - try Ok (of_base58_check_exn x) with - | Failure str -> - Error str - | exn -> - Error ("Signature_lib.Private_key.of_yojson: " ^ Exn.to_string exn) ) + try Ok (of_base58_check_exn x) with + | Failure str -> + Error str + | exn -> + Error ("Signature_lib.Private_key.of_yojson: " ^ Exn.to_string exn) ) | _ -> Error "Signature_lib.Private_key.of_yojson: Expected a string" diff --git a/src/lib/signature_lib/public_key.ml b/src/lib/signature_lib/public_key.ml index d6b93f96f4b..ad41a2ee171 100644 --- a/src/lib/signature_lib/public_key.ml +++ b/src/lib/signature_lib/public_key.ml @@ -1,10 +1,8 @@ (* public_key.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] include Non_zero_curve_point module Inner_curve = Snark_params.Tick.Inner_curve @@ -14,6 +12,7 @@ let of_private_key_exn p = of_inner_curve_exn Inner_curve.(scale one p) [%%else] include Non_zero_curve_point_nonconsensus.Non_zero_curve_point + module Inner_curve = Snark_params_nonconsensus.Inner_curve let of_private_key_exn p = of_inner_curve_exn Inner_curve.(scale one p) diff --git a/src/lib/signature_lib/public_key.mli b/src/lib/signature_lib/public_key.mli index f95dbba329f..030a03960fb 100644 --- a/src/lib/signature_lib/public_key.mli +++ b/src/lib/signature_lib/public_key.mli @@ -45,17 +45,16 @@ val of_private_key_exn : Private_key.t -> t module Compressed : sig module Poly : sig - type ('field, 'boolean) t = {x: 'field; is_odd: 'boolean} + type ('field, 'boolean) t = { x : 'field; is_odd : 'boolean } - module Stable : - sig - module V1 : sig - type ('field, 'boolean) t - end - - module Latest = V1 + module Stable : sig + module V1 : sig + type ('field, 'boolean) t end - with type ('field, 'boolean) V1.t = ('field, 
'boolean) t + + module Latest = V1 + end + with type ('field, 'boolean) V1.t = ('field, 'boolean) t end type t = (Field.t, bool) Poly.t [@@deriving sexp, hash] @@ -64,8 +63,7 @@ module Compressed : sig module Stable : sig module V1 : sig - type nonrec t = t - [@@deriving sexp, bin_io, equal, compare, hash, version] + type nonrec t = t [@@deriving sexp, bin_io, equal, compare, hash, version] include Codable.S with type t := t end diff --git a/src/lib/signature_lib/schnorr.ml b/src/lib/signature_lib/schnorr.ml index 2b614cccc40..f9b9e450902 100644 --- a/src/lib/signature_lib/schnorr.ml +++ b/src/lib/signature_lib/schnorr.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] module Bignum_bigint = Bigint open Core_kernel @@ -13,8 +12,7 @@ module type Message_intf = sig type curve_scalar - val derive : - t -> private_key:curve_scalar -> public_key:curve -> curve_scalar + val derive : t -> private_key:curve_scalar -> public_key:curve -> curve_scalar val hash : t -> public_key:curve -> r:field -> curve_scalar @@ -38,8 +36,7 @@ module type Message_intf = sig [%%endif] end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module type S = sig module Impl : Snarky_backendless.Snark_intf.S @@ -57,21 +54,21 @@ module type S = sig module Shifted : sig module type S = Snarky_curves.Shifted_intf - with type curve_var := curve_var - and type boolean_var := Boolean.var - and type ('a, 'b) checked := ('a, 'b) Checked.t + with type curve_var := curve_var + and type boolean_var := Boolean.var + and type ('a, 'b) checked := ('a, 'b) Checked.t end module Message : Message_intf - with type boolean_var := Boolean.var - and type curve_scalar := curve_scalar - and type curve_scalar_var := curve_scalar_var - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type curve := curve - and type curve_var := curve_var - and type field := Field.t - and type field_var := Field.Var.t + with type boolean_var := Boolean.var + and type curve_scalar := 
curve_scalar + and type curve_scalar_var := curve_scalar_var + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type curve := curve + and type curve_var := curve_var + and type field := Field.t + and type field_var := Field.Var.t module Signature : sig type t = field * curve_scalar [@@deriving sexp] @@ -118,66 +115,65 @@ end module Make (Impl : Snarky_backendless.Snark_intf.S) (Curve : sig - open Impl + open Impl - module Scalar : sig - type t [@@deriving sexp, equal] + module Scalar : sig + type t [@@deriving sexp, equal] - type var + type var - val typ : (var, t) Typ.t + val typ : (var, t) Typ.t - val zero : t + val zero : t - val ( * ) : t -> t -> t + val ( * ) : t -> t -> t - val ( + ) : t -> t -> t + val ( + ) : t -> t -> t - val negate : t -> t + val negate : t -> t - module Checked : sig - val to_bits : - var -> Boolean.var Bitstring_lib.Bitstring.Lsb_first.t - end + module Checked : sig + val to_bits : var -> Boolean.var Bitstring_lib.Bitstring.Lsb_first.t end + end - type t [@@deriving sexp] + type t [@@deriving sexp] - type var = Field.Var.t * Field.Var.t + type var = Field.Var.t * Field.Var.t - module Checked : - Snarky_curves.Weierstrass_checked_intf + module Checked : + Snarky_curves.Weierstrass_checked_intf with module Impl := Impl and type t = var and type unchecked := t - val one : t + val one : t - val ( + ) : t -> t -> t + val ( + ) : t -> t -> t - val negate : t -> t + val negate : t -> t - val scale : t -> Scalar.t -> t + val scale : t -> Scalar.t -> t - val to_affine_exn : t -> Field.t * Field.t + val to_affine_exn : t -> Field.t * Field.t end) (Message : Message_intf - with type boolean_var := Impl.Boolean.var - and type curve_scalar_var := Curve.Scalar.var - and type curve_scalar := Curve.Scalar.t - and type curve := Curve.t - and type curve_var := Curve.var - and type field := Impl.Field.t - and type field_var := Impl.Field.Var.t - and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t) : + with type boolean_var := Impl.Boolean.var + and 
type curve_scalar_var := Curve.Scalar.var + and type curve_scalar := Curve.Scalar.t + and type curve := Curve.t + and type curve_var := Curve.var + and type field := Impl.Field.t + and type field_var := Impl.Field.Var.t + and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t) : S - with module Impl := Impl - and type curve := Curve.t - and type curve_var := Curve.var - and type curve_scalar := Curve.Scalar.t - and type curve_scalar_var := Curve.Scalar.var - and module Shifted := Curve.Checked.Shifted - and module Message := Message = struct + with module Impl := Impl + and type curve := Curve.t + and type curve_var := Curve.var + and type curve_scalar := Curve.Scalar.t + and type curve_scalar_var := Curve.Scalar.var + and module Shifted := Curve.Checked.Shifted + and module Message := Message = struct open Impl module Signature = struct @@ -228,8 +224,7 @@ module Make | exception _ -> false - [%%if - call_logger] + [%%if call_logger] let verify s pk m = Mina_debug.Call_logger.record_call "Signature_lib.Schnorr.verify" ; @@ -248,8 +243,7 @@ module Make let is_even y = let%map bs = Field.Checked.unpack_full y in - Bitstring_lib.Bitstring.Lsb_first.to_list bs - |> List.hd_exn |> Boolean.not + Bitstring_lib.Bitstring.Lsb_first.to_list bs |> List.hd_exn |> Boolean.not (* returning r_point as a representable point ensures it is nonzero so the nonzero * check does not have to explicitly be performed *) @@ -257,8 +251,8 @@ module Make let%snarkydef verifier (type s) ~equal ~final_check ((module Shifted) as shifted : (module Curve.Checked.Shifted.S with type t = s)) - ((r, s) : Signature.var) (public_key : Public_key.var) - (m : Message.var) = + ((r, s) : Signature.var) (public_key : Public_key.var) (m : Message.var) + = let%bind e = Message.hash_checked m ~public_key ~r in (* s * g - e * public_key *) let%bind e_pk = @@ -304,9 +298,9 @@ module type S = sig module Message : Message_intf - with type curve_scalar := curve_scalar - and type curve := curve - and type field := 
Field.t + with type curve_scalar := curve_scalar + and type curve := curve + and type field := Field.t module Signature : sig type t = Field.t * curve_scalar [@@deriving sexp] @@ -327,40 +321,40 @@ end module Make (Impl : module type of Snark_params_nonconsensus) (Curve : sig - open Impl + open Impl - module Scalar : sig - type t [@@deriving sexp, equal] + module Scalar : sig + type t [@@deriving sexp, equal] - val zero : t + val zero : t - val ( * ) : t -> t -> t + val ( * ) : t -> t -> t - val ( + ) : t -> t -> t + val ( + ) : t -> t -> t - val negate : t -> t - end + val negate : t -> t + end - type t [@@deriving sexp] + type t [@@deriving sexp] - val one : t + val one : t - val ( + ) : t -> t -> t + val ( + ) : t -> t -> t - val negate : t -> t + val negate : t -> t - val scale : t -> Scalar.t -> t + val scale : t -> Scalar.t -> t - val to_affine_exn : t -> Field.t * Field.t + val to_affine_exn : t -> Field.t * Field.t end) (Message : Message_intf - with type curve := Curve.t - and type curve_scalar := Curve.Scalar.t - and type field := Impl.Field.t) : + with type curve := Curve.t + and type curve_scalar := Curve.Scalar.t + and type field := Impl.Field.t) : S - with type curve := Curve.t - and type curve_scalar := Curve.Scalar.t - and module Message := Message = struct + with type curve := Curve.t + and type curve_scalar := Curve.Scalar.t + and module Message := Message = struct module Private_key = struct type t = Curve.Scalar.t [@@deriving sexp] end @@ -425,11 +419,12 @@ module Message = struct let input = let x, y = Tick.Inner_curve.to_affine_exn public_key in Random_oracle.Input.append t - { field_elements= [|x; y|] - ; bitstrings= + { field_elements = [| x; y |] + ; bitstrings = [| Tock.Field.unpack private_key - ; Fold_lib.Fold.( - to_list (string_bits (String.of_char network_id))) |] } + ; Fold_lib.Fold.(to_list (string_bits (String.of_char network_id))) + |] + } in Random_oracle.Input.to_bits ~unpack:Field.unpack input |> Array.of_list |> 
Blake2.bits_to_string |> Blake2.digest_string @@ -441,15 +436,14 @@ module Message = struct let input = let px, py = Inner_curve.to_affine_exn public_key in Random_oracle.Input.append t - {field_elements= [|px; py; r|]; bitstrings= [||]} + { field_elements = [| px; py; r |]; bitstrings = [||] } in let open Random_oracle in hash ~init:Hash_prefix_states.signature (pack_input input) |> Digest.to_bits ~length:Field.size_in_bits |> Inner_curve.Scalar.of_bits - [%%ifdef - consensus_mechanism] + [%%ifdef consensus_mechanism] type var = (Field.Var.t, Boolean.var) Random_oracle.Input.t @@ -457,34 +451,33 @@ module Message = struct let input = let px, py = public_key in Random_oracle.Input.append t - {field_elements= [|px; py; r|]; bitstrings= [||]} + { field_elements = [| px; py; r |]; bitstrings = [||] } in make_checked (fun () -> let open Random_oracle.Checked in hash ~init:Hash_prefix_states.signature (pack_input input) |> Digest.to_bits ~length:Field.size_in_bits - |> Bitstring_lib.Bitstring.Lsb_first.of_list ) + |> Bitstring_lib.Bitstring.Lsb_first.of_list) [%%endif] end module S = Make (Tick) (Tick.Inner_curve) (Message) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] let gen = let open Quickcheck.Let_syntax in let%map pk = Private_key.gen and msg = Tick.Field.gen in - (pk, Random_oracle.Input.field_elements [|msg|]) + (pk, Random_oracle.Input.field_elements [| msg |]) (* Use for reading only. 
*) let message_typ () : (Message.var, Message.t) Tick.Typ.t = let open Tick.Typ in - { alloc= Alloc.return (Random_oracle.Input.field_elements [||]) - ; store= + { alloc = Alloc.return (Random_oracle.Input.field_elements [||]) + ; store = Store.Let_syntax.( - fun {Random_oracle.Input.field_elements; bitstrings} -> + fun { Random_oracle.Input.field_elements; bitstrings } -> let%bind field_elements = Store.all @@ Array.to_list @@ Array.map ~f:(store Tick.Field.typ) field_elements @@ -492,13 +485,14 @@ let message_typ () : (Message.var, Message.t) Tick.Typ.t = let%map bitstrings = Store.all @@ Array.to_list @@ Array.map bitstrings ~f:(fun l -> - Store.all @@ List.map ~f:(store Tick.Boolean.typ) l ) + Store.all @@ List.map ~f:(store Tick.Boolean.typ) l) in - { Random_oracle.Input.field_elements= Array.of_list field_elements - ; bitstrings= Array.of_list bitstrings }) - ; read= + { Random_oracle.Input.field_elements = Array.of_list field_elements + ; bitstrings = Array.of_list bitstrings + }) + ; read = Read.Let_syntax.( - fun {Random_oracle.Input.field_elements; bitstrings} -> + fun { Random_oracle.Input.field_elements; bitstrings } -> let%bind field_elements = Read.all @@ Array.to_list @@ Array.map ~f:(read Tick.Field.typ) field_elements @@ -506,11 +500,13 @@ let message_typ () : (Message.var, Message.t) Tick.Typ.t = let%map bitstrings = Read.all @@ Array.to_list @@ Array.map bitstrings ~f:(fun l -> - Read.all @@ List.map ~f:(read Tick.Boolean.typ) l ) + Read.all @@ List.map ~f:(read Tick.Boolean.typ) l) in - { Random_oracle.Input.field_elements= Array.of_list field_elements - ; bitstrings= Array.of_list bitstrings }) - ; check= (fun _ -> Tick.Checked.return ()) } + { Random_oracle.Input.field_elements = Array.of_list field_elements + ; bitstrings = Array.of_list bitstrings + }) + ; check = (fun _ -> Tick.Checked.return ()) + } let%test_unit "schnorr checked + unchecked" = Quickcheck.test ~trials:5 gen ~f:(fun (pk, msg) -> @@ -518,17 +514,16 @@ let%test_unit "schnorr 
checked + unchecked" = let pubkey = Tick.Inner_curve.(scale one pk) in assert (S.verify s pubkey msg) ; (Tick.Test.test_equal ~sexp_of_t:[%sexp_of: bool] ~equal:Bool.equal - Tick.Typ.( - tuple3 Tick.Inner_curve.typ (message_typ ()) S.Signature.typ) + Tick.Typ.(tuple3 Tick.Inner_curve.typ (message_typ ()) S.Signature.typ) Tick.Boolean.typ (fun (public_key, msg, s) -> let open Tick.Checked in let%bind (module Shifted) = Tick.Inner_curve.Checked.Shifted.create () in - S.Checked.verifies (module Shifted) s public_key msg ) + S.Checked.verifies (module Shifted) s public_key msg) (fun _ -> true)) - (pubkey, msg, s) ) + (pubkey, msg, s)) [%%endif] diff --git a/src/lib/snark_bits/bits.ml b/src/lib/snark_bits/bits.ml index 1b708619304..b41405e9549 100644 --- a/src/lib/snark_bits/bits.ml +++ b/src/lib/snark_bits/bits.ml @@ -1,7 +1,6 @@ (* bits.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel open Fold_lib @@ -68,12 +67,13 @@ module Vector = struct type t = V.t let fold t = - { Fold.fold= + { Fold.fold = (fun ~init ~f -> let rec go acc i = if i = V.length then acc else go (f acc (V.get t i)) (i + 1) in - go init 0 ) } + go init 0) + } let iter t ~f = for i = 0 to V.length - 1 do @@ -95,8 +95,7 @@ module UInt64 : Bits_intf.Convertible_bits with type t := Unsigned.UInt64.t = module UInt32 : Bits_intf.Convertible_bits with type t := Unsigned.UInt32.t = Vector.Make (Vector.UInt32) -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] module type Big_int_intf = sig include Snarky_backendless.Bigint_intf.S @@ -107,21 +106,22 @@ end module Make_field0 (Field : Snarky_backendless.Field_intf.S) (Bigint : Big_int_intf with type field := Field.t) (M : sig - val bit_length : int + val bit_length : int end) : Bits_intf.S with type t = Field.t = struct open M type t = Field.t let fold t = - { Fold.fold= + { Fold.fold = (fun ~init ~f -> let n = Bigint.of_field t in let rec go acc i = if i = bit_length then acc else go (f acc (Bigint.test_bit n i)) 
(i + 1) in - go init 0 ) } + go init 0) + } let iter t ~f = let n = Bigint.of_field t in @@ -151,7 +151,7 @@ module Make_field module Small (Field : Snarky_backendless.Field_intf.S) (Bigint : Big_int_intf with type field := Field.t) (M : sig - val bit_length : int + val bit_length : int end) : Bits_intf.S with type t = Field.t = struct let () = assert (M.bit_length < Field.size_in_bits) @@ -161,26 +161,26 @@ end module Snarkable = struct module Small_bit_vector (Impl : Snarky_backendless.Snark_intf.S) (V : sig - type t + type t - val empty : t + val empty : t - val length : int + val length : int - val get : t -> int -> bool + val get : t -> int -> bool - val set : t -> int -> bool -> t + val set : t -> int -> bool -> t end) : Bits_intf.Snarkable.Small - with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t - and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t - and type boolean_var := Impl.Boolean.var - and type field_var := Impl.Field.Var.t - and type Packed.var = Impl.Field.Var.t - and type Packed.value = V.t - and type Unpacked.var = Impl.Boolean.var list - and type Unpacked.value = V.t - and type comparison_result := Impl.Field.Checked.comparison_result = + with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t + and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t + and type boolean_var := Impl.Boolean.var + and type field_var := Impl.Field.Var.t + and type Packed.var = Impl.Field.Var.t + and type Packed.value = V.t + and type Unpacked.var = Impl.Boolean.var list + and type Unpacked.value = V.t + and type comparison_result := Impl.Field.Checked.comparison_result = struct open Impl @@ -213,16 +213,14 @@ module Snarkable = struct let rec go two_to_the_i i acc = if i = V.length then acc else - let acc = - if V.get t i then Field.add two_to_the_i acc else acc - in + let acc = if V.get t i then Field.add two_to_the_i acc else acc in go (Field.add two_to_the_i two_to_the_i) (i + 1) acc in Store.store (go Field.one 0 Field.zero) in let alloc = Alloc.alloc in let check _ = 
Checked.return () in - {read; store; alloc; check} + { read; store; alloc; check } let size_in_bits = size_in_bits end @@ -232,7 +230,7 @@ module Snarkable = struct let v_of_list vs = List.foldi vs ~init:V.empty ~f:(fun i acc b -> - if i < V.length then V.set acc i b else acc ) + if i < V.length then V.set acc i b else acc) let pack_var = Field.Var.project @@ -290,11 +288,11 @@ module Snarkable = struct let%snarkydef assert_equal_var (n : Unpacked.var) (n' : Unpacked.var) = Field.Checked.Assert.equal (pack_var n) (pack_var n') - let if_ (cond : Boolean.var) ~(then_ : Unpacked.var) - ~(else_ : Unpacked.var) : (Unpacked.var, _) Checked.t = + let if_ (cond : Boolean.var) ~(then_ : Unpacked.var) ~(else_ : Unpacked.var) + : (Unpacked.var, _) Checked.t = match List.map2 then_ else_ ~f:(fun then_ else_ -> - Boolean.if_ cond ~then_ ~else_ ) + Boolean.if_ cond ~then_ ~else_) with | Ok result -> Checked.List.all result @@ -309,7 +307,7 @@ module Snarkable = struct module Field_backed (Impl : Snarky_backendless.Snark_intf.S) (M : sig - val bit_length : int + val bit_length : int end) = struct open Impl @@ -366,13 +364,13 @@ module Snarkable = struct module Field (Impl : Snarky_backendless.Snark_intf.S) : Bits_intf.Snarkable.Lossy - with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t - and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t - and type boolean_var := Impl.Boolean.var - and type Packed.var = Impl.Field.Var.t - and type Packed.value = Impl.Field.t - and type Unpacked.var = Impl.Boolean.var list - and type Unpacked.value = Impl.Field.t = + with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t + and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t + and type boolean_var := Impl.Boolean.var + and type Packed.var = Impl.Field.Var.t + and type Packed.value = Impl.Field.t + and type Unpacked.var = Impl.Boolean.var list + and type Unpacked.value = Impl.Field.t = Field_backed (Impl) (struct @@ -381,16 +379,16 @@ module Snarkable = struct module Small (Impl : 
Snarky_backendless.Snark_intf.S) (M : sig - val bit_length : int + val bit_length : int end) : Bits_intf.Snarkable.Faithful - with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t - and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t - and type boolean_var := Impl.Boolean.var - and type Packed.var = Impl.Field.Var.t - and type Packed.value = Impl.Field.t - and type Unpacked.var = Impl.Boolean.var list - and type Unpacked.value = Impl.Field.t = struct + with type ('a, 'b) typ := ('a, 'b) Impl.Typ.t + and type ('a, 'b) checked := ('a, 'b) Impl.Checked.t + and type boolean_var := Impl.Boolean.var + and type Packed.var = Impl.Field.Var.t + and type Packed.value = Impl.Field.t + and type Unpacked.var = Impl.Boolean.var list + and type Unpacked.value = Impl.Field.t = struct let () = assert (M.bit_length < Impl.Field.size_in_bits) include Field_backed (Impl) (M) @@ -407,7 +405,7 @@ end module Make_unpacked (Impl : Snarky_backendless.Snark_intf.S) (M : sig - val bit_length : int + val bit_length : int end) = struct open Impl diff --git a/src/lib/snark_bits/bits_intf.ml b/src/lib/snark_bits/bits_intf.ml index 6de40ce6f14..2bfb0d1aaff 100644 --- a/src/lib/snark_bits/bits_intf.ml +++ b/src/lib/snark_bits/bits_intf.ml @@ -1,7 +1,6 @@ (* bits_intf.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Fold_lib @@ -27,8 +26,7 @@ module type Convertible_bits = sig val of_bits : bool list -> t end -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Tuple_lib diff --git a/src/lib/snark_keys/gen_keys/gen_keys.ml b/src/lib/snark_keys/gen_keys/gen_keys.ml index 351d46d0251..3b7f0b62d35 100644 --- a/src/lib/snark_keys/gen_keys/gen_keys.ml +++ b/src/lib/snark_keys/gen_keys/gen_keys.ml @@ -32,8 +32,7 @@ let from_disk_expr ~loc id = Pickles.Verification_key.load ~cache:Cache_dir.cache (Sexp.of_string_conv_exn [%e - estring - (Pickles.Verification_key.Id.sexp_of_t id |> Sexp.to_string)] + estring (Pickles.Verification_key.Id.sexp_of_t id |> Sexp.to_string)] 
Pickles.Verification_key.Id.t_of_sexp) >>| Or_error.ok_exn in @@ -71,56 +70,56 @@ let handle_dirty dirty = Stdlib.exit 0 ; match dirty with | `Generated_something | `Locally_generated -> ( - (* If we generated any keys, then we need to make sure to upload these keys - * to some central store to keep our builds compatible with one-another. - * - * We used to have a process where we manually upload keys whenever we want - * to persist a new change. This is an attempt to make that process - * automatic. - * - * We don't want to force an upload on every change as during development - * you could churn on changes. Instead, we force uploads on CI jobs that - * build testnet artifacts (as these are the binaries we want to make sure - * we can retrieve keys for). - * Uploads occur out-of-process in CI. - * - * See the background section of https://bkase.dev/posts/ocaml-writer - * for more info on how this system works. - * - * NOTE: This behaviour is overriden or external contributors, because they - * cannot upload new keys if they have modified the snark. See branch - * referencing "CIRCLE_PR_USERNAME" below. - *) - match (Sys.getenv "CI", Sys.getenv "DUNE_PROFILE") with - | Some _, Some _ when Option.is_some (Sys.getenv "CIRCLE_PR_USERNAME") -> - (* External contributors cannot upload new keys to AWS, but we would - still like to run CI for their pull requests if they have modified the - snark. - *) - ( match dirty with - | `Generated_something -> - Format.eprintf - "No keys were found in the cache, but this pull-request is from \ - an external contributor.@ Generated fresh keys for this \ - build.@." - | `Locally_generated -> - Format.eprintf - "Only locally-generated keys were found in the cache, but this \ - pull-request is from an external contributor.@ Using the local \ - keys@." - | `Cache_hit -> - (* Excluded above. 
*) - assert false ) ; - Deferred.unit - | Some _, Some profile - when String.is_substring ~substring:"testnet" profile -> - (* We are intentionally aborting the build here with a special error code - * because we do not want builds to succeed if keys are not uploaded. - * - * Exit code is 0xc1 for "CI" *) - exit 0xc1 - | Some _, Some _ | _, None | None, _ -> - Deferred.unit ) + (* If we generated any keys, then we need to make sure to upload these keys + * to some central store to keep our builds compatible with one-another. + * + * We used to have a process where we manually upload keys whenever we want + * to persist a new change. This is an attempt to make that process + * automatic. + * + * We don't want to force an upload on every change as during development + * you could churn on changes. Instead, we force uploads on CI jobs that + * build testnet artifacts (as these are the binaries we want to make sure + * we can retrieve keys for). + * Uploads occur out-of-process in CI. + * + * See the background section of https://bkase.dev/posts/ocaml-writer + * for more info on how this system works. + * + * NOTE: This behaviour is overriden or external contributors, because they + * cannot upload new keys if they have modified the snark. See branch + * referencing "CIRCLE_PR_USERNAME" below. + *) + match (Sys.getenv "CI", Sys.getenv "DUNE_PROFILE") with + | Some _, Some _ when Option.is_some (Sys.getenv "CIRCLE_PR_USERNAME") -> + (* External contributors cannot upload new keys to AWS, but we would + still like to run CI for their pull requests if they have modified the + snark. + *) + ( match dirty with + | `Generated_something -> + Format.eprintf + "No keys were found in the cache, but this pull-request is \ + from an external contributor.@ Generated fresh keys for this \ + build.@." 
+ | `Locally_generated -> + Format.eprintf + "Only locally-generated keys were found in the cache, but this \ + pull-request is from an external contributor.@ Using the \ + local keys@." + | `Cache_hit -> + (* Excluded above. *) + assert false ) ; + Deferred.unit + | Some _, Some profile + when String.is_substring ~substring:"testnet" profile -> + (* We are intentionally aborting the build here with a special error code + * because we do not want builds to succeed if keys are not uploaded. + * + * Exit code is 0xc1 for "CI" *) + exit 0xc1 + | Some _, Some _ | _, None | None, _ -> + Deferred.unit ) | `Cache_hit -> Deferred.unit @@ -142,7 +141,7 @@ let str ~proof_level ~constraint_constants ~loc = handle_dirty Pickles.( List.map - [T.cache_handle; B.cache_handle] + [ T.cache_handle; B.cache_handle ] ~f:Cache_handle.generate_or_load |> List.reduce_exn ~f:Dirty.( + )) in diff --git a/src/lib/snark_keys_header/snark_keys_header.ml b/src/lib/snark_keys_header/snark_keys_header.ml index 761fe6a9f14..642ff1deda1 100644 --- a/src/lib/snark_keys_header/snark_keys_header.ml +++ b/src/lib/snark_keys_header/snark_keys_header.ml @@ -22,7 +22,7 @@ module UInt64 = struct sprintf "Snark_keys_header.UInt64.of_yojson: Could not parse string \ as UInt64: %s" - (Error.to_string_hum err) ) + (Error.to_string_hum err)) | _ -> Error "Snark_keys_header.UInt64.of_yojson: Expected a string" @@ -44,9 +44,9 @@ module Kind = struct |} *) type t = - { type_: string [@key "type"] + { type_ : string [@key "type"] (** Identifies the type of data that the file contains *) - ; identifier: string + ; identifier : string (** Identifies the specific purpose of the file's data, in a human-readable format *) @@ -63,15 +63,15 @@ module Constraint_constants = struct let to_yojson t : Yojson.Safe.t = match t with | Log_2 i -> - `Assoc [("two_to_the", `Int i)] + `Assoc [ ("two_to_the", `Int i) ] | Txns_per_second_x10 i -> - `Assoc [("txns_per_second_x10", `Int i)] + `Assoc [ ("txns_per_second_x10", `Int i) ] 
let of_yojson (json : Yojson.Safe.t) = match json with - | `Assoc [("two_to_the", `Int i)] -> + | `Assoc [ ("two_to_the", `Int i) ] -> Ok (Log_2 i) - | `Assoc [("txns_per_second_x10", `Int i)] -> + | `Assoc [ ("txns_per_second_x10", `Int i) ] -> Ok (Txns_per_second_x10 i) | `Assoc _ -> Error @@ -87,9 +87,10 @@ module Constraint_constants = struct module Fork_config = struct (** Fork data *) type t = - { previous_state_hash: string - ; previous_length: int - ; previous_global_slot: int } + { previous_state_hash : string + ; previous_length : int + ; previous_global_slot : int + } [@@deriving yojson, sexp, ord, equal] let opt_to_yojson t : Yojson.Safe.t = @@ -105,38 +106,42 @@ module Constraint_constants = struct (** The constants used in the constraint system. *) type t = - { sub_windows_per_window: int - ; ledger_depth: int - ; work_delay: int - ; block_window_duration_ms: int - ; transaction_capacity: Transaction_capacity.t - ; pending_coinbase_depth: int - ; coinbase_amount: UInt64.t - ; supercharged_coinbase_factor: int - ; account_creation_fee: UInt64.t - ; fork: - (Fork_config.t option[@to_yojson Fork_config.opt_to_yojson] - [@of_yojson Fork_config.opt_of_yojson]) } + { sub_windows_per_window : int + ; ledger_depth : int + ; work_delay : int + ; block_window_duration_ms : int + ; transaction_capacity : Transaction_capacity.t + ; pending_coinbase_depth : int + ; coinbase_amount : UInt64.t + ; supercharged_coinbase_factor : int + ; account_creation_fee : UInt64.t + ; fork : + (Fork_config.t option + [@to_yojson Fork_config.opt_to_yojson] + [@of_yojson Fork_config.opt_of_yojson]) + } [@@deriving yojson, sexp, ord, equal] end module Commits = struct (** Commit identifiers *) - type t = {mina: string; marlin: string} [@@deriving yojson, sexp, ord, equal] + type t = { mina : string; marlin : string } + [@@deriving yojson, sexp, ord, equal] end let header_version = 1 (** Header contents *) type t = - { header_version: int - ; kind: Kind.t - ; constraint_constants: 
Constraint_constants.t - ; commits: Commits.t - ; length: int - ; commit_date: string - ; constraint_system_hash: string - ; identifying_hash: string } + { header_version : int + ; kind : Kind.t + ; constraint_constants : Constraint_constants.t + ; commits : Commits.t + ; length : int + ; commit_date : string + ; constraint_system_hash : string + ; identifying_hash : string + } [@@deriving yojson, sexp, ord, equal] let prefix = "MINA_SNARK_KEYS\n" @@ -147,7 +152,7 @@ let parse_prefix (lexbuf : Lexing.lexbuf) = let open Or_error.Let_syntax in Result.map_error ~f:(fun err -> Error.tag_arg err "Could not read prefix" ("prefix", prefix) - [%sexp_of: string * string] ) + [%sexp_of: string * string]) @@ Or_error.try_with_join (fun () -> (* This roughly mirrors the behavior of [Yojson.Safe.read_ident], except that we have a known fixed length to parse, and that it is a @@ -191,12 +196,13 @@ let parse_prefix (lexbuf : Lexing.lexbuf) = (* Update the positions to match our end state *) lexbuf.lex_curr_pos <- lexbuf.lex_curr_pos + prefix_len ; lexbuf.lex_last_pos <- lexbuf.lex_last_pos ; - lexbuf.lex_curr_p - <- { lexbuf.lex_curr_p with - pos_bol= lexbuf.lex_curr_p.pos_bol + prefix_len - ; pos_cnum= lexbuf.lex_curr_p.pos_cnum + prefix_len } ; + lexbuf.lex_curr_p <- + { lexbuf.lex_curr_p with + pos_bol = lexbuf.lex_curr_p.pos_bol + prefix_len + ; pos_cnum = lexbuf.lex_curr_p.pos_cnum + prefix_len + } ; (* This matches the action given by [Yojson.Safe.read_ident]. *) - lexbuf.lex_last_action <- 1 ) + lexbuf.lex_last_action <- 1) let parse_lexbuf (lexbuf : Lexing.lexbuf) = let open Or_error.Let_syntax in @@ -208,31 +214,34 @@ let parse_lexbuf (lexbuf : Lexing.lexbuf) = 'greedy' parsing that will attempt to continue and read the file's contents beyond the header. 
*) - Yojson.Safe.read_t yojson_parsebuffer lexbuf ) + Yojson.Safe.read_t yojson_parsebuffer lexbuf) let%test_module "Check parsing of header" = ( module struct let valid_header = - { header_version= 1 - ; kind= {type_= "type"; identifier= "identifier"} - ; constraint_constants= - { sub_windows_per_window= 4 - ; ledger_depth= 8 - ; work_delay= 1000 - ; block_window_duration_ms= 1000 - ; transaction_capacity= Log_2 3 - ; pending_coinbase_depth= 12 - ; coinbase_amount= Unsigned.UInt64.of_int 1 - ; supercharged_coinbase_factor= 1 - ; account_creation_fee= Unsigned.UInt64.of_int 1 - ; fork= None } - ; commits= - { mina= "7e1fb2cd9138af1d0f24e78477efd40a2a0fcd07" - ; marlin= "75836c41fc4947acce9c938da1b2f506843e90ed" } - ; length= 4096 - ; commit_date= "2020-01-01 00:00:00.000000Z" - ; constraint_system_hash= "ABCDEF1234567890" - ; identifying_hash= "ABCDEF1234567890" } + { header_version = 1 + ; kind = { type_ = "type"; identifier = "identifier" } + ; constraint_constants = + { sub_windows_per_window = 4 + ; ledger_depth = 8 + ; work_delay = 1000 + ; block_window_duration_ms = 1000 + ; transaction_capacity = Log_2 3 + ; pending_coinbase_depth = 12 + ; coinbase_amount = Unsigned.UInt64.of_int 1 + ; supercharged_coinbase_factor = 1 + ; account_creation_fee = Unsigned.UInt64.of_int 1 + ; fork = None + } + ; commits = + { mina = "7e1fb2cd9138af1d0f24e78477efd40a2a0fcd07" + ; marlin = "75836c41fc4947acce9c938da1b2f506843e90ed" + } + ; length = 4096 + ; commit_date = "2020-01-01 00:00:00.000000Z" + ; constraint_system_hash = "ABCDEF1234567890" + ; identifying_hash = "ABCDEF1234567890" + } let valid_header_string = Yojson.Safe.to_string (to_yojson valid_header) @@ -259,8 +268,7 @@ let%test_module "Check parsing of header" = let partial_prefix = String.sub prefix ~pos:0 ~len:(prefix_len - 1) ^ " " in - parse_lexbuf - (Lexing.from_string (partial_prefix ^ valid_header_string)) + parse_lexbuf (Lexing.from_string (partial_prefix ^ valid_header_string)) |> Or_error.is_error 
let%test "doesn't parse with short file" = @@ -326,7 +334,7 @@ let%test_module "Check parsing of header" = Bytes.From_string.blit ~src:str ~src_pos:!offset ~dst:buffer ~dst_pos:0 ~len ; offset := !offset + len ; - len ) ) + len )) in (* Load the initial content into the buffer *) lexbuf.refill_buff lexbuf ; @@ -357,7 +365,9 @@ let write_with_header ~expected_max_size_log2 ~append_data header filename = failwith "Snark_keys_header.write_header: expected_max_size_log2 is too large, \ the resulting length underflows" ; - let header_string = Yojson.Safe.to_string (to_yojson {header with length}) in + let header_string = + Yojson.Safe.to_string (to_yojson { header with length }) + in (* We look for the "length" field first, to ensure that we find our length and not some other data that happens to match it. Due to the JSON-encoding, we will only find the first field named "length", which is @@ -381,13 +391,13 @@ let write_with_header ~expected_max_size_log2 ~append_data header filename = (* Newline, to allow [head -n 2 path/to/file | tail -n 1] to easily extract the header. *) - Out_channel.output_char out_channel '\n' ) ; + Out_channel.output_char out_channel '\n') ; append_data filename ; (* Core doesn't let us open a file without appending or truncating, so we use stdlib instead. 
*) let out_channel = - Stdlib.open_out_gen [Open_wronly; Open_binary] 0 filename + Stdlib.open_out_gen [ Open_wronly; Open_binary ] 0 filename in let true_length = Out_channel.length out_channel |> Int.of_int64_exn in if true_length > length then @@ -444,7 +454,5 @@ let read_with_header ~read_data filename = (("header length", header.length), ("file length", file_length)) [%sexp_of: (string * int) * (string * int)] in - let%map data = - Or_error.try_with (fun () -> read_data ~offset filename) - in - (header, data) ) + let%map data = Or_error.try_with (fun () -> read_data ~offset filename) in + (header, data)) diff --git a/src/lib/snark_params/snark_intf.ml b/src/lib/snark_params/snark_intf.ml index 80cb78b09c2..28c226cb260 100644 --- a/src/lib/snark_params/snark_intf.ml +++ b/src/lib/snark_params/snark_intf.ml @@ -15,15 +15,15 @@ module type S = sig module Bits : sig module type Faithful = Bits_intf.Snarkable.Faithful - with type ('a, 'b) typ := ('a, 'b) Typ.t - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type boolean_var := Boolean.var + with type ('a, 'b) typ := ('a, 'b) Typ.t + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var module type Lossy = Bits_intf.Snarkable.Lossy - with type ('a, 'b) typ := ('a, 'b) Typ.t - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type boolean_var := Boolean.var + with type ('a, 'b) typ := ('a, 'b) Typ.t + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var end end end diff --git a/src/lib/snark_params/snark_params.ml b/src/lib/snark_params/snark_params.ml index 4cad1617345..e88b50791e0 100644 --- a/src/lib/snark_params/snark_params.ml +++ b/src/lib/snark_params/snark_params.ml @@ -16,23 +16,23 @@ module Make_snarkable (Impl : Snarky_backendless.Snark_intf.S) = struct module Bits = struct module type Lossy = Bits_intf.Snarkable.Lossy - with type ('a, 'b) typ := ('a, 'b) Typ.t - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type 
boolean_var := Boolean.var + with type ('a, 'b) typ := ('a, 'b) Typ.t + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var module type Faithful = Bits_intf.Snarkable.Faithful - with type ('a, 'b) typ := ('a, 'b) Typ.t - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type boolean_var := Boolean.var + with type ('a, 'b) typ := ('a, 'b) Typ.t + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var module type Small = Bits_intf.Snarkable.Small - with type ('a, 'b) typ := ('a, 'b) Typ.t - and type ('a, 'b) checked := ('a, 'b) Checked.t - and type boolean_var := Boolean.var - and type comparison_result := Field.Checked.comparison_result - and type field_var := Field.Var.t + with type ('a, 'b) typ := ('a, 'b) Typ.t + and type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var + and type comparison_result := Field.Checked.comparison_result + and type field_var := Field.Var.t end end @@ -58,7 +58,7 @@ let%test_unit "group-map test" = (module M) ~params (M.Field.constant t) in - fun () -> M.As_prover.(read_var x, read_var y) ) + fun () -> M.As_prover.(read_var x, read_var y)) () |> Or_error.ok_exn in @@ -71,11 +71,9 @@ let%test_unit "group-map test" = + (Tick0.Inner_curve.Params.a * x) + Tick0.Inner_curve.Params.b) Tick0.Field.(y * y) ; - [%test_eq: Tick0.Field.t * Tick0.Field.t] checked_output actual ) + [%test_eq: Tick0.Field.t * Tick0.Field.t] checked_output actual) -module Make_inner_curve_scalar - (Impl : Snark_intf.S) - (Other_impl : Snark_intf.S) = +module Make_inner_curve_scalar (Impl : Snark_intf.S) (Other_impl : Snark_intf.S) = struct module T = Other_impl.Field @@ -142,7 +140,8 @@ module Tock = struct module Inner_curve = struct include Tock0.Inner_curve - include Sexpable.Of_sexpable (struct + include Sexpable.Of_sexpable + (struct type t = Field.t * Field.t [@@deriving sexp] end) (struct @@ -176,8 +175,8 @@ module Tick = struct include ( Tick0 : module type of 
Tick0 - with module Field := Tick0.Field - and module Inner_curve := Tick0.Inner_curve ) + with module Field := Tick0.Field + and module Inner_curve := Tick0.Inner_curve ) module Field = struct include Tick0.Field @@ -192,7 +191,8 @@ module Tick = struct module Inner_curve = struct include Crypto_params.Tick.Inner_curve - include Sexpable.Of_sexpable (struct + include Sexpable.Of_sexpable + (struct type t = Field.t * Field.t [@@deriving sexp] end) (struct @@ -215,9 +215,9 @@ module Tick = struct (fun p1 p2 -> let c = Run.make_checked (fun () -> - Pickles.Step_main_inputs.Ops.add_fast p1 p2 ) + Pickles.Step_main_inputs.Ops.add_fast p1 p2) in - Tick0.with_state (As_prover.return ()) c ) + Tick0.with_state (As_prover.return ()) c) end) let add_known_unsafe t x = add_unsafe t (constant x) diff --git a/src/lib/snark_params/snark_util.ml b/src/lib/snark_params/snark_util.ml index aa963211b8f..470105251da 100644 --- a/src/lib/snark_params/snark_util.ml +++ b/src/lib/snark_params/snark_util.ml @@ -38,8 +38,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let nth_bit x ~n = (x lsr n) land 1 = 1 - let apply_mask mask bs = - Checked.all (List.map2_exn mask bs ~f:Boolean.( && )) + let apply_mask mask bs = Checked.all (List.map2_exn mask bs ~f:Boolean.( && )) let pack_unsafe (bs0 : Boolean.var list) = let n = List.length bs0 in @@ -68,8 +67,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct map (read_var n) ~f:(fun n -> List.init total_length ~f:(fun i -> Bigint.( - compare (of_field (Field.of_int i)) (of_field n) < 0) ) - )) + compare (of_field (Field.of_int i)) (of_field n) < 0)))) in let%map () = Field.Checked.Assert.equal @@ -147,7 +145,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let y = random () in let (), (less, less_or_equal) = run_and_check - (let%map {less; less_or_equal} = + (let%map { less; less_or_equal } = Field.Checked.compare ~bit_length (Field.Var.constant x) (Field.Var.constant y) in @@ -172,7 +170,8 @@ 
module Make (Impl : Snarky_backendless.Snark_intf.S) = struct (Checked.all_unit [ boolean_assert_lte Boolean.false_ Boolean.false_ ; boolean_assert_lte Boolean.false_ Boolean.true_ - ; boolean_assert_lte Boolean.true_ Boolean.true_ ]) + ; boolean_assert_lte Boolean.true_ Boolean.true_ + ]) ()) ; assert ( Or_error.is_error @@ -182,21 +181,21 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let decreasing bs = check (assert_decreasing (List.map ~f:Boolean.var_of_value bs)) () in - Or_error.ok_exn (decreasing [true; true; true; false]) ; - Or_error.ok_exn (decreasing [true; true; false; false]) ; - assert (Or_error.is_error (decreasing [true; true; false; true])) + Or_error.ok_exn (decreasing [ true; true; true; false ]) ; + Or_error.ok_exn (decreasing [ true; true; false; false ]) ; + assert (Or_error.is_error (decreasing [ true; true; false; true ])) let%test_unit "n_ones" = let total_length = 6 in let test n = let t = n_ones ~total_length (Field.Var.constant (Field.of_int n)) in let handle_with (resp : bool list) = - handle t (fun (With {request; respond}) -> + handle t (fun (With { request; respond }) -> match request with | N_ones -> respond (Provide resp) | _ -> - unhandled ) + unhandled) in let correct = Int.pow 2 n - 1 in let to_bits k = @@ -221,34 +220,34 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let f k bs = assert (num_bits_upper_bound_unchecked (Field.project bs) = k) in - f 3 [true; true; true; false; false] ; - f 4 [true; true; true; true; false] ; - f 3 [true; false; true; false; false] ; - f 5 [true; false; true; false; true] + f 3 [ true; true; true; false; false ] ; + f 4 [ true; true; true; true; false ] ; + f 3 [ true; false; true; false; false ] ; + f 5 [ true; false; true; false; true ] (*let%test_unit "num_bits_upper_bound" = - let max_length = Field.size_in_bits - 1 in - let test x = - let handle_with resp = - handle - (num_bits_upper_bound ~max_length (Field.Var.constant x)) - (fun (With {request; 
respond}) -> - match request with - | Num_bits_upper_bound -> respond (Field.of_int resp) - | _ -> unhandled) + let max_length = Field.size_in_bits - 1 in + let test x = + let handle_with resp = + handle + (num_bits_upper_bound ~max_length (Field.Var.constant x)) + (fun (With {request; respond}) -> + match request with + | Num_bits_upper_bound -> respond (Field.of_int resp) + | _ -> unhandled) + in + let true_answer = num_bits_upper_bound_unchecked x in + for i = 0 to true_answer - 1 do + if check (handle_with i) () + then begin + let n = Bigint.of_field x in + failwithf !"Shouldn't have passed: x=%s, i=%d" + (String.init max_length ~f:(fun j -> if Bigint.test_bit n j then '1' else '0')) + i (); + end; + done; + assert (check (handle_with true_answer) ()) in - let true_answer = num_bits_upper_bound_unchecked x in - for i = 0 to true_answer - 1 do - if check (handle_with i) () - then begin - let n = Bigint.of_field x in - failwithf !"Shouldn't have passed: x=%s, i=%d" - (String.init max_length ~f:(fun j -> if Bigint.test_bit n j then '1' else '0')) - i (); - end; - done; - assert (check (handle_with true_answer) ()) - in - test (random_n_bit_field_elt max_length)*) + test (random_n_bit_field_elt max_length)*) end ) end diff --git a/src/lib/snark_work_lib/work.ml b/src/lib/snark_work_lib/work.ml index 1cb6b7096ec..9bb9550d4d2 100644 --- a/src/lib/snark_work_lib/work.ml +++ b/src/lib/snark_work_lib/work.ml @@ -69,7 +69,7 @@ module Single = struct let%map p1, p2 = tuple2 gen_proof gen_proof in Merge (statement, p1, p2) in - union [gen_transition; gen_merge] + union [ gen_transition; gen_merge ] end end @@ -80,24 +80,27 @@ module Spec = struct module V1 = struct type 'single t = - { instances: 'single One_or_two.Stable.V1.t - ; fee: Currency.Fee.Stable.V1.t } + { instances : 'single One_or_two.Stable.V1.t + ; fee : Currency.Fee.Stable.V1.t + } [@@deriving fields, sexp, to_yojson] - let to_latest single_latest {instances; fee} = - {instances= 
One_or_two.Stable.V1.to_latest single_latest instances; fee} + let to_latest single_latest { instances; fee } = + { instances = One_or_two.Stable.V1.to_latest single_latest instances + ; fee + } - let of_latest single_latest {instances; fee} = + let of_latest single_latest { instances; fee } = let open Result.Let_syntax in let%map instances = One_or_two.Stable.V1.of_latest single_latest instances in - {instances; fee} + { instances; fee } end end] type 'single t = 'single Stable.Latest.t = - {instances: 'single One_or_two.t; fee: Currency.Fee.t} + { instances : 'single One_or_two.t; fee : Currency.Fee.t } [@@deriving fields, sexp, to_yojson] end @@ -106,12 +109,13 @@ module Result = struct module Stable = struct module V1 = struct type ('spec, 'single) t = - { proofs: 'single One_or_two.Stable.V1.t - ; metrics: - (Core.Time.Stable.Span.V1.t * [`Transition | `Merge]) + { proofs : 'single One_or_two.Stable.V1.t + ; metrics : + (Core.Time.Stable.Span.V1.t * [ `Transition | `Merge ]) One_or_two.Stable.V1.t - ; spec: 'spec - ; prover: Signature_lib.Public_key.Compressed.Stable.V1.t } + ; spec : 'spec + ; prover : Signature_lib.Public_key.Compressed.Stable.V1.t + } [@@deriving fields] end end] diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml index 7cb97f24d97..afd415fba09 100644 --- a/src/lib/snark_worker/functor.ml +++ b/src/lib/snark_worker/functor.ml @@ -4,33 +4,31 @@ open Mina_base type Structured_log_events.t += | Merge_snark_generated of - { time: - (Time.Span.t[@to_yojson - fun total -> `String (Time.Span.to_string_hum total)] - [@of_yojson - function - | `String time -> - Ok (Time.Span.of_string time) - | _ -> - Error - "Snark_worker.Functor: Could not parse timespan"]) + { time : + (Time.Span.t + [@to_yojson fun total -> `String (Time.Span.to_string_hum total)] + [@of_yojson + function + | `String time -> + Ok (Time.Span.of_string time) + | _ -> + Error "Snark_worker.Functor: Could not parse timespan"]) } - [@@deriving 
register_event {msg= "Merge SNARK generated in $time"}] + [@@deriving register_event { msg = "Merge SNARK generated in $time" }] type Structured_log_events.t += | Base_snark_generated of - { time: - (Time.Span.t[@to_yojson - fun total -> `String (Time.Span.to_string_hum total)] - [@of_yojson - function - | `String time -> - Ok (Time.Span.of_string time) - | _ -> - Error - "Snark_worker.Functor: Could not parse timespan"]) + { time : + (Time.Span.t + [@to_yojson fun total -> `String (Time.Span.to_string_hum total)] + [@of_yojson + function + | `String time -> + Ok (Time.Span.of_string time) + | _ -> + Error "Snark_worker.Functor: Could not parse timespan"]) } - [@@deriving register_event {msg= "Base SNARK generated in $time"}] + [@@deriving register_event { msg = "Base SNARK generated in $time" }] module Make (Inputs : Intf.Inputs_intf) : Intf.S0 with type ledger_proof := Inputs.Ledger_proof.t = struct @@ -65,7 +63,7 @@ module Make (Inputs : Intf.Inputs_intf) : end let perform (s : Worker_state.t) public_key - ({instances; fee} as spec : Work.Spec.t) = + ({ instances; fee } as spec : Work.Spec.t) = One_or_two.Deferred_result.map instances ~f:(fun w -> let open Deferred.Or_error.Let_syntax in let%map proof, time = @@ -75,18 +73,20 @@ module Make (Inputs : Intf.Inputs_intf) : in ( proof , (time, match w with Transition _ -> `Transition | Merge _ -> `Merge) - ) ) + )) |> Deferred.Or_error.map ~f:(function | `One (proof1, metrics1) -> - { Snark_work_lib.Work.Result.proofs= `One proof1 - ; metrics= `One metrics1 + { Snark_work_lib.Work.Result.proofs = `One proof1 + ; metrics = `One metrics1 ; spec - ; prover= public_key } + ; prover = public_key + } | `Two ((proof1, metrics1), (proof2, metrics2)) -> - { Snark_work_lib.Work.Result.proofs= `Two (proof1, proof2) - ; metrics= `Two (metrics1, metrics2) + { Snark_work_lib.Work.Result.proofs = `Two (proof1, proof2) + ; metrics = `Two (metrics1, metrics2) ; spec - ; prover= public_key } ) + ; prover = public_key + }) let 
dispatch rpc shutdown_on_disconnect query address = let%map res = @@ -100,7 +100,8 @@ module Make (Inputs : Intf.Inputs_intf) : Mina_compile_config.rpc_heartbeat_timeout_sec) ~send_every: (Time_ns.Span.of_sec - Mina_compile_config.rpc_heartbeat_send_every_sec) ()) + Mina_compile_config.rpc_heartbeat_send_every_sec) + ()) (Tcp.Where_to_connect.of_host_and_port address) (fun conn -> Rpc.Rpc.dispatch rpc conn query) in @@ -126,12 +127,12 @@ module Make (Inputs : Intf.Inputs_intf) : Mina_metrics.( Cryptography.Snark_work_histogram.observe Cryptography.snark_work_merge_time_sec (Time.Span.to_sec time)) ; - [%str_log info] (Merge_snark_generated {time}) + [%str_log info] (Merge_snark_generated { time }) | `Transition -> Mina_metrics.( Cryptography.Snark_work_histogram.observe Cryptography.snark_work_base_time_sec (Time.Span.to_sec time)) ; - [%str_log info] (Base_snark_generated {time}) ) + [%str_log info] (Base_snark_generated { time })) let main (module Rpcs_versioned : Intf.Rpcs_versioned_S @@ -168,7 +169,7 @@ module Make (Inputs : Intf.Inputs_intf) : let%bind cwd = Sys.getcwd () in [%log debug] !"Snark worker working directory $dir" - ~metadata:[("dir", `String cwd)] ; + ~metadata:[ ("dir", `String cwd) ] ; let path = "snark_coordinator" in match%bind Sys.file_exists path with | `Yes -> ( @@ -180,7 +181,7 @@ module Make (Inputs : Intf.Inputs_intf) : in [%log debug] !"Snark worker using daemon $addr" - ~metadata:[("addr", `String (Host_and_port.to_string daemon_address))] ; + ~metadata:[ ("addr", `String (Host_and_port.to_string daemon_address)) ] ; match%bind dispatch Rpcs_versioned.Get_work.Latest.rpc shutdown_on_disconnect () daemon_address @@ -194,7 +195,7 @@ module Make (Inputs : Intf.Inputs_intf) : in (* No work to be done -- quietly take a brief nap *) [%log info] "No jobs available. 
Napping for $time seconds" - ~metadata:[("time", `Float random_delay)] ; + ~metadata:[ ("time", `Float random_delay) ] ; let%bind () = wait ~sec:random_delay () in go () | Ok (Some (work, public_key)) -> ( @@ -206,7 +207,8 @@ module Make (Inputs : Intf.Inputs_intf) : ; ( "work_ids" , Transaction_snark_work.Statement.compact_json (One_or_two.map (Work.Spec.instances work) - ~f:Work.Single.Spec.statement) ) ] ; + ~f:Work.Single.Spec.statement) ) + ] ; let%bind () = wait () in (* Pause to wait for stdout to flush *) match%bind perform state public_key work with @@ -214,15 +216,14 @@ module Make (Inputs : Intf.Inputs_intf) : log_and_retry "performing work" e (retry_pause 10.) go | Ok result -> emit_proof_metrics result.metrics logger ; - [%log info] - "Submitted completed SNARK work $work_ids to $address" + [%log info] "Submitted completed SNARK work $work_ids to $address" ~metadata: - [ ( "address" - , `String (Host_and_port.to_string daemon_address) ) + [ ("address", `String (Host_and_port.to_string daemon_address)) ; ( "work_ids" , Transaction_snark_work.Statement.compact_json (One_or_two.map (Work.Spec.instances work) - ~f:Work.Single.Spec.statement) ) ] ; + ~f:Work.Single.Spec.statement) ) + ] ; let rec submit_work () = match%bind dispatch Rpcs_versioned.Submit_work.Latest.rpc @@ -244,27 +245,28 @@ module Make (Inputs : Intf.Inputs_intf) : Command.async ~summary:"Snark worker" (let open Command.Let_syntax in let%map_open daemon_port = - flag "--daemon-address" ~aliases:["daemon-address"] + flag "--daemon-address" ~aliases:[ "daemon-address" ] (required (Arg_type.create Host_and_port.of_string)) ~doc:"HOST-AND-PORT address daemon is listening on" and proof_level = - flag "--proof-level" ~aliases:["proof-level"] + flag "--proof-level" ~aliases:[ "proof-level" ] (optional (Arg_type.create Genesis_constants.Proof_level.of_string)) ~doc:"full|check|none" and shutdown_on_disconnect = - flag "--shutdown-on-disconnect" ~aliases:["shutdown-on-disconnect"] + flag 
"--shutdown-on-disconnect" + ~aliases:[ "shutdown-on-disconnect" ] (optional bool) ~doc: "true|false Shutdown when disconnected from daemon (default:true)" in fun () -> let logger = - Logger.create () ~metadata:[("process", `String "Snark Worker")] + Logger.create () ~metadata:[ ("process", `String "Snark Worker") ] in - Signal.handle [Signal.term] ~f:(fun _signal -> + Signal.handle [ Signal.term ] ~f:(fun _signal -> [%log info] !"Received signal to terminate. Aborting snark worker process" ; - Core.exit 0 ) ; + Core.exit 0) ; let proof_level = Option.value ~default:Genesis_constants.Proof_level.compiled proof_level @@ -280,5 +282,6 @@ module Make (Inputs : Intf.Inputs_intf) : ; "-proof-level" ; Genesis_constants.Proof_level.to_string proof_level ; "-shutdown-on-disconnect" - ; Bool.to_string shutdown_on_disconnect ] + ; Bool.to_string shutdown_on_disconnect + ] end diff --git a/src/lib/snark_worker/intf.ml b/src/lib/snark_worker/intf.ml index e077928b542..8e50bcfeca7 100644 --- a/src/lib/snark_worker/intf.ml +++ b/src/lib/snark_worker/intf.ml @@ -24,10 +24,7 @@ module type Inputs_intf = sig val perform_single : Worker_state.t -> message:Mina_base.Sok_message.t - -> ( Transaction.t - , Transaction_witness.t - , Ledger_proof.t ) - Work.Single.Spec.t + -> (Transaction.t, Transaction_witness.t, Ledger_proof.t) Work.Single.Spec.t -> (Ledger_proof.t * Time.Span.t) Deferred.Or_error.t end @@ -123,14 +120,14 @@ module type S0 = sig module Rpcs : sig module Get_work : Rpc_master - with type Master.T.query = unit - and type Master.T.response = - (Work.Spec.t * Signature_lib.Public_key.Compressed.t) option + with type Master.T.query = unit + and type Master.T.response = + (Work.Spec.t * Signature_lib.Public_key.Compressed.t) option module Submit_work : Rpc_master - with type Master.T.query = Work.Result.t - and type Master.T.response = unit + with type Master.T.query = Work.Result.t + and type Master.T.response = unit end val command_from_rpcs : diff --git 
a/src/lib/snark_worker/prod.ml b/src/lib/snark_worker/prod.ml index 072199c919b..d82164245a6 100644 --- a/src/lib/snark_worker/prod.ml +++ b/src/lib/snark_worker/prod.ml @@ -25,9 +25,10 @@ module Inputs = struct module type S = Transaction_snark.S type t = - { m: (module S) option - ; cache: Cache.t - ; proof_level: Genesis_constants.Proof_level.t } + { m : (module S) option + ; cache : Cache.t + ; proof_level : Genesis_constants.Proof_level.t + } let create ~constraint_constants ~proof_level () = let m = @@ -38,12 +39,11 @@ module Inputs = struct let constraint_constants = constraint_constants let proof_level = proof_level - end) - : S ) + end) : S ) | Check | None -> None in - Deferred.return {m; cache= Cache.create (); proof_level} + Deferred.return { m; cache = Cache.create (); proof_level } let worker_wait_time = 5. end @@ -55,7 +55,7 @@ module Inputs = struct Snark_work_lib.Work.Single.Spec.t [@@deriving sexp] - let perform_single ({m; cache; proof_level} : Worker_state.t) ~message = + let perform_single ({ m; cache; proof_level } : Worker_state.t) ~message = let open Deferred.Or_error.Let_syntax in let open Snark_work_lib in let sok_digest = Mina_base.Sok_message.digest message in @@ -74,10 +74,10 @@ module Inputs = struct [ ("error", Error_json.error_to_yojson e) ; ( "spec" (* the [@sexp.opaque] in Work.Single.Spec.t means we can't derive yojson, - so we use the less-desirable sexp here - *) - , `String (Sexp.to_string (sexp_of_single_spec single)) - ) ] ; + so we use the less-desirable sexp here + *) + , `String (Sexp.to_string (sexp_of_single_spec single)) ) + ] ; Error.raise e | Ok res -> Cache.add cache ~statement ~proof:res ; @@ -88,54 +88,54 @@ module Inputs = struct | Some proof -> Deferred.Or_error.return (proof, Time.Span.zero) | None -> ( - match single with - | Work.Single.Spec.Transition - (input, t, (w : Transaction_witness.t)) -> - process (fun () -> - let%bind t = - Deferred.return - @@ - (* Validate the received transaction *) - match t with 
- | Command (Signed_command cmd) -> ( - match Signed_command.check cmd with - | Some cmd -> - ( Ok (Command (Signed_command cmd)) - : Transaction.Valid.t Or_error.t ) - | None -> - Or_error.errorf "Command has an invalid signature" - ) - | Command (Snapp_command cmd) -> - Ok (Command (Snapp_command cmd)) - | Fee_transfer ft -> - Ok (Fee_transfer ft) - | Coinbase cb -> - Ok (Coinbase cb) - in - let snapp_account1, snapp_account2 = - Sparse_ledger.snapp_accounts w.ledger - (Transaction.forget t) - in - Deferred.Or_error.try_with ~here:[%here] (fun () -> - M.of_transaction ~sok_digest ~snapp_account1 - ~snapp_account2 - ~source:input.Transaction_snark.Statement.source - ~target:input.target - { Transaction_protocol_state.Poly.transaction= t - ; block_data= w.protocol_state_body } - ~init_stack:w.init_stack - ~next_available_token_before: - input.next_available_token_before - ~next_available_token_after: - input.next_available_token_after - ~pending_coinbase_stack_state: - input - .Transaction_snark.Statement - .pending_coinbase_stack_state - (unstage (Mina_base.Sparse_ledger.handler w.ledger)) - ) ) - | Merge (_, proof1, proof2) -> - process (fun () -> M.merge ~sok_digest proof1 proof2) ) ) + match single with + | Work.Single.Spec.Transition + (input, t, (w : Transaction_witness.t)) -> + process (fun () -> + let%bind t = + Deferred.return + @@ + (* Validate the received transaction *) + match t with + | Command (Signed_command cmd) -> ( + match Signed_command.check cmd with + | Some cmd -> + ( Ok (Command (Signed_command cmd)) + : Transaction.Valid.t Or_error.t ) + | None -> + Or_error.errorf + "Command has an invalid signature" ) + | Command (Snapp_command cmd) -> + Ok (Command (Snapp_command cmd)) + | Fee_transfer ft -> + Ok (Fee_transfer ft) + | Coinbase cb -> + Ok (Coinbase cb) + in + let snapp_account1, snapp_account2 = + Sparse_ledger.snapp_accounts w.ledger + (Transaction.forget t) + in + Deferred.Or_error.try_with ~here:[%here] (fun () -> + M.of_transaction 
~sok_digest ~snapp_account1 + ~snapp_account2 + ~source:input.Transaction_snark.Statement.source + ~target:input.target + { Transaction_protocol_state.Poly.transaction = t + ; block_data = w.protocol_state_body + } + ~init_stack:w.init_stack + ~next_available_token_before: + input.next_available_token_before + ~next_available_token_after: + input.next_available_token_after + ~pending_coinbase_stack_state: + input + .Transaction_snark.Statement + .pending_coinbase_stack_state + (unstage (Mina_base.Sparse_ledger.handler w.ledger)))) + | Merge (_, proof1, proof2) -> + process (fun () -> M.merge ~sok_digest proof1 proof2) ) ) | Check | None -> (* Use a dummy proof. *) let stmt = @@ -148,8 +148,7 @@ module Inputs = struct Deferred.Or_error.return @@ ( Transaction_snark.create ~source:stmt.source ~target:stmt.target ~supply_increase:stmt.supply_increase - ~pending_coinbase_stack_state: - stmt.pending_coinbase_stack_state + ~pending_coinbase_stack_state:stmt.pending_coinbase_stack_state ~next_available_token_before:stmt.next_available_token_before ~next_available_token_after:stmt.next_available_token_after ~fee_excess:stmt.fee_excess ~sok_digest diff --git a/src/lib/snark_worker/snark_worker.ml b/src/lib/snark_worker/snark_worker.ml index 327e6df031d..60a1bb0a87f 100644 --- a/src/lib/snark_worker/snark_worker.ml +++ b/src/lib/snark_worker/snark_worker.ml @@ -18,7 +18,7 @@ module Worker = struct module Get_work = struct module V1 = struct module T = struct - type query = unit [@@deriving bin_io, version {rpc}] + type query = unit [@@deriving bin_io, version { rpc }] type response = ( ( Transaction.Stable.V1.t @@ -28,7 +28,7 @@ module Worker = struct Snark_work_lib.Work.Spec.Stable.V1.t * Public_key.Compressed.Stable.V1.t ) option - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id @@ -57,9 +57,9 @@ module Worker = struct Snark_work_lib.Work.Spec.Stable.V1.t , Ledger_proof.Stable.V1.t ) 
Snark_work_lib.Work.Result.Stable.V1.t - [@@deriving bin_io, version {rpc}] + [@@deriving bin_io, version { rpc }] - type response = unit [@@deriving bin_io, version {rpc}] + type response = unit [@@deriving bin_io, version { rpc }] let query_of_caller_model = Fn.id diff --git a/src/lib/snark_worker/standalone/run_snark_worker.ml b/src/lib/snark_worker/standalone/run_snark_worker.ml index 3984182da9e..b26f16e18ea 100644 --- a/src/lib/snark_worker/standalone/run_snark_worker.ml +++ b/src/lib/snark_worker/standalone/run_snark_worker.ml @@ -14,7 +14,8 @@ let command = (Command.Arg_type.of_alist_exn [ ("Full", Genesis_constants.Proof_level.Full) ; ("Check", Check) - ; ("None", None) ])) + ; ("None", None) + ])) in fun () -> let open Async in diff --git a/src/lib/snarky_blake2/snarky_blake2.ml b/src/lib/snarky_blake2/snarky_blake2.ml index 0ea04d3fd00..a7708584f1d 100644 --- a/src/lib/snarky_blake2/snarky_blake2.ml +++ b/src/lib/snarky_blake2/snarky_blake2.ml @@ -37,13 +37,13 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : let ( := ) i t = (v, i) := t in let open UInt32 in let xorrot t1 t2 k = xor t1 t2 >>| Fn.flip UInt32.rotr k in - let%bind () = a := sum [v.(a); v.(b); x] in + let%bind () = a := sum [ v.(a); v.(b); x ] in let%bind () = d := xorrot v.(d) v.(a) r1 in - let%bind () = c := sum [v.(c); v.(d)] in + let%bind () = c := sum [ v.(c); v.(d) ] in let%bind () = b := xorrot v.(b) v.(c) r2 in - let%bind () = a := sum [v.(a); v.(b); y] in + let%bind () = a := sum [ v.(a); v.(b); y ] in let%bind () = d := xorrot v.(d) v.(a) r3 in - let%bind () = c := sum [v.(c); v.(d)] in + let%bind () = c := sum [ v.(c); v.(d) ] in let%bind () = b := xorrot v.(b) v.(c) r4 in return () @@ -57,7 +57,8 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : ; 0x510E527F ; 0x9B05688C ; 0x1F83D9AB - ; 0x5BE0CD19 |] + ; 0x5BE0CD19 + |] let splitu64 u = let open Unsigned.UInt64 in @@ -77,16 +78,17 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : go 0 let sigma = - [| 
[|0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10; 11; 12; 13; 14; 15|] - ; [|14; 10; 4; 8; 9; 15; 13; 6; 1; 12; 0; 2; 11; 7; 5; 3|] - ; [|11; 8; 12; 0; 5; 2; 15; 13; 10; 14; 3; 6; 7; 1; 9; 4|] - ; [|7; 9; 3; 1; 13; 12; 11; 14; 2; 6; 5; 10; 4; 0; 15; 8|] - ; [|9; 0; 5; 7; 2; 4; 10; 15; 14; 1; 11; 12; 6; 8; 3; 13|] - ; [|2; 12; 6; 10; 0; 11; 8; 3; 4; 13; 7; 5; 15; 14; 1; 9|] - ; [|12; 5; 1; 15; 14; 13; 4; 10; 0; 7; 6; 3; 9; 2; 8; 11|] - ; [|13; 11; 7; 14; 12; 1; 3; 9; 5; 0; 15; 4; 8; 6; 2; 10|] - ; [|6; 15; 14; 9; 11; 3; 0; 8; 12; 2; 13; 7; 1; 4; 10; 5|] - ; [|10; 2; 8; 4; 7; 6; 1; 5; 15; 11; 9; 14; 3; 12; 13; 0|] |] + [| [| 0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10; 11; 12; 13; 14; 15 |] + ; [| 14; 10; 4; 8; 9; 15; 13; 6; 1; 12; 0; 2; 11; 7; 5; 3 |] + ; [| 11; 8; 12; 0; 5; 2; 15; 13; 10; 14; 3; 6; 7; 1; 9; 4 |] + ; [| 7; 9; 3; 1; 13; 12; 11; 14; 2; 6; 5; 10; 4; 0; 15; 8 |] + ; [| 9; 0; 5; 7; 2; 4; 10; 15; 14; 1; 11; 12; 6; 8; 3; 13 |] + ; [| 2; 12; 6; 10; 0; 11; 8; 3; 4; 13; 7; 5; 15; 14; 1; 9 |] + ; [| 12; 5; 1; 15; 14; 13; 4; 10; 0; 7; 6; 3; 9; 2; 8; 11 |] + ; [| 13; 11; 7; 14; 12; 1; 3; 9; 5; 0; 15; 4; 8; 6; 2; 10 |] + ; [| 6; 15; 14; 9; 11; 3; 0; 8; 12; 2; 13; 7; 1; 4; 10; 5 |] + ; [| 10; 2; 8; 4; 7; 6; 1; 5; 15; 11; 9; 14; 3; 12; 13; 0 |] + |] let compression h (m : UInt32.t array) t f = assert (Array.length h = 8) ; @@ -114,12 +116,12 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : let%bind () = mix 1 6 11 12 10 11 in let%bind () = mix 2 7 8 13 12 13 in let%bind () = mix 3 4 9 14 14 15 in - return () ) + return ()) in let%bind () = for_ 8 ~f:(fun i -> let%bind () = (h, i) := xor h.(i) v.(i) in - (h, i) := xor h.(i) v.(Int.(i + 8)) ) + (h, i) := xor h.(i) v.(Int.(i + 8))) in return () @@ -139,7 +141,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : let concat_int32s (ts : UInt32.t array) = let n = Array.length ts in Array.init (n * UInt32.length_in_bits) ~f:(fun i -> - ts.(i / UInt32.length_in_bits).(i mod UInt32.length_in_bits) ) + ts.(i / 
UInt32.length_in_bits).(i mod UInt32.length_in_bits)) let default_personalization = String.init 8 ~f:(fun _ -> '\000') @@ -166,7 +168,8 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : ; 0x510E527F ; 0x9B05688C ; 0x1F83D9AB lxor p 0 - ; 0x5BE0CD19 lxor p 4 |] + ; 0x5BE0CD19 lxor p 4 + |] in let padded = pad_input input in let blocks : UInt32.t array array = @@ -174,15 +177,15 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : if n = 0 then [| Array.create ~len:(block_size_in_bits / UInt32.length_in_bits) - UInt32.zero |] + UInt32.zero + |] else Array.init (n / block_size_in_bits) ~f:(fun i -> - Array.init (block_size_in_bits / UInt32.length_in_bits) - ~f:(fun j -> + Array.init (block_size_in_bits / UInt32.length_in_bits) ~f:(fun j -> Array.init UInt32.length_in_bits ~f:(fun k -> padded.((block_size_in_bits * i) + (UInt32.length_in_bits * j) - + k) ) ) ) + + k)))) in let%bind () = for_ @@ -190,7 +193,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : ~f:(fun i -> compression h blocks.(i) Unsigned.UInt64.(Infix.((of_int i + one) * of_int 64)) - false ) + false) in let input_length_in_bytes = (Array.length input + 7) / 8 in let%bind () = diff --git a/src/lib/snarky_blake2/test/test.ml b/src/lib/snarky_blake2/test/test.ml index 2f2fa26733c..cf87325b6d9 100644 --- a/src/lib/snarky_blake2/test/test.ml +++ b/src/lib/snarky_blake2/test/test.ml @@ -33,7 +33,7 @@ let%test_module "blake2-equality test" = let to_bitstring bits = String.init (Array.length bits) ~f:(fun i -> - if bits.(i) then '1' else '0' ) + if bits.(i) then '1' else '0') let%test_unit "constraint count" = assert ( @@ -63,5 +63,5 @@ let%test_module "blake2-equality test" = ~sexp_of_t:(Fn.compose [%sexp_of: string] to_bitstring) input_typ output_typ (blake2s ?personalization:None) - blake2_unchecked input ) + blake2_unchecked input) end ) diff --git a/src/lib/snarky_blake2/uint32.ml b/src/lib/snarky_blake2/uint32.ml index b7c1ebaff69..608c2a301ca 100644 --- 
a/src/lib/snarky_blake2/uint32.ml +++ b/src/lib/snarky_blake2/uint32.ml @@ -53,7 +53,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : ~there:(fun x -> Array.init length ~f:(get_bit x)) ~back:(fun arr -> Array.foldi arr ~init:zero ~f:(fun i acc b -> - if b then acc lor (one lsl i) else acc ) ) + if b then acc lor (one lsl i) else acc)) let xor t1 t2 = let res = Array.create ~len:length Boolean.false_ in @@ -79,7 +79,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : (Option.value_exn (Field.Var.to_constant (b :> Field.Var.t))) Field.one in - if b then acc lor (one lsl i) else acc )) + if b then acc lor (one lsl i) else acc)) with _ -> None let pack (t : t) = @@ -87,7 +87,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : Array.fold t ~init:(Field.one, Field.Var.constant Field.zero) ~f:(fun (x, acc) b -> - (Field.(x + x), Field.Checked.(acc + (x * (b :> Field.Var.t)))) ) + (Field.(x + x), Field.Checked.(acc + (x * (b :> Field.Var.t))))) in acc @@ -99,7 +99,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : | Some x -> (Unchecked.to_int x + c, vs) | None -> - (c, t :: vs) ) + (c, t :: vs)) in match vars with | [] -> @@ -108,9 +108,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) : let max_length = Int.( ceil_log2 - ( c - + (List.length vars * Unchecked.to_int Unsigned.UInt32.max_int) - )) + (c + (List.length vars * Unchecked.to_int Unsigned.UInt32.max_int))) in let%map bits = Field.Checked.choose_preimage_var ~length:max_length diff --git a/src/lib/snarky_curves/snarky_curves.ml b/src/lib/snarky_curves/snarky_curves.ml index 6148ac41240..95a0e9ea540 100644 --- a/src/lib/snarky_curves/snarky_curves.ml +++ b/src/lib/snarky_curves/snarky_curves.ml @@ -73,9 +73,9 @@ module type Weierstrass_checked_intf = sig module Shifted : sig module type S = Shifted_intf - with type ('a, 'b) checked := ('a, 'b) Checked.t - and type curve_var := t - and type boolean_var := Boolean.var + with type ('a, 'b) checked := ('a, 'b) Checked.t + and 
type curve_var := t + and type boolean_var := Boolean.var type 'a m = (module S with type t = 'a) @@ -87,7 +87,7 @@ module type Weierstrass_checked_intf = sig val constant : unchecked -> t val add_unsafe : - t -> t -> ([`I_thought_about_this_very_carefully of t], _) Checked.t + t -> t -> ([ `I_thought_about_this_very_carefully of t ], _) Checked.t val if_ : Boolean.var -> then_:t -> else_:t -> (t, _) Checked.t @@ -120,9 +120,9 @@ end module Make_weierstrass_checked (F : Snarky_field_extensions.Intf.S) (Scalar : sig - type t + type t - val of_int : int -> t + val of_int : int -> t end) (Curve : sig type t @@ -141,13 +141,13 @@ module Make_weierstrass_checked val scale : t -> Scalar.t -> t end) (Params : Params_intf with type field := F.Unchecked.t) (Override : sig - val add : - (F.t * F.t -> F.t * F.t -> (F.t * F.t, _) F.Impl.Checked.t) option + val add : + (F.t * F.t -> F.t * F.t -> (F.t * F.t, _) F.Impl.Checked.t) option end) : Weierstrass_checked_intf - with module Impl := F.Impl - and type unchecked := Curve.t - and type t = F.t * F.t = struct + with module Impl := F.Impl + and type unchecked := Curve.t + and type t = F.t * F.t = struct open F.Impl type t = F.t * F.t @@ -165,7 +165,7 @@ module Make_weierstrass_checked Typ.(tuple2 F.typ F.typ) ~there:Curve.to_affine_exn ~back:Curve.of_affine in - {unchecked with check= assert_on_curve} + { unchecked with check = assert_on_curve } let negate ((x, y) : t) : t = (x, F.negate y) @@ -201,7 +201,7 @@ module Make_weierstrass_checked let%bind () = (* lambda^2 = cx + ax + bx cx = lambda^2 - (ax + bc) - *) + *) assert_square lambda F.(cx + ax + bx) in let%bind cy = @@ -222,8 +222,8 @@ module Make_weierstrass_checked match Override.add with Some add -> add p1 p2 | None -> add' ~div p1 p2 (* This function MUST NOT be called UNLESS you are certain the two points - on which it is called are not equal. If it is called on equal points, - the prover can return almost any curve point they want to from this function. 
*) + on which it is called are not equal. If it is called on equal points, + the prover can return almost any curve point they want to from this function. *) let add_unsafe p q = let%map r = add' ~div:F.div_unsafe p q in `I_thought_about_this_very_carefully r @@ -239,9 +239,9 @@ module Make_weierstrass_checked module Shifted = struct module type S = Shifted_intf - with type ('a, 'b) checked := ('a, 'b) Checked.t - and type curve_var := t - and type boolean_var := Boolean.var + with type ('a, 'b) checked := ('a, 'b) Checked.t + and type curve_var := t + and type boolean_var := Boolean.var type 'a m = (module S with type t = 'a) @@ -284,8 +284,7 @@ module Make_weierstrass_checked As_prover.( map2 (read typ x_squared) (read typ ay) ~f:(fun x_squared ay -> let open F.Unchecked in - (x_squared + x_squared + x_squared + Params.a) * inv (ay + ay) - )) + (x_squared + x_squared + x_squared + Params.a) * inv (ay + ay))) in let%bind bx = exists typ @@ -293,7 +292,7 @@ module Make_weierstrass_checked As_prover.( map2 (read typ lambda) (read typ ax) ~f:(fun lambda ax -> let open F.Unchecked in - square lambda - (ax + ax) )) + square lambda - (ax + ax))) in let%bind by = exists typ @@ -321,7 +320,7 @@ module Make_weierstrass_checked let choose a1 a2 = let open Field.Checked in F.map2_ a1 a2 ~f:(fun a1 a2 -> - (a1 * cond) + (a2 * (Field.Var.constant Field.one - cond)) ) + (a1 * cond) + (a2 * (Field.Var.constant Field.one - cond))) in (choose x1 x2, choose y1 y2) @@ -347,9 +346,9 @@ module Make_weierstrass_checked go 0 c init t (* This 'looks up' a field element from a lookup table of size 2^2 = 4 with - a 2 bit index. See https://github.com/zcash/zcash/issues/2234#issuecomment-383736266 for - a discussion of this trick. -*) + a 2 bit index. See https://github.com/zcash/zcash/issues/2234#issuecomment-383736266 for + a discussion of this trick. 
+ *) let lookup_point (b0, b1) (t1, t2, t3, t4) = let%map b0_and_b1 = Boolean.( && ) b0 b1 in let lookup_one (a1, a2, a3, a4) = @@ -384,6 +383,7 @@ module Make_weierstrass_checked let sigma = t in let n = List.length b in let sigma_count = (n + 1) / 2 in + (* = ceil (n / 2.0) *) (* We implement a complicated optimzation so that in total this costs roughly (1 + 3) * (n / 2) constraints, rather than @@ -415,6 +415,7 @@ module Make_weierstrass_checked , Curve.(sigma + two_to_the_i_plus_1) , Curve.(sigma + two_to_the_i + two_to_the_i_plus_1) ) in + (* Say b = b0, b1, .., b_{n-1}. We compute @@ -438,7 +439,7 @@ module Make_weierstrass_checked match bits with | [] -> return acc - | [b_i] -> + | [ b_i ] -> let term = lookup_single_bit b_i (sigma, Curve.(sigma + two_to_the_i)) in diff --git a/src/lib/snarky_field_extensions/field_extensions.ml b/src/lib/snarky_field_extensions/field_extensions.ml index 9e8596128a5..9aa9914fbd8 100644 --- a/src/lib/snarky_field_extensions/field_extensions.ml +++ b/src/lib/snarky_field_extensions/field_extensions.ml @@ -23,7 +23,7 @@ module Make_test (F : Intf.Basic) = struct !"%s test failure: %{sexp:arg} -> %{sexp:F.Unchecked.t} vs \ %{sexp:F.Unchecked.t}" label x r1 r2 () - else () ) + else ()) let test1 l f g = test F.typ F.Unchecked.gen F.Unchecked.sexp_of_t l f g @@ -171,11 +171,7 @@ struct try Some (A.map t ~f:(fun x -> - match F.to_constant x with - | Some x -> - x - | None -> - raise None_exn )) + match F.to_constant x with Some x -> x | None -> raise None_exn)) with None_exn -> None let if_ b ~then_ ~else_ = @@ -254,7 +250,7 @@ struct type 'a t_ = 'a - let to_list x = [x] + let to_list x = [ x ] type t = Field.Var.t @@ -298,15 +294,15 @@ end the field F(sqrt(s)) = F[x] / (x^2 - s) *) module E2 (F : Intf.S) (Params : sig - val non_residue : F.Unchecked.t + val non_residue : F.Unchecked.t - val mul_by_non_residue : F.t -> F.t + val mul_by_non_residue : F.t -> F.t end) : sig include Intf.S_with_primitive_element - with module Impl = 
F.Impl - and module Base = F - and type 'a A.t = 'a * 'a + with module Impl = F.Impl + and module Base = F + and type 'a A.t = 'a * 'a val unitary_inverse : t -> t end = struct @@ -333,7 +329,7 @@ end = struct let to_list (x, y) = F.to_list x @ F.to_list y (* A value [(a, b) : t] should be thought of as the field element - a + b sqrt(s). Then all operations are just what follow algebraically. *) + a + b sqrt(s). Then all operations are just what follow algebraically. *) include Make_applicative (Base) (A) @@ -436,18 +432,18 @@ end module E3 (F : Intf.S) (Params : sig - val non_residue : F.Unchecked.t + val non_residue : F.Unchecked.t - val frobenius_coeffs_c1 : F.Unchecked.t array + val frobenius_coeffs_c1 : F.Unchecked.t array - val frobenius_coeffs_c2 : F.Unchecked.t array + val frobenius_coeffs_c2 : F.Unchecked.t array - val mul_by_non_residue : F.t -> F.t + val mul_by_non_residue : F.t -> F.t end) : Intf.S_with_primitive_element - with module Impl = F.Impl - and module Base = F - and type 'a A.t = 'a * 'a * 'a = struct + with module Impl = F.Impl + and module Base = F + and type 'a A.t = 'a * 'a * 'a = struct module T = struct module Base = F module Unchecked = Snarkette.Fields.Make_fp3 (F.Unchecked) (Params) @@ -594,24 +590,23 @@ end module F3 (F : Intf.S with type 'a A.t = 'a and type 'a Base.t_ = 'a) (Params : sig - val non_residue : F.Unchecked.t + val non_residue : F.Unchecked.t - val frobenius_coeffs_c1 : F.Unchecked.t array + val frobenius_coeffs_c1 : F.Unchecked.t array - val frobenius_coeffs_c2 : F.Unchecked.t array + val frobenius_coeffs_c2 : F.Unchecked.t array end) : Intf.S_with_primitive_element - with module Impl = F.Impl - and module Base = F - and type 'a A.t = 'a * 'a * 'a = struct + with module Impl = F.Impl + and module Base = F + and type 'a A.t = 'a * 'a * 'a = struct module T = struct module Base = F module Unchecked = Snarkette.Fields.Make_fp3 (F.Unchecked) (Params) module Impl = F.Impl open Impl - let mul_by_primitive_element (a, b, c) = 
- (F.scale c Params.non_residue, a, b) + let mul_by_primitive_element (a, b, c) = (F.scale c Params.non_residue, a, b) module A = struct include T3 @@ -621,7 +616,7 @@ module F3 (x, y, z) end - let to_list (x, y, z) = [x; y; z] + let to_list (x, y, z) = [ x; y; z ] include Make_applicative (Base) (A) @@ -698,9 +693,10 @@ module Cyclotomic_square = struct module Make_F6 (F2 : Intf.S_with_primitive_element - with type 'a A.t = 'a * 'a - and type 'a Base.t_ = 'a) (Params : sig - val cubic_non_residue : F2.Impl.Field.t + with type 'a A.t = 'a * 'a + and type 'a Base.t_ = 'a) + (Params : sig + val cubic_non_residue : F2.Impl.Field.t end) = struct let cyclotomic_square ((x00, x01, x02), (x10, x11, x12)) = @@ -727,22 +723,23 @@ end module F6 (Fq : Intf.S with type 'a A.t = 'a and type 'a Base.t_ = 'a) (Fq2 : Intf.S_with_primitive_element - with module Impl = Fq.Impl - and type 'a A.t = 'a * 'a - and type 'a Base.t_ = 'a Fq.t_) (Fq3 : sig - include - Intf.S_with_primitive_element + with module Impl = Fq.Impl + and type 'a A.t = 'a * 'a + and type 'a Base.t_ = 'a Fq.t_) + (Fq3 : sig + include + Intf.S_with_primitive_element with module Impl = Fq.Impl and type 'a A.t = 'a * 'a * 'a and type 'a Base.t_ = 'a Fq.t_ - module Params : sig - val non_residue : Fq.Unchecked.t + module Params : sig + val non_residue : Fq.Unchecked.t - val frobenius_coeffs_c1 : Fq.Unchecked.t array + val frobenius_coeffs_c1 : Fq.Unchecked.t array - val frobenius_coeffs_c2 : Fq.Unchecked.t array - end + val frobenius_coeffs_c2 : Fq.Unchecked.t array + end end) (Params : sig val frobenius_coeffs_c1 : Fq.Unchecked.t array end) = @@ -750,8 +747,7 @@ struct include E2 (Fq3) (struct - let non_residue : Fq3.Unchecked.t = - Fq.Unchecked.(zero, one, zero) + let non_residue : Fq3.Unchecked.t = Fq.Unchecked.(zero, one, zero) let mul_by_non_residue = Fq3.mul_by_primitive_element end) @@ -783,15 +779,15 @@ struct As_prover.( map2 ~f:Fq3.Unchecked.( * ) (read Fq3.typ a0) (read Fq3.typ b0)) (* v0 - = (a00 + s a01 
s^2 a02) (s^2 b02) - = non_residue a01 b02 + non_residue s a02 b02 + s^2 a00 b02 *) + = (a00 + s a01 s^2 a02) (s^2 b02) + = non_residue a01 b02 + non_residue s a02 b02 + s^2 a00 b02 *) in let%map () = let%map () = Fq.assert_r1cs a01 (Fq.scale b02 Fq3.Params.non_residue) (Field.Var.linear_combination - [(Field.one, c00); (Field.negate Fq3.Params.non_residue, v12)]) + [ (Field.one, c00); (Field.negate Fq3.Params.non_residue, v12) ]) and () = Fq.assert_r1cs a02 (Fq.scale b02 Fq3.Params.non_residue) Fq.(c01 - v10) and () = Fq.assert_r1cs a00 b02 Fq.(c02 - v11) in @@ -839,9 +835,10 @@ end module F4 (Fq2 : Intf.S_with_primitive_element - with type 'a A.t = 'a * 'a - and type 'a Base.t_ = 'a) (Params : sig - val frobenius_coeffs_c1 : Fq2.Impl.Field.t array + with type 'a A.t = 'a * 'a + and type 'a Base.t_ = 'a) + (Params : sig + val frobenius_coeffs_c1 : Fq2.Impl.Field.t array end) = struct include E2 diff --git a/src/lib/snarky_field_extensions/intf.ml b/src/lib/snarky_field_extensions/intf.ml index fa6a2bfa5b2..7a1c76412fa 100644 --- a/src/lib/snarky_field_extensions/intf.ml +++ b/src/lib/snarky_field_extensions/intf.ml @@ -65,13 +65,13 @@ module type Basic = sig val if_ : Boolean.var -> then_:t -> else_:t -> (t, _) Checked.t (* These definitions are shadowed in the below interface *) - val assert_square : [`Define | `Custom of t -> t -> (unit, _) Checked.t] + val assert_square : [ `Define | `Custom of t -> t -> (unit, _) Checked.t ] - val ( * ) : [`Define | `Custom of t -> t -> (t, _) Checked.t] + val ( * ) : [ `Define | `Custom of t -> t -> (t, _) Checked.t ] - val square : [`Define | `Custom of t -> (t, _) Checked.t] + val square : [ `Define | `Custom of t -> (t, _) Checked.t ] - val inv_exn : [`Define | `Custom of t -> (t, _) Checked.t] + val inv_exn : [ `Define | `Custom of t -> (t, _) Checked.t ] val real_part : 'a t_ -> 'a end @@ -92,7 +92,7 @@ module type S = sig val square : t -> (t, _) Checked.t (* This function MUST NOT be called on two arguments which are 
both potentially - zero *) + zero *) val div_unsafe : t -> t -> (t, _) Checked.t val inv_exn : t -> (t, _) Checked.t diff --git a/src/lib/snarky_group_map/checked_map.ml b/src/lib/snarky_group_map/checked_map.ml index 35973d50176..6fdcf852871 100644 --- a/src/lib/snarky_group_map/checked_map.ml +++ b/src/lib/snarky_group_map/checked_map.ml @@ -11,7 +11,7 @@ module Aux (Impl : Snarky_backendless.Snark_intf.Run) = struct let sqrt_exn x = let y = exists Field.typ ~compute:(fun () -> - Field.Constant.sqrt (As_prover.read_var x) ) + Field.Constant.sqrt (As_prover.read_var x)) in assert_square y x ; y @@ -31,11 +31,10 @@ module Aux (Impl : Snarky_backendless.Snark_intf.Run) = struct *) let is_square = exists Boolean.typ ~compute:(fun () -> - Field.Constant.is_square (As_prover.read_var x) ) + Field.Constant.is_square (As_prover.read_var x)) in let m = Lazy.force non_residue in - ( sqrt_exn (Field.if_ is_square ~then_:x ~else_:(Field.scale x m)) - , is_square ) + (sqrt_exn (Field.if_ is_square ~then_:x ~else_:(Field.scale x m)), is_square) end let wrap (type f) ((module Impl) : f Snarky_backendless.Snark0.m) ~potential_xs @@ -48,17 +47,16 @@ let wrap (type f) ((module Impl) : f Snarky_backendless.Snark0.m) ~potential_xs let y1, b1 = sqrt_flagged (y_squared ~x:x1) and y2, b2 = sqrt_flagged (y_squared ~x:x2) and y3, b3 = sqrt_flagged (y_squared ~x:x3) in - Boolean.Assert.any [b1; b2; b3] ; + Boolean.Assert.any [ b1; b2; b3 ] ; let x1_is_first = (b1 :> Field.t) and x2_is_first = (Boolean.((not b1) && b2) :> Field.t) and x3_is_first = (Boolean.((not b1) && (not b2) && b3) :> Field.t) in ( Field.((x1_is_first * x1) + (x2_is_first * x2) + (x3_is_first * x3)) - , Field.((x1_is_first * y1) + (x2_is_first * y2) + (x3_is_first * y3)) ) - ) + , Field.((x1_is_first * y1) + (x2_is_first * y2) + (x3_is_first * y3)) )) module Make (M : Snarky_backendless.Snark_intf.Run) (P : sig - val params : M.field Group_map.Params.t + val params : M.field Group_map.Params.t end) = struct open P @@ 
-66,7 +64,7 @@ struct open M let to_group = - let {Group_map.Spec.a; b} = Group_map.Params.spec params in + let { Group_map.Spec.a; b } = Group_map.Params.spec params in unstage (wrap (module M) diff --git a/src/lib/snarky_group_map/checked_map.mli b/src/lib/snarky_group_map/checked_map.mli index a79094de42c..171d7cd2a98 100644 --- a/src/lib/snarky_group_map/checked_map.mli +++ b/src/lib/snarky_group_map/checked_map.mli @@ -9,7 +9,8 @@ val wrap : module Make (M : Snarky_backendless.Snark_intf.Run) (Params : sig - val params : M.field Group_map.Params.t + val params : M.field Group_map.Params.t end) : sig val to_group : M.Field.t -> M.Field.t * M.Field.t -end [@@warning "-67"] +end +[@@warning "-67"] diff --git a/src/lib/snarky_group_map/snarky_group_map.ml b/src/lib/snarky_group_map/snarky_group_map.ml index 0a39cad8763..c799bc2580e 100644 --- a/src/lib/snarky_group_map/snarky_group_map.ml +++ b/src/lib/snarky_group_map/snarky_group_map.ml @@ -15,14 +15,14 @@ let to_group (type t) let params = params end) in - let {Group_map.Spec.a; b} = Params.spec params in + let { Group_map.Spec.a; b } = Params.spec params in let try_decode x = let f x = F.((x * x * x) + (a * x) + b) in let y = f x in if F.is_square y then Some (x, F.sqrt y) else None in let x1, x2, x3 = M.potential_xs t in - List.find_map [x1; x2; x3] ~f:try_decode |> Option.value_exn + List.find_map [ x1; x2; x3 ] ~f:try_decode |> Option.value_exn module Checked = struct open Snarky_backendless diff --git a/src/lib/snarky_log/examples/election/election.ml b/src/lib/snarky_log/examples/election/election.ml index 1515b781eeb..27af9894a1a 100644 --- a/src/lib/snarky_log/examples/election/election.ml +++ b/src/lib/snarky_log/examples/election/election.ml @@ -32,9 +32,9 @@ module Ballot = struct let to_bits (nonce, vote) = Nonce.to_bits nonce @ Vote.to_bits vote (* This is our first explicit example of a checked computation. 
It simply says that to - convert an opened ballot into bits, one converts the nonce and the vote into bits and - concatenates them. Behind the scenes, this function would set up all the constraints - necessary to certify the correctness of this computation with a snark. *) + convert an opened ballot into bits, one converts the nonce and the vote into bits and + concatenates them. Behind the scenes, this function would set up all the constraints + necessary to certify the correctness of this computation with a snark. *) let var_to_bits (nonce, vote) = let%map nonce_bits = Nonce.var_to_bits nonce and vote_bits = Vote.var_to_bits vote in @@ -76,7 +76,7 @@ let open_ballot i (commitment : Ballot.Closed.var) = let%map _, vote = request Ballot.Opened.typ (Open_ballot i) ~such_that:(fun opened -> let%bind implied = close_ballot_var opened in - Ballot.Closed.assert_equal commitment implied ) + Ballot.Closed.assert_equal commitment implied) in vote @@ -88,7 +88,7 @@ let count_pepperoni_votes vs = let%bind pepperoni_vote = Vote.(v = var Pepperoni) in Number.if_ pepperoni_vote ~then_:(acc + constant Field.one) - ~else_:(acc + constant Field.zero) ) + ~else_:(acc + constant Field.zero)) (* Aside for experts: This function could be much more efficient since a Candidate is just a bool which can be coerced to a cvar (thus requiring literally no constraints @@ -115,7 +115,7 @@ let check_winner commitments claimed_winner = (* This specifies the data that will be exposed in the statement we're proving: a list of closed ballots (commitments to votes) and the winner. 
*) let exposed () = - Data_spec.[Typ.list ~length:number_of_voters Ballot.Closed.typ; Vote.typ] + Data_spec.[ Typ.list ~length:number_of_voters Ballot.Closed.typ; Vote.typ ] let keypair = generate_keypair check_winner ~exposing:(exposed ()) @@ -125,27 +125,26 @@ let winner (ballots : Ballot.Opened.t array) = | _, Pepperoni -> true | _, Mushroom -> - false ) + false) in if pepperoni_votes > Array.length ballots / 2 then Vote.Pepperoni else Mushroom -let handled_check (ballots : Ballot.Opened.t array) commitments claimed_winner - = +let handled_check (ballots : Ballot.Opened.t array) commitments claimed_winner = (* As mentioned before, a checked computation can request help from outside. Here is where we answer those requests (or at least some of them). *) handle (check_winner commitments claimed_winner) - (fun (With {request; respond}) -> + (fun (With { request; respond }) -> match request with | Open_ballot i -> respond (Provide ballots.(i)) | _ -> - unhandled ) + unhandled) let tally_and_prove (ballots : Ballot.Opened.t array) = let commitments = List.init number_of_voters ~f:(fun i -> - Hash.hash (Ballot.Opened.to_bits ballots.(i)) ) + Hash.hash (Ballot.Opened.to_bits ballots.(i))) in let winner = winner ballots in ( commitments diff --git a/src/lib/snarky_log/examples/election/election_main.ml b/src/lib/snarky_log/examples/election/election_main.ml index 2ca328a25e2..4a795cfa797 100644 --- a/src/lib/snarky_log/examples/election/election_main.ml +++ b/src/lib/snarky_log/examples/election/election_main.ml @@ -38,8 +38,7 @@ let () = (* Mock data *) let received_ballots = Array.init number_of_voters ~f:(fun _ -> - Ballot.Opened.create (if Random.bool () then Pepperoni else Mushroom) - ) + Ballot.Opened.create (if Random.bool () then Pepperoni else Mushroom)) in let commitments, winner, proof = tally_and_prove received_ballots in let log_events = diff --git a/src/lib/snarky_log/examples/election/import.ml b/src/lib/snarky_log/examples/election/import.ml index 
399e8e3b71d..2f632aa4ac5 100644 --- a/src/lib/snarky_log/examples/election/import.ml +++ b/src/lib/snarky_log/examples/election/import.ml @@ -6,6 +6,5 @@ module Field = struct let to_bits = Field.unpack - let var_to_bits = - Field.Checked.choose_preimage_var ~length:Field.size_in_bits + let var_to_bits = Field.Checked.choose_preimage_var ~length:Field.size_in_bits end diff --git a/src/lib/snarky_log/snarky_log.ml b/src/lib/snarky_log/snarky_log.ml index d95cdf216b8..1ea08457bf2 100644 --- a/src/lib/snarky_log/snarky_log.ml +++ b/src/lib/snarky_log/snarky_log.ml @@ -28,7 +28,7 @@ module Constraints (Snarky_backendless : Snark_intf.Basic) = struct create_event label ~phase:(if start then Measure_start else Measure_end) ~timestamp:count - :: !rev_events ) + :: !rev_events) in List.rev !rev_events diff --git a/src/lib/snarky_pairing/g1_precomputation.ml b/src/lib/snarky_pairing/g1_precomputation.ml index da5c718cac4..d225238f9da 100644 --- a/src/lib/snarky_pairing/g1_precomputation.ml +++ b/src/lib/snarky_pairing/g1_precomputation.ml @@ -5,16 +5,15 @@ module type S = sig open Impl - type t = {p: Field.Var.t * Field.Var.t; py_twist_squared: Fqe.t} + type t = { p : Field.Var.t * Field.Var.t; py_twist_squared : Fqe.t } val create : Field.Var.t * Field.Var.t -> t end module Make (Impl : Snarky_backendless.Snark_intf.S) - (Fqe : Snarky_field_extensions.Intf.S with module Impl = Impl) - (Params : sig - val twist : Fqe.Unchecked.t + (Fqe : Snarky_field_extensions.Intf.S with module Impl = Impl) (Params : sig + val twist : Fqe.Unchecked.t end) = struct module Impl = Impl @@ -23,12 +22,13 @@ struct type g1 = Field.Var.t * Field.Var.t - type t = {p: g1; py_twist_squared: Fqe.t} + type t = { p : g1; py_twist_squared : Fqe.t } let create p = let _, y = p in let twist_squared = Fqe.Unchecked.square Params.twist in { p - ; py_twist_squared= - Fqe.map_ twist_squared ~f:(fun c -> Field.Var.scale y c) } + ; py_twist_squared = + Fqe.map_ twist_squared ~f:(fun c -> Field.Var.scale y c) + 
} end diff --git a/src/lib/snarky_pairing/g2_precomputation.ml b/src/lib/snarky_pairing/g2_precomputation.ml index eabafd2400d..1037a76afb5 100644 --- a/src/lib/snarky_pairing/g2_precomputation.ml +++ b/src/lib/snarky_pairing/g2_precomputation.ml @@ -6,10 +6,10 @@ module type S = sig module Fqe : Snarky_field_extensions.Intf.S with module Impl = Impl module Coeff : sig - type t = {rx: Fqe.t; ry: Fqe.t; gamma: Fqe.t; gamma_x: Fqe.t} + type t = { rx : Fqe.t; ry : Fqe.t; gamma : Fqe.t; gamma_x : Fqe.t } end - type t = {q: Fqe.t * Fqe.t; coeffs: Coeff.t list} + type t = { q : Fqe.t * Fqe.t; coeffs : Coeff.t list } val create : Fqe.t * Fqe.t -> (t, _) Impl.Checked.t end @@ -17,22 +17,22 @@ end module Make (Fqe : Snarky_field_extensions.Intf.S) (N : Snarkette.Nat_intf.S) (Params : sig - val coeff_a : Fqe.Unchecked.t + val coeff_a : Fqe.Unchecked.t - val loop_count : N.t + val loop_count : N.t end) = struct module Fqe = Fqe module Impl = Fqe.Impl module Coeff = struct - type t = {rx: Fqe.t; ry: Fqe.t; gamma: Fqe.t; gamma_x: Fqe.t} + type t = { rx : Fqe.t; ry : Fqe.t; gamma : Fqe.t; gamma_x : Fqe.t } [@@deriving fields] end type g2 = Fqe.t * Fqe.t - type t = {q: g2; coeffs: Coeff.t list} + type t = { q : g2; coeffs : Coeff.t list } open Impl open Let_syntax @@ -49,7 +49,7 @@ struct and ry = c ry and gamma = c gamma and gamma_x = c gamma_x in - {rx; ry; gamma; gamma_x} + { rx; ry; gamma; gamma_x } let if_ b ~then_ ~else_ = let%map q = if_g2 b ~then_:then_.q ~else_:else_.q @@ -57,9 +57,9 @@ struct Checked.List.map (List.zip_exn then_.coeffs else_.coeffs) ~f:(fun (t, e) -> if_coeff b ~then_:t ~else_:e) in - {q; coeffs} + { q; coeffs } - type 'fqe loop_state = {rx: 'fqe; ry: 'fqe} + type 'fqe loop_state = { rx : 'fqe; ry : 'fqe } let length (a, b, c) = let l = Field.Var.length in @@ -74,44 +74,47 @@ struct in let c = let gamma_x = gamma * s.rx in - { Coeff.rx= constant s.rx - ; ry= constant s.ry - ; gamma= constant gamma - ; gamma_x= constant gamma_x } + { Coeff.rx = constant 
s.rx + ; ry = constant s.ry + ; gamma = constant gamma + ; gamma_x = constant gamma_x + } in let s = let rx = square gamma - (s.rx + s.rx) in let ry = (gamma * (s.rx - rx)) - s.ry in - {rx; ry} + { rx; ry } in (s, c) - let addition_step_unchecked naf_i ~q:(qx, qy) - (s : Fqe.Unchecked.t loop_state) = + let addition_step_unchecked naf_i ~q:(qx, qy) (s : Fqe.Unchecked.t loop_state) + = let open Fqe in let open Unchecked in let gamma = let top = if naf_i > 0 then s.ry - qy else s.ry + qy in - (* This [div_unsafe] is definitely safe in the context of pre-processing - a verification key. The reason is the following. The top hash of the SNARK commits - the prover to using the correct verification key inside the SNARK, and we know for - that verification key that we will not hit a 0/0 case. - In the general pairing context (e.g., for precomputing on G2 elements in the proof), - I am not certain about this use of [div_unsafe]. *) + (* This [div_unsafe] is definitely safe in the context of pre-processing + a verification key. The reason is the following. The top hash of the SNARK commits + the prover to using the correct verification key inside the SNARK, and we know for + that verification key that we will not hit a 0/0 case. + + In the general pairing context (e.g., for precomputing on G2 elements in the proof), + I am not certain about this use of [div_unsafe]. 
*) top / (s.rx - qx) in let c = let gamma_x = gamma * qx in - { Coeff.rx= constant s.rx - ; ry= constant s.ry - ; gamma= constant gamma - ; gamma_x= constant gamma_x } + { Coeff.rx = constant s.rx + ; ry = constant s.ry + ; gamma = constant gamma + ; gamma_x = constant gamma_x + } in let s = let rx = square gamma - (s.rx + qx) in let ry = (gamma * (s.rx - rx)) - s.ry in - {rx; ry} + { rx; ry } in (s, c) @@ -130,11 +133,11 @@ struct go (i - 1) found_nonzero s acc else go (i - 1) found_nonzero s acc in - let coeffs = go (Array.length naf - 1) false {rx= qx; ry= qy} [] in - {q= Fqe.(constant qx, constant qy); coeffs} + let coeffs = go (Array.length naf - 1) false { rx = qx; ry = qy } [] in + { q = Fqe.(constant qx, constant qy); coeffs } (* I verified using sage that if the input [s] satisfies ry^2 = rx^3 + a rx + b, then - so does the output. *) + so does the output. *) let doubling_step (s : Fqe.t loop_state) = with_label __LOC__ (let open Fqe in @@ -150,7 +153,7 @@ struct since our curve has prime order. *) in let%map gamma_x = gamma * s.rx in - {Coeff.rx= s.rx; ry= s.ry; gamma; gamma_x} + { Coeff.rx = s.rx; ry = s.ry; gamma; gamma_x } in let%map s = with_label __LOC__ @@ -165,8 +168,8 @@ struct Fqe.Unchecked.(square gamma - (srx + srx)))) in (* rx = c.gamma^2 - 2 * s.rx - rx + 2 * s.rx = c.gamma^2 - *) + rx + 2 * s.rx = c.gamma^2 + *) let%map () = assert_square c.gamma (res + scale s.rx (Field.of_int 2)) in @@ -191,7 +194,7 @@ struct let%map () = assert_r1cs c.gamma (s.rx - rx) (res + s.ry) in res in - {rx; ry}) + { rx; ry }) in (s, c)) @@ -202,23 +205,24 @@ struct let%bind c = let%bind gamma = let top = if naf_i > 0 then s.ry - qy else s.ry + qy in - (* This [div_unsafe] is definitely safe in the context of pre-processing - a verification key. The reason is the following. The top hash of the SNARK commits - the prover to using the correct verification key inside the SNARK, and we know for - that verification key that we will not hit a 0/0 case. 
- In the general pairing context (e.g., for precomputing on G2 elements in the proof), - I am not certain about this use of [div_unsafe]. *) + (* This [div_unsafe] is definitely safe in the context of pre-processing + a verification key. The reason is the following. The top hash of the SNARK commits + the prover to using the correct verification key inside the SNARK, and we know for + that verification key that we will not hit a 0/0 case. + + In the general pairing context (e.g., for precomputing on G2 elements in the proof), + I am not certain about this use of [div_unsafe]. *) div_unsafe top (s.rx - qx) in let%map gamma_x = gamma * qx in - {Coeff.rx= s.rx; ry= s.ry; gamma; gamma_x} + { Coeff.rx = s.rx; ry = s.ry; gamma; gamma_x } in let%map s = let%bind rx = (* rx = c.gamma^2 - (s.rx + qx) - c.gamma^2 = rx + s.rx + qx - *) + c.gamma^2 = rx + s.rx + qx + *) let%bind res = exists Fqe.typ ~compute: @@ -234,8 +238,8 @@ struct in let%map ry = (* ry = c.gamma * (s.rx - rx) - s.ry - c.gamma * (s.rx - rx) = ry + s.ry - *) + c.gamma * (s.rx - rx) = ry + s.ry + *) let%bind res = exists Fqe.typ ~compute: @@ -250,7 +254,7 @@ struct let%map () = assert_r1cs c.gamma (s.rx - rx) (res + s.ry) in res in - {rx; ry} + { rx; ry } in (s, c)) @@ -272,6 +276,8 @@ struct else go (i - 1) found_nonzero s acc in with_label __LOC__ - (let%map coeffs = go (Array.length naf - 1) false {rx= qx; ry= qy} [] in - {q; coeffs}) + (let%map coeffs = + go (Array.length naf - 1) false { rx = qx; ry = qy } [] + in + { q; coeffs }) end diff --git a/src/lib/snarky_pairing/miller_loop.ml b/src/lib/snarky_pairing/miller_loop.ml index 5772eb8e5e7..3cc8929ae76 100644 --- a/src/lib/snarky_pairing/miller_loop.ml +++ b/src/lib/snarky_pairing/miller_loop.ml @@ -6,21 +6,21 @@ module type Inputs_intf = sig module Fq : Snarky_field_extensions.Intf.S - with module Impl = Impl - and type 'a A.t = 'a - and type 'a Base.t_ = 'a + with module Impl = Impl + and type 'a A.t = 'a + and type 'a Base.t_ = 'a module Fqe : 
Snarky_field_extensions.Intf.S_with_primitive_element - with module Impl = Impl - and module Base = Fq + with module Impl = Impl + and module Base = Fq module Fqk : sig include Snarky_field_extensions.Intf.S - with module Impl = Impl - and type 'a Base.t_ = 'a Fqe.t_ - and type 'a A.t = 'a * 'a + with module Impl = Impl + and type 'a Base.t_ = 'a Fqe.t_ + and type 'a A.t = 'a * 'a val special_mul : t -> t -> (t, _) Impl.Checked.t @@ -31,15 +31,15 @@ module type Inputs_intf = sig module G1_precomputation : G1_precomputation.S - with module Impl = Impl - and type 'a Fqe.Base.t_ = 'a Fqe.Base.t_ - and type 'a Fqe.A.t = 'a Fqe.A.t + with module Impl = Impl + and type 'a Fqe.Base.t_ = 'a Fqe.Base.t_ + and type 'a Fqe.A.t = 'a Fqe.A.t module G2_precomputation : G2_precomputation.S - with module Impl = Impl - and type 'a Fqe.Base.t_ = 'a Fqe.Base.t_ - and type 'a Fqe.A.t = 'a Fqe.A.t + with module Impl = Impl + and type 'a Fqe.Base.t_ = 'a Fqe.Base.t_ + and type 'a Fqe.A.t = 'a Fqe.A.t module N : Snarkette.Nat_intf.S @@ -55,8 +55,8 @@ module Make (Inputs : Inputs_intf) = struct open Impl open Let_syntax - let double_line_eval (p : G1_precomputation.t) - (c : G2_precomputation.Coeff.t) : (Fqk.t, _) Checked.t = + let double_line_eval (p : G1_precomputation.t) (c : G2_precomputation.Coeff.t) + : (Fqk.t, _) Checked.t = with_label __LOC__ (let px, _ = p.p in let%map c1 = @@ -82,8 +82,7 @@ module Make (Inputs : Inputs_intf) = struct let uncons_exn = function [] -> failwith "uncons_exn" | x :: xs -> (x, xs) - let finalize = - if Params.loop_count_is_neg then Fqk.unitary_inverse else Fn.id + let finalize = if Params.loop_count_is_neg then Fqk.unitary_inverse else Fn.id let miller_loop (p : G1_precomputation.t) (q : G2_precomputation.t) = let naf = Snarkette.Fields.find_wnaf (module N) 1 Params.loop_count in @@ -120,11 +119,11 @@ module Make (Inputs : Inputs_intf) = struct | Neg -> Fqk.special_div acc a (* TODO: Use an unsafe div here if appropriate. 
I think it should be fine - since py_twisted is py (a curve y-coorindate, guaranteed to be non-zero) - times a constant. - *) + since py_twisted is py (a curve y-coorindate, guaranteed to be non-zero) + times a constant. + *) in - (acc, (sgn, p, {q with G2_precomputation.coeffs})) ) + (acc, (sgn, p, { q with G2_precomputation.coeffs }))) in let rec go i found_nonzero pairs f = if i < 0 then return f diff --git a/src/lib/snarky_taylor/floating_point.ml b/src/lib/snarky_taylor/floating_point.ml index 6ecdb00ef52..ec30ad60679 100644 --- a/src/lib/snarky_taylor/floating_point.ml +++ b/src/lib/snarky_taylor/floating_point.ml @@ -13,7 +13,7 @@ module B = Bigint value / 2^precision *) -type 'f t = {value: 'f Cvar.t; precision: int} +type 'f t = { value : 'f Cvar.t; precision : int } let precision t = t.precision @@ -33,12 +33,12 @@ let mul (type f) ~m:((module I) : f m) x y = let open I in let new_precision = x.precision + y.precision in assert (new_precision < Field.Constant.size_in_bits) ; - {value= Field.(x.value * y.value); precision= new_precision} + { value = Field.(x.value * y.value); precision = new_precision } let constant (type f) ~m:((module M) as m : f m) ~value ~precision = assert (B.(value < one lsl precision)) ; let open M in - {value= Field.constant (bigint_to_field ~m value); precision} + { value = Field.constant (bigint_to_field ~m value); precision } (* x, x^2, ..., x^n *) let powers ~m x n = @@ -73,7 +73,7 @@ let add_signed (type f) ~m:((module M) : f m) t1 (sgn, t2) = let f = match sgn with `Pos -> ( + ) | `Neg -> ( - ) in f (pow2 add ~one Int.(t2.precision - t1.precision) * t1.value) t2.value in - {precision; value} + { precision; value } let add ~m x y = add_signed ~m x (`Pos, y) @@ -117,8 +117,8 @@ let le (type f) ~m:((module M) : f m) t1 t2 = *) let of_quotient ~m ~precision ~top ~bottom ~top_is_less_than_bottom:() = let q, _r = Integer.(div_mod ~m (shift_left ~m top precision) bottom) in - {value= Integer.to_field q; precision} + { value = 
Integer.to_field q; precision } let of_bits (type f) ~m:((module M) : f m) bits ~precision = assert (List.length bits <= precision) ; - {value= M.Field.pack bits; precision} + { value = M.Field.pack bits; precision } diff --git a/src/lib/snarky_taylor/floating_point.mli b/src/lib/snarky_taylor/floating_point.mli index 6e912eedb22..2dd7aa71c19 100644 --- a/src/lib/snarky_taylor/floating_point.mli +++ b/src/lib/snarky_taylor/floating_point.mli @@ -14,7 +14,7 @@ val add : m:'f m -> 'f t -> 'f t -> 'f t val sub : m:'f m -> 'f t -> 'f t -> 'f t -val add_signed : m:'f m -> 'f t -> [`Pos | `Neg] * 'f t -> 'f t +val add_signed : m:'f m -> 'f t -> [ `Pos | `Neg ] * 'f t -> 'f t val of_quotient : m:'f m diff --git a/src/lib/snarky_taylor/snarky_taylor.ml b/src/lib/snarky_taylor/snarky_taylor.ml index 875c2dc4310..afdbc6829a4 100644 --- a/src/lib/snarky_taylor/snarky_taylor.ml +++ b/src/lib/snarky_taylor/snarky_taylor.ml @@ -45,7 +45,7 @@ let log ~terms x = let open Sequence in unfold ~init:(a, 1) ~f:(fun (ai, i) -> let t = ai / of_int i in - Some ((if Int.(i mod 2 = 0) then neg t else t), (ai * a, Int.(i + 1))) ) + Some ((if Int.(i mod 2 = 0) then neg t else t), (ai * a, Int.(i + 1)))) |> Fn.flip take terms |> fold ~init:zero ~f:( + ) (* This computes the number of terms of a taylor series one needs to compute @@ -62,10 +62,9 @@ let terms_needed ~derivative_magnitude_upper_bound ~bits_of_precision:k = least ~such_that:(fun n -> let nn = B.of_int n in let d = derivative_magnitude_upper_bound Int.(n + 1) in - Bignum.(of_bigint (factorial nn) / d > lower_bound) ) + Bignum.(of_bigint (factorial nn) / d > lower_bound)) -let ceil_log2 n = - least ~such_that:(fun i -> B.(pow (of_int 2) (of_int i) >= n)) +let ceil_log2 n = least ~such_that:(fun i -> B.(pow (of_int 2) (of_int i) >= n)) let binary_expansion x = assert (Bignum.(x < one)) ; @@ -75,10 +74,10 @@ let binary_expansion x = ~f:(fun (rem, pt) -> let b = Bignum.(rem >= pt) in let rem = if b then Bignum.(rem - pt) else rem in - 
Some (b, Bignum.(rem, pt / two)) ) + Some (b, Bignum.(rem, pt / two))) module Coeff_integer_part = struct - type t = [`Zero | `One] [@@deriving sexp] + type t = [ `Zero | `One ] [@@deriving sexp] let of_int_exn : int -> t = function | 0 -> @@ -93,12 +92,13 @@ end module Params = struct type t = - { total_precision: int - ; per_term_precision: int - ; terms_needed: int + { total_precision : int + ; per_term_precision : int + ; terms_needed : int (* As a special case, we permite the x^1 coefficient to have absolute value < 2 (rather than < 1) *) - ; linear_term_integer_part: Coeff_integer_part.t - ; coefficients: ([`Neg | `Pos] * B.t) array } + ; linear_term_integer_part : Coeff_integer_part.t + ; coefficients : ([ `Neg | `Pos ] * B.t) array + } end (* This module constructs a snarky function for computing the function @@ -109,7 +109,7 @@ end *) module Exp = struct (* An upper bound on the magnitude nth derivative of base^x in [0, 1) is - |log(base)|^n *) + |log(base)|^n *) let derivative_magnitude_upper_bound n ~log_base = Bignum.(log_base ** n) @@ -120,16 +120,16 @@ module Exp = struct ~bits_of_precision type bit_params = - {total_precision: int; terms_needed: int; per_term_precision: int} + { total_precision : int; terms_needed : int; per_term_precision : int } (* This figures out how many bits we can hope to calculate given our field - size. This is because computing the terms + size. This is because computing the terms - x^k + x^k - in the taylor series will start to overflow when k is too large. E.g., - if our field has 298 bits and x has 32 bits, then we cannot easily compute - x^10, since representing it exactly requires 320 bits. *) + in the taylor series will start to overflow when k is too large. E.g., + if our field has 298 bits and x has 32 bits, then we cannot easily compute + x^10, since representing it exactly requires 320 bits. 
*) let bit_params ~field_size_in_bits ~log_base = let using_linear_term_whole_part = true in greatest ~such_that:(fun k -> @@ -141,8 +141,8 @@ module Exp = struct in let per_term_precision = ceil_log2 (B.of_int n) + k in if (n * per_term_precision) + per_term_precision < field_size_in_bits - then Some {per_term_precision; terms_needed= n; total_precision= k} - else None ) + then Some { per_term_precision; terms_needed = n; total_precision = k } + else None) let params ~field_size_in_bits ~base = let abs_log_base = @@ -150,9 +150,10 @@ module Exp = struct assert (Bignum.(log_base < zero)) ; Bignum.abs log_base in - let {total_precision; terms_needed; per_term_precision} = + let { total_precision; terms_needed; per_term_precision } = bit_params ~field_size_in_bits ~log_base:abs_log_base in + (* Precompute the coefficeints log(base)^i / i ! @@ -180,7 +181,7 @@ module Exp = struct c ) in ( (if i mod 2 = 0 then `Neg else `Pos) - , c_frac |> bignum_as_fixed_point per_term_precision ) ) + , c_frac |> bignum_as_fixed_point per_term_precision )) in (coefficients, !linear_term_integer_part) in @@ -188,7 +189,8 @@ module Exp = struct ; terms_needed ; per_term_precision ; coefficients - ; linear_term_integer_part } + ; linear_term_integer_part + } module Unchecked = struct let one_minus_exp (params : Params.t) x = @@ -200,7 +202,7 @@ module Exp = struct let x_i = Bignum.(x_i * x) in let c = Bignum.(of_bigint c / denom) in let c = match sgn with `Pos -> c | `Neg -> Bignum.neg c in - (Bignum.(acc + (x_i * c)), x_i) ) + (Bignum.(acc + (x_i * c)), x_i)) |> fst |> fun acc -> Bignum.( @@ -217,10 +219,10 @@ module Exp = struct let term = Floating_point.(mul ~m ci xi) in match sum with | None -> - assert ([%equal: [`Pos | `Neg]] sgn `Pos) ; + assert ([%equal: [ `Pos | `Neg ]] sgn `Pos) ; Some term | Some s -> - Some (Floating_point.add_signed ~m s (sgn, term)) ) + Some (Floating_point.add_signed ~m s (sgn, term))) |> Option.value_exn in match linear_term_integer_part with @@ -230,17 
+232,17 @@ module Exp = struct Floating_point.add ~m acc x_powers.(0) let one_minus_exp ~m - { Params.total_precision= _ + { Params.total_precision = _ ; terms_needed ; per_term_precision ; linear_term_integer_part - ; coefficients } x = + ; coefficients + } x = let powers = Floating_point.powers ~m x terms_needed in let coefficients = Array.map coefficients ~f:(fun (sgn, c) -> ( sgn - , Floating_point.constant ~m ~value:c ~precision:per_term_precision - ) ) + , Floating_point.constant ~m ~value:c ~precision:per_term_precision )) in taylor_sum ~m powers coefficients linear_term_integer_part end diff --git a/src/lib/snarky_taylor/tests/floating_point_test.ml b/src/lib/snarky_taylor/tests/floating_point_test.ml index 0cb1a40d863..2481e51a90d 100644 --- a/src/lib/snarky_taylor/tests/floating_point_test.ml +++ b/src/lib/snarky_taylor/tests/floating_point_test.ml @@ -26,7 +26,7 @@ let%test_unit "of-quotient" = of_quotient ~m ~precision ~top:(Integer.constant ~m a) ~bottom:(Integer.constant ~m b) ~top_is_less_than_bottom:() in - to_bignum ~m t ) + to_bignum ~m t) () |> Or_error.ok_exn in @@ -37,4 +37,4 @@ let%test_unit "of-quotient" = if not good then failwithf "got %s, expected %s\n" (Bignum.to_string_hum res) (Bignum.to_string_hum actual) - () ) + ()) diff --git a/src/lib/snarky_taylor/tests/snarky_taylor_test.ml b/src/lib/snarky_taylor/tests/snarky_taylor_test.ml index f3b83748653..98f1ef59fe0 100644 --- a/src/lib/snarky_taylor/tests/snarky_taylor_test.ml +++ b/src/lib/snarky_taylor/tests/snarky_taylor_test.ml @@ -16,10 +16,11 @@ let%test_unit "instantiate" = let c () = let arg = Floating_point.of_quotient ~m - ~top:(Integer.of_bits ~m (Bitstring.Lsb_first.of_list Boolean.[true_])) + ~top: + (Integer.of_bits ~m (Bitstring.Lsb_first.of_list Boolean.[ true_ ])) ~bottom: (Integer.of_bits ~m - (Bitstring.Lsb_first.of_list Boolean.[false_; true_])) + (Bitstring.Lsb_first.of_list Boolean.[ false_; true_ ])) ~top_is_less_than_bottom:() ~precision:2 in 
Floating_point.to_bignum ~m (Exp.one_minus_exp ~m params arg) diff --git a/src/lib/snarky_verifier/bowe_gabizon.ml b/src/lib/snarky_verifier/bowe_gabizon.ml index 6000e78ab72..87c899f80a1 100644 --- a/src/lib/snarky_verifier/bowe_gabizon.ml +++ b/src/lib/snarky_verifier/bowe_gabizon.ml @@ -20,22 +20,25 @@ module Make (Inputs : Inputs_intf) = struct module Verification_key = struct type ('g1, 'g2, 'fqk) t_ = - {query_base: 'g1; query: 'g1 list; delta: 'g2; alpha_beta: 'fqk} + { query_base : 'g1; query : 'g1 list; delta : 'g2; alpha_beta : 'fqk } [@@deriving fields, hlist] let typ ~input_size = let spec = - Data_spec.[G1.typ; Typ.list ~length:input_size G1.typ; G2.typ; Fqk.typ] + Data_spec. + [ G1.typ; Typ.list ~length:input_size G1.typ; G2.typ; Fqk.typ ] in Typ.of_hlistable spec ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist - let to_field_elements {query_base; query; delta; alpha_beta} = - let g1 (x, y) = [|x; y|] in - let g2 (x, y) = List.concat_map ~f:Fqe.to_list [x; y] |> Array.of_list in + let to_field_elements { query_base; query; delta; alpha_beta } = + let g1 (x, y) = [| x; y |] in + let g2 (x, y) = + List.concat_map ~f:Fqe.to_list [ x; y ] |> Array.of_list + in let fqk = g2 in Array.concat - (List.map ~f:g1 (query_base :: query) @ [g2 delta; fqk alpha_beta]) + (List.map ~f:g1 (query_base :: query) @ [ g2 delta; fqk alpha_beta ]) include Summary.Make (Inputs) @@ -43,14 +46,16 @@ module Make (Inputs : Inputs_intf) = struct summary_length_in_bits ~twist_extension_degree ~g1_count:(input_size + 1) ~g2_count:1 ~gt_count:1 - let summary_input {query_base; query; delta; alpha_beta} = - {Summary.Input.g1s= query_base :: query; g2s= [delta]; gts= [alpha_beta]} + let summary_input { query_base; query; delta; alpha_beta } = + { Summary.Input.g1s = query_base :: query + ; g2s = [ delta ] + ; gts = [ alpha_beta ] + } type ('a, 'b, 'c) vk = ('a, 'b, 'c) t_ let if_pair if_ b ~then_:(tx, ty) ~else_:(ex, ey) = - 
let%map x = if_ b ~then_:tx ~else_:ex - and y = if_ b ~then_:ty ~else_:ey in + let%map x = if_ b ~then_:tx ~else_:ex and y = if_ b ~then_:ty ~else_:ey in (x, y) let if_g1 b ~then_ ~else_ = if_pair Field.Checked.if_ b ~then_ ~else_ @@ -59,7 +64,7 @@ module Make (Inputs : Inputs_intf) = struct let if_list if_ b ~then_ ~else_ = Checked.List.map (List.zip_exn then_ else_) ~f:(fun (t, e) -> - if_ b ~then_:t ~else_:e ) + if_ b ~then_:t ~else_:e) let if_ b ~then_ ~else_ = let c if_ p = if_ b ~then_:(p then_) ~else_:(p else_) in @@ -67,46 +72,46 @@ module Make (Inputs : Inputs_intf) = struct and query = c (if_list if_g1) query and delta = c if_g2 delta and alpha_beta = c Fqk.if_ alpha_beta in - {query_base; query; delta; alpha_beta} + { query_base; query; delta; alpha_beta } module Precomputation = struct - type t = {delta: G2_precomputation.t} + type t = { delta : G2_precomputation.t } let create (vk : (_, _, _) vk) = let%map delta = G2_precomputation.create vk.delta in - {delta} + { delta } let create_constant (vk : (_, _, _) vk) = - {delta= G2_precomputation.create_constant vk.delta} + { delta = G2_precomputation.create_constant vk.delta } let if_ b ~then_ ~else_ = let%map delta = G2_precomputation.if_ b ~then_:then_.delta ~else_:else_.delta in - {delta} + { delta } end end module Proof = struct - type ('g1, 'g2) t_ = {a: 'g1; b: 'g2; c: 'g1; delta_prime: 'g2; z: 'g1} + type ('g1, 'g2) t_ = + { a : 'g1; b : 'g2; c : 'g1; delta_prime : 'g2; z : 'g1 } [@@deriving sexp, hlist] let typ = Typ.of_hlistable - Data_spec.[G1.typ; G2.typ; G1.typ; G2.typ; G1.typ] + Data_spec.[ G1.typ; G2.typ; G1.typ; G2.typ; G1.typ ] ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist end let verify ?message (vk : (_, _, _) Verification_key.t_) (vk_precomp : Verification_key.Precomputation.t) inputs - {Proof.a; b; c; delta_prime; z} = + { Proof.a; b; c; delta_prime; z } = let%bind acc = let%bind (module Shifted) = G1.Shifted.create () in 
let%bind init = Shifted.(add zero vk.query_base) in Checked.List.fold (List.zip_exn vk.query inputs) ~init - ~f:(fun acc (g, input) -> G1.scale (module Shifted) g input ~init:acc - ) + ~f:(fun acc (g, input) -> G1.scale (module Shifted) g input ~init:acc) >>= Shifted.unshift_nonzero in let%bind delta_prime_pc = G2_precomputation.create delta_prime in @@ -117,13 +122,15 @@ module Make (Inputs : Inputs_intf) = struct , G1_precomputation.create acc , G2_precomputation.create_constant G2.Unchecked.one ) ; (Neg, G1_precomputation.create c, delta_prime_pc) - ; (Pos, G1_precomputation.create a, b) ] + ; (Pos, G1_precomputation.create a, b) + ] >>= final_exponentiation >>= Fqk.equal vk.alpha_beta and test2 = let%bind ys = hash ?message ~a ~b ~c ~delta_prime in batch_miller_loop [ (Pos, G1_precomputation.create ys, delta_prime_pc) - ; (Neg, G1_precomputation.create z, vk_precomp.delta) ] + ; (Neg, G1_precomputation.create z, vk_precomp.delta) + ] >>= final_exponentiation >>= Fqk.equal Fqk.one in Boolean.(test1 && test2) diff --git a/src/lib/snarky_verifier/groth.ml b/src/lib/snarky_verifier/groth.ml index a88b987079e..cc07e069fe6 100644 --- a/src/lib/snarky_verifier/groth.ml +++ b/src/lib/snarky_verifier/groth.ml @@ -8,7 +8,7 @@ module Make (Inputs : Inputs.S) = struct module Verification_key = struct type ('g1, 'g2, 'fqk) t_ = - {query_base: 'g1; query: 'g1 list; delta: 'g2; alpha_beta: 'fqk} + { query_base : 'g1; query : 'g1 list; delta : 'g2; alpha_beta : 'fqk } [@@deriving fields] include Summary.Make (Inputs) @@ -17,14 +17,16 @@ module Make (Inputs : Inputs.S) = struct summary_length_in_bits ~twist_extension_degree ~g1_count:(input_size + 1) ~g2_count:1 ~gt_count:1 - let summary_input {query_base; query; delta; alpha_beta} = - {Summary.Input.g1s= query_base :: query; g2s= [delta]; gts= [alpha_beta]} + let summary_input { query_base; query; delta; alpha_beta } = + { Summary.Input.g1s = query_base :: query + ; g2s = [ delta ] + ; gts = [ alpha_beta ] + } type ('a, 'b, 
'c) vk = ('a, 'b, 'c) t_ let if_pair if_ b ~then_:(tx, ty) ~else_:(ex, ey) = - let%map x = if_ b ~then_:tx ~else_:ex - and y = if_ b ~then_:ty ~else_:ey in + let%map x = if_ b ~then_:tx ~else_:ex and y = if_ b ~then_:ty ~else_:ey in (x, y) let if_g1 b ~then_ ~else_ = if_pair Field.Checked.if_ b ~then_ ~else_ @@ -33,7 +35,7 @@ module Make (Inputs : Inputs.S) = struct let if_list if_ b ~then_ ~else_ = Checked.List.map (List.zip_exn then_ else_) ~f:(fun (t, e) -> - if_ b ~then_:t ~else_:e ) + if_ b ~then_:t ~else_:e) let if_ b ~then_ ~else_ = let c if_ p = if_ b ~then_:(p then_) ~else_:(p else_) in @@ -41,44 +43,44 @@ module Make (Inputs : Inputs.S) = struct and query = c (if_list if_g1) query and delta = c if_g2 delta and alpha_beta = c Fqk.if_ alpha_beta in - {query_base; query; delta; alpha_beta} + { query_base; query; delta; alpha_beta } module Precomputation = struct - type t = {delta: G2_precomputation.t} + type t = { delta : G2_precomputation.t } let create (vk : (_, _, _) vk) = let%map delta = G2_precomputation.create vk.delta in - {delta} + { delta } let create_constant (vk : (_, _, _) vk) = - {delta= G2_precomputation.create_constant vk.delta} + { delta = G2_precomputation.create_constant vk.delta } let if_ b ~then_ ~else_ = let%map delta = G2_precomputation.if_ b ~then_:then_.delta ~else_:else_.delta in - {delta} + { delta } end end module Proof = struct - type ('g1, 'g2) t_ = {a: 'g1; b: 'g2; c: 'g1} [@@deriving sexp, hlist] + type ('g1, 'g2) t_ = { a : 'g1; b : 'g2; c : 'g1 } [@@deriving sexp, hlist] let typ = Typ.of_hlistable - Data_spec.[G1.typ; G2.typ; G1.typ] + Data_spec.[ G1.typ; G2.typ; G1.typ ] ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist end let verify (vk : (_, _, _) Verification_key.t_) - (vk_precomp : Verification_key.Precomputation.t) inputs {Proof.a; b; c} = + (vk_precomp : Verification_key.Precomputation.t) inputs { Proof.a; b; c } + = let%bind acc = let%bind (module Shifted) = 
G1.Shifted.create () in let%bind init = Shifted.(add zero vk.query_base) in Checked.List.fold (List.zip_exn vk.query inputs) ~init - ~f:(fun acc (g, input) -> G1.scale (module Shifted) g input ~init:acc - ) + ~f:(fun acc (g, input) -> G1.scale (module Shifted) g input ~init:acc) >>= Shifted.unshift_nonzero in let%bind test = @@ -89,7 +91,8 @@ module Make (Inputs : Inputs.S) = struct , G1_precomputation.create acc , G2_precomputation.create_constant G2.Unchecked.one ) ; (Neg, G1_precomputation.create c, vk_precomp.delta) - ; (Pos, G1_precomputation.create a, b) ] + ; (Pos, G1_precomputation.create a, b) + ] >>= final_exponentiation in Fqk.equal test vk.alpha_beta diff --git a/src/lib/snarky_verifier/groth_maller.ml b/src/lib/snarky_verifier/groth_maller.ml index 47755ad81b6..b43c7b65468 100644 --- a/src/lib/snarky_verifier/groth_maller.ml +++ b/src/lib/snarky_verifier/groth_maller.ml @@ -8,14 +8,15 @@ module Make (Inputs : Inputs.S) = struct module Verification_key = struct type ('g1, 'g2, 'fqk) t_ = - { query_base: 'g1 - ; query: 'g1 list - ; h: 'g2 - ; g_alpha: 'g1 - ; h_beta: 'g2 - ; g_gamma: 'g1 - ; h_gamma: 'g2 - ; g_alpha_h_beta: 'fqk } + { query_base : 'g1 + ; query : 'g1 list + ; h : 'g2 + ; g_alpha : 'g1 + ; h_beta : 'g2 + ; g_gamma : 'g1 + ; h_gamma : 'g2 + ; g_alpha_h_beta : 'fqk + } [@@deriving hlist] let typ ~input_size = @@ -28,7 +29,8 @@ module Make (Inputs : Inputs.S) = struct ; G2.typ ; G1.typ ; G2.typ - ; Fqk.typ ] + ; Fqk.typ + ] in Typ.of_hlistable spec ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist @@ -48,47 +50,50 @@ module Make (Inputs : Inputs.S) = struct ; h ; h_beta ; h_gamma - ; g_alpha_h_beta } = - { Summary.Input.g1s= g_alpha :: g_gamma :: query_base :: query - ; g2s= [h; h_beta; h_gamma] - ; gts= [g_alpha_h_beta] } + ; g_alpha_h_beta + } = + { Summary.Input.g1s = g_alpha :: g_gamma :: query_base :: query + ; g2s = [ h; h_beta; h_gamma ] + ; gts = [ g_alpha_h_beta ] + } type 
('a, 'b, 'c) vk = ('a, 'b, 'c) t_ module Precomputation = struct - type t = {h_gamma: G2_precomputation.t; h: G2_precomputation.t} + type t = { h_gamma : G2_precomputation.t; h : G2_precomputation.t } let create (vk : (_, _, _) vk) = let%map h_gamma = G2_precomputation.create vk.h_gamma and h = G2_precomputation.create vk.h in - {h_gamma; h} + { h_gamma; h } let create_constant (vk : (_, _, _) vk) = let h_gamma = G2_precomputation.create_constant vk.h_gamma and h = G2_precomputation.create_constant vk.h in - {h_gamma; h} + { h_gamma; h } end end module Proof = struct - type ('g1, 'g2) t_ = {a: 'g1; b: 'g2; c: 'g1} [@@deriving hlist] + type ('g1, 'g2) t_ = { a : 'g1; b : 'g2; c : 'g1 } [@@deriving hlist] let typ = Typ.of_hlistable - Data_spec.[G1.typ; G2.typ; G1.typ] + Data_spec.[ G1.typ; G2.typ; G1.typ ] ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist end let verify (vk : (_, _, _) Verification_key.t_) - (vk_precomp : Verification_key.Precomputation.t) inputs {Proof.a; b; c} = + (vk_precomp : Verification_key.Precomputation.t) inputs { Proof.a; b; c } + = let%bind (module G1_shifted) = G1.Shifted.create () in let%bind (module G2_shifted) = G2.Shifted.create () in let%bind acc = let%bind init = G1_shifted.(add zero vk.query_base) in Checked.List.fold (List.zip_exn vk.query inputs) ~init ~f:(fun acc (g, input) -> - G1.scale (module G1_shifted) g input ~init:acc ) + G1.scale (module G1_shifted) g input ~init:acc) >>= G1_shifted.unshift_nonzero in let%bind test1 = @@ -102,14 +107,16 @@ module Make (Inputs : Inputs.S) = struct batch_miller_loop [ (Pos, G1_precomputation.create a_g_alpha, b_h_beta) ; (Neg, G1_precomputation.create acc, vk_precomp.h_gamma) - ; (Neg, G1_precomputation.create c, vk_precomp.h) ] + ; (Neg, G1_precomputation.create c, vk_precomp.h) + ] >>= final_exponentiation >>= Fqk.equal vk.g_alpha_h_beta and test2 = let%bind b = G2_precomputation.create b in batch_miller_loop [ (Pos, 
G1_precomputation.create a, vk_precomp.h_gamma) - ; (Neg, G1_precomputation.create vk.g_gamma, b) ] + ; (Neg, G1_precomputation.create vk.g_gamma, b) + ] >>= final_exponentiation >>= Fqk.equal Fqk.one in Boolean.(test1 && test2) diff --git a/src/lib/snarky_verifier/inputs.ml b/src/lib/snarky_verifier/inputs.ml index d345f04c041..1cbb878a76e 100644 --- a/src/lib/snarky_verifier/inputs.ml +++ b/src/lib/snarky_verifier/inputs.ml @@ -17,9 +17,9 @@ module type S = sig module Shifted : sig module type S = Snarky_curves.Shifted_intf - with type ('a, 'b) checked := ('a, 'b) Checked.t - and type curve_var := t - and type boolean_var := Boolean.var + with type ('a, 'b) checked := ('a, 'b) Checked.t + and type curve_var := t + and type boolean_var := Boolean.var type 'a m = (module S with type t = 'a) @@ -41,9 +41,9 @@ module type S = sig module Shifted : sig module type S = Snarky_curves.Shifted_intf - with type ('a, 'b) checked := ('a, 'b) Checked.t - and type curve_var := t - and type boolean_var := Boolean.var + with type ('a, 'b) checked := ('a, 'b) Checked.t + and type curve_var := t + and type boolean_var := Boolean.var type 'a m = (module S with type t = 'a) diff --git a/src/lib/snarky_verifier/summary.ml b/src/lib/snarky_verifier/summary.ml index 755c4105fa8..38a3d1ffd3c 100644 --- a/src/lib/snarky_verifier/summary.ml +++ b/src/lib/snarky_verifier/summary.ml @@ -13,7 +13,7 @@ module type Inputs_intf = sig end module Input = struct - type ('g1, 'g2, 'gt) t = {g1s: 'g1 list; g2s: 'g2 list; gts: 'gt list} + type ('g1, 'g2, 'gt) t = { g1s : 'g1 list; g2s : 'g2 list; gts : 'gt list } end module Make (Inputs : Inputs_intf) = struct @@ -31,13 +31,13 @@ module Make (Inputs : Inputs_intf) = struct let signs = g1_count + g2_count + gt_count in (elts * Field.size_in_bits) + signs - let summary {Input.g1s; g2s; gts} = + let summary { Input.g1s; g2s; gts } = let%map elts = List.map g1s ~f:(fun (x, _) -> x) @ List.concat_map g2s ~f:(fun (x, _) -> Fqe.to_list x) @ List.concat_map 
gts ~f:(fun (a, _) -> Fqe.to_list a) |> Checked.List.map ~f:(fun x -> - Field.Checked.choose_preimage_var x ~length:Field.size_in_bits ) + Field.Checked.choose_preimage_var x ~length:Field.size_in_bits) >>| List.concat and signs = let parity x = @@ -56,7 +56,7 @@ module Make (Inputs : Inputs_intf) = struct in elts @ signs - let summary_unchecked {Input.g1s; g2s; gts} = + let summary_unchecked { Input.g1s; g2s; gts } = let parity x = Bigint.(test_bit (of_field x) 0) in let elts = List.map g1s ~f:(fun (x, _) -> x) diff --git a/src/lib/sparse_ledger_lib/sparse_ledger.ml b/src/lib/sparse_ledger_lib/sparse_ledger.ml index c10c3efbda1..86261c76325 100644 --- a/src/lib/sparse_ledger_lib/sparse_ledger.ml +++ b/src/lib/sparse_ledger_lib/sparse_ledger.ml @@ -28,20 +28,22 @@ module T = struct module V1 = struct type ('hash, 'key, 'account, 'token_id) t = - { indexes: ('key * int) list - ; depth: int - ; tree: ('hash, 'account) Tree.Stable.V1.t - ; next_available_token: 'token_id } + { indexes : ('key * int) list + ; depth : int + ; tree : ('hash, 'account) Tree.Stable.V1.t + ; next_available_token : 'token_id + } [@@deriving sexp, to_yojson] end end] type ('hash, 'key, 'account, 'token_id) t = ('hash, 'key, 'account, 'token_id) Stable.Latest.t = - { indexes: ('key * int) list - ; depth: int - ; tree: ('hash, 'account) Tree.t - ; next_available_token: 'token_id } + { indexes : ('key * int) list + ; depth : int + ; tree : ('hash, 'account) Tree.t + ; next_available_token : 'token_id + } [@@deriving sexp, to_yojson] end @@ -61,14 +63,14 @@ module type S = sig val get_exn : t -> int -> account - val path_exn : t -> int -> [`Left of hash | `Right of hash] list + val path_exn : t -> int -> [ `Left of hash | `Right of hash ] list val set_exn : t -> int -> account -> t val find_index_exn : t -> account_id -> int val add_path : - t -> [`Left of hash | `Right of hash] list -> account_id -> account -> t + t -> [ `Left of hash | `Right of hash ] list -> account_id -> account -> t val 
iteri : t -> f:(int -> account -> unit) -> unit @@ -79,10 +81,10 @@ module type S = sig val next_available_token : t -> token_id end -let tree {T.tree; _} = tree +let tree { T.tree; _ } = tree let of_hash ~depth ~next_available_token h = - {T.indexes= []; depth; tree= Hash h; next_available_token} + { T.indexes = []; depth; tree = Hash h; next_available_token } module Make (Hash : sig type t [@@deriving equal, sexp, to_yojson, compare] @@ -105,10 +107,10 @@ end) (Account : sig end) : sig include S - with type hash := Hash.t - and type token_id := Token_id.t - and type account_id := Account_id.t - and type account := Account.t + with type hash := Hash.t + and type token_id := Token_id.t + and type account_id := Account_id.t + and type account := Account.t val hash : (Hash.t, Account.t) Tree.t -> Hash.t end = struct @@ -128,11 +130,11 @@ end = struct type index = int [@@deriving sexp, to_yojson] - let depth {T.depth; _} = depth + let depth { T.depth; _ } = depth - let merkle_root {T.tree; _} = hash tree + let merkle_root { T.tree; _ } = hash tree - let next_available_token {T.next_available_token; _} = next_available_token + let next_available_token { T.next_available_token; _ } = next_available_token let add_path depth0 tree0 path0 account = let rec build_tree height p = @@ -178,11 +180,12 @@ end = struct let add_path (t : t) path account_id account = let index = List.foldi path ~init:0 ~f:(fun i acc x -> - match x with `Right _ -> acc + (1 lsl i) | `Left _ -> acc ) + match x with `Right _ -> acc + (1 lsl i) | `Left _ -> acc) in { t with - tree= add_path t.depth t.tree path account - ; indexes= (account_id, index) :: t.indexes } + tree = add_path t.depth t.tree path account + ; indexes = (account_id, index) :: t.indexes + } let iteri (t : t) ~f = let rec go acc i tree ~f = @@ -202,7 +205,7 @@ end = struct let find_index_exn (t : t) aid = List.Assoc.find_exn t.indexes ~equal:Account_id.equal aid - let get_exn ({T.tree; depth; _} as t) idx = + let get_exn ({ T.tree; 
depth; _ } as t) idx = let rec go i tree = match (i < 0, tree) with | true, Tree.Account acct -> @@ -257,11 +260,12 @@ end = struct in let acct_token = Account.token acct in { t with - tree= go (t.depth - 1) t.tree - ; next_available_token= - Token_id.(max t.next_available_token (next acct_token)) } + tree = go (t.depth - 1) t.tree + ; next_available_token = + Token_id.(max t.next_available_token (next acct_token)) + } - let path_exn {T.tree; depth; _} idx = + let path_exn { T.tree; depth; _ } idx = let rec go acc i tree = if i < 0 then acc else @@ -311,13 +315,13 @@ let%test_module "sparse-ledger-test" = module Account = struct module T = struct - type t = {name: string; favorite_number: int} + type t = { name : string; favorite_number : int } [@@deriving bin_io, equal, sexp, to_yojson] end include T - let key {name; _} = name + let key { name; _ } = name let data_hash t = Md5.digest_string (Binable.to_string (module T) t) @@ -327,7 +331,7 @@ let%test_module "sparse-ledger-test" = let open Quickcheck.Generator.Let_syntax in let%map name = String.quickcheck_generator and favorite_number = Int.quickcheck_generator in - {name; favorite_number} + { name; favorite_number } end module Account_id = struct @@ -342,7 +346,7 @@ let%test_module "sparse-ledger-test" = let indexes max_depth t = let rec go addr d = function | Tree.Account a -> - [(Account.key a, addr)] + [ (Account.key a, addr) ] | Hash _ -> [] | Node (_, l, r) -> @@ -356,11 +360,11 @@ let%test_module "sparse-ledger-test" = | Account a -> Account a | Node (h, l, r) -> ( - match (prune_hash_branches l, prune_hash_branches r) with - | Hash _, Hash _ -> - Hash h - | l, r -> - Node (h, l, r) ) + match (prune_hash_branches l, prune_hash_branches r) with + | Hash _, Hash _ -> + Hash h + | l, r -> + Node (h, l, r) ) in let rec gen depth = if depth = 0 then Account.gen >>| fun a -> Tree.Account a @@ -371,11 +375,11 @@ let%test_module "sparse-ledger-test" = Tree.Node (Hash.merge ~height:(depth - 1) (hash l) (hash r), l, 
r) in weighted_union - [(1. /. 3., Hash.gen >>| fun h -> Tree.Hash h); (2. /. 3., t)] + [ (1. /. 3., Hash.gen >>| fun h -> Tree.Hash h); (2. /. 3., t) ] in let%bind depth = Int.gen_incl 0 16 in let%map tree = gen depth >>| prune_hash_branches in - {T.tree; depth; indexes= indexes depth tree; next_available_token= ()} + { T.tree; depth; indexes = indexes depth tree; next_available_token = () } let%test_unit "iteri consistent indices with t.indexes" = Quickcheck.test gen ~f:(fun t -> @@ -385,16 +389,15 @@ let%test_module "sparse-ledger-test" = ~message: "Iteri index should be contained in the indexes auxillary \ structure" - ~expect:true (Int.Set.mem indexes i) ) ) + ~expect:true (Int.Set.mem indexes i))) let%test_unit "path_test" = Quickcheck.test gen ~f:(fun t -> - let root = {t with indexes= []; tree= Hash (merkle_root t)} in + let root = { t with indexes = []; tree = Hash (merkle_root t) } in let t' = List.fold t.indexes ~init:root ~f:(fun acc (_, index) -> let account = get_exn t index in - add_path acc (path_exn t index) (Account.key account) account - ) + add_path acc (path_exn t index) (Account.key account) account) in - assert (Tree.equal Hash.equal Account.equal t'.tree t.tree) ) + assert (Tree.equal Hash.equal Account.equal t'.tree t.tree)) end ) diff --git a/src/lib/staged_ledger/diff_creation_log.ml b/src/lib/staged_ledger/diff_creation_log.ml index f3ab2c9adf9..df4afa3286f 100644 --- a/src/lib/staged_ledger/diff_creation_log.ml +++ b/src/lib/staged_ledger/diff_creation_log.ml @@ -15,25 +15,29 @@ end module Summary = struct type resources = - { completed_work: count_and_fee - ; commands: count_and_fee - ; coinbase_work_fees: Currency.Fee.t Staged_ledger_diff.At_most_two.t } + { completed_work : count_and_fee + ; commands : count_and_fee + ; coinbase_work_fees : Currency.Fee.t Staged_ledger_diff.At_most_two.t + } [@@deriving sexp, to_yojson, lens] - type command_constraints = {insufficient_work: int; insufficient_space: int} + type command_constraints = + 
{ insufficient_work : int; insufficient_space : int } [@@deriving sexp, to_yojson, lens] - type completed_work_constraints = {insufficient_fees: int; extra_work: int} + type completed_work_constraints = + { insufficient_fees : int; extra_work : int } [@@deriving sexp, to_yojson, lens] type t = - { partition: [`First | `Second] - ; start_resources: resources - ; available_slots: int - ; required_work_count: int - ; discarded_commands: command_constraints - ; discarded_completed_work: completed_work_constraints - ; end_resources: resources } + { partition : [ `First | `Second ] + ; start_resources : resources + ; available_slots : int + ; required_work_count : int + ; discarded_commands : command_constraints + ; discarded_completed_work : completed_work_constraints + ; end_resources : resources + } [@@deriving sexp, to_yojson, lens] let coinbase_fees @@ -70,19 +74,22 @@ module Summary = struct ~f:(fun cmd -> User_command.fee_exn (cmd.data :> User_command.t)) ) in let coinbase_work_fees = coinbase_fees coinbase in - {completed_work; commands; coinbase_work_fees} + { completed_work; commands; coinbase_work_fees } let init ~(completed_work : Transaction_snark_work.Checked.t Sequence.t) ~(commands : User_command.Valid.t With_status.t Sequence.t) ~(coinbase : Coinbase.Fee_transfer.t Staged_ledger_diff.At_most_two.t) ~partition ~available_slots ~required_work_count = let start_resources = init_resources ~completed_work ~commands ~coinbase in - let discarded_commands = {insufficient_work= 0; insufficient_space= 0} in - let discarded_completed_work = {insufficient_fees= 0; extra_work= 0} in + let discarded_commands = + { insufficient_work = 0; insufficient_space = 0 } + in + let discarded_completed_work = { insufficient_fees = 0; extra_work = 0 } in let end_resources = - { completed_work= (0, Currency.Fee.zero) - ; commands= (0, Currency.Fee.zero) - ; coinbase_work_fees= Staged_ledger_diff.At_most_two.Zero } + { completed_work = (0, Currency.Fee.zero) + ; commands = (0, 
Currency.Fee.zero) + ; coinbase_work_fees = Staged_ledger_diff.At_most_two.Zero + } in { partition ; available_slots @@ -90,7 +97,8 @@ module Summary = struct ; start_resources ; discarded_completed_work ; discarded_commands - ; end_resources } + ; end_resources + } let end_log t ~(completed_work : Transaction_snark_work.Checked.t Sequence.t) ~(commands : User_command.Valid.t With_status.t Sequence.t) @@ -101,7 +109,7 @@ module Summary = struct let nested_field = top.get t in top.set (nested.set (nested.get nested_field + 1) nested_field) t - let discard_command (why : [> `No_work | `No_space]) t = + let discard_command (why : [> `No_work | `No_space ]) t = match why with | `No_work -> incr discarded_commands command_constraints_insufficient_work t @@ -110,7 +118,7 @@ module Summary = struct | _ -> t - let discard_completed_work (why : [> `Insufficient_fees | `Extra_work]) t = + let discard_completed_work (why : [> `Insufficient_fees | `Extra_work ]) t = match why with | `Insufficient_fees -> incr discarded_completed_work @@ -123,11 +131,17 @@ end module Detail = struct type line = - { reason: - [`No_space | `No_work | `Insufficient_fees | `Extra_work | `Init | `End] - ; commands: count_and_fee - ; completed_work: count_and_fee - ; coinbase: Currency.Fee.t Staged_ledger_diff.At_most_two.t } + { reason : + [ `No_space + | `No_work + | `Insufficient_fees + | `Extra_work + | `Init + | `End ] + ; commands : count_and_fee + ; completed_work : count_and_fee + ; coinbase : Currency.Fee.t Staged_ledger_diff.At_most_two.t + } [@@deriving sexp, to_yojson, lens] type t = line list [@@deriving sexp, to_yojson] @@ -136,39 +150,42 @@ module Detail = struct ~(commands : User_command.Valid.t With_status.t Sequence.t) ~(coinbase : Coinbase.Fee_transfer.t Staged_ledger_diff.At_most_two.t) = let init = Summary.init_resources ~completed_work ~commands ~coinbase in - [ { reason= `Init - ; commands= init.commands - ; completed_work= init.completed_work - ; coinbase= 
init.coinbase_work_fees } ] - - let discard_command (why : [> `No_work | `No_space]) command = function + [ { reason = `Init + ; commands = init.commands + ; completed_work = init.completed_work + ; coinbase = init.coinbase_work_fees + } + ] + + let discard_command (why : [> `No_work | `No_space ]) command = function | [] -> failwith "Log not initialized" | x :: xs -> let new_line = { x with - reason= why - ; commands= + reason = why + ; commands = ( fst x.commands - 1 - , Currency.Fee.sub (snd x.commands) - (User_command.fee_exn command) - |> Option.value_exn ) } + , Currency.Fee.sub (snd x.commands) (User_command.fee_exn command) + |> Option.value_exn ) + } in new_line :: x :: xs - let discard_completed_work (why : [> `Insufficient_fees | `Extra_work]) + let discard_completed_work (why : [> `Insufficient_fees | `Extra_work ]) completed_work = function | [] -> failwith "Log not initialized" | x :: xs -> let new_line = { x with - reason= why - ; completed_work= + reason = why + ; completed_work = ( fst x.completed_work - 1 , Currency.Fee.sub (snd x.completed_work) (Transaction_snark_work.fee completed_work) - |> Option.value_exn ) } + |> Option.value_exn ) + } in new_line :: x :: xs @@ -177,7 +194,7 @@ module Detail = struct failwith "Log not initialized" | x :: xs -> (*Because coinbase could be updated ooutside of the check_constraints_and_update function*) - {x with reason= `End; coinbase= Summary.coinbase_fees coinbase} + { x with reason = `End; coinbase = Summary.coinbase_fees coinbase } :: x :: xs end @@ -194,8 +211,8 @@ let init ~(completed_work : Transaction_snark_work.Checked.t Sequence.t) ~(coinbase : Coinbase.Fee_transfer.t Staged_ledger_diff.At_most_two.t) ~partition ~available_slots ~required_work_count = let summary = - Summary.init ~completed_work ~commands ~coinbase ~partition - ~available_slots ~required_work_count + Summary.init ~completed_work ~commands ~coinbase ~partition ~available_slots + ~required_work_count in let detailed = Detail.init 
~completed_work ~commands ~coinbase in (summary, detailed) diff --git a/src/lib/staged_ledger/pre_diff_info.ml b/src/lib/staged_ledger/pre_diff_info.ml index 57c50cf174e..8da7ba8ad04 100644 --- a/src/lib/staged_ledger/pre_diff_info.ml +++ b/src/lib/staged_ledger/pre_diff_info.ml @@ -77,10 +77,11 @@ module Error = struct end type 't t = - { transactions: 't With_status.t list - ; work: Transaction_snark_work.t list - ; commands_count: int - ; coinbases: Currency.Amount.t list } + { transactions : 't With_status.t list + ; work : Transaction_snark_work.t list + ; commands_count : int + ; coinbases : Currency.Amount.t list + } (*A Coinbase is a single transaction that accommodates the coinbase amount and a fee transfer for the work required to add the coinbase. It also @@ -128,8 +129,8 @@ let create_coinbase let%bind rem_coinbase = underflow_err coinbase_amount amt in let%bind _ = underflow_err rem_coinbase - (Option.value_map ~default:Currency.Amount.zero ft2 ~f:(fun {fee; _} -> - Currency.Amount.of_fee fee )) + (Option.value_map ~default:Currency.Amount.zero ft2 + ~f:(fun { fee; _ } -> Currency.Amount.of_fee fee)) in let%bind cb1 = coinbase_or_error @@ -139,7 +140,7 @@ let create_coinbase Coinbase.create ~amount:rem_coinbase ~receiver ~fee_transfer:ft2 |> coinbase_or_error in - [cb1; cb2] + [ cb1; cb2 ] in match coinbase_parts with | `Zero -> @@ -149,12 +150,12 @@ let create_coinbase Coinbase.create ~amount:coinbase_amount ~receiver ~fee_transfer:x |> coinbase_or_error in - [cb] + [ cb ] | `Two None -> two_parts (Currency.Amount.of_fee constraint_constants.account_creation_fee) None None - | `Two (Some (({Coinbase.Fee_transfer.fee; _} as ft1), ft2)) -> + | `Two (Some (({ Coinbase.Fee_transfer.fee; _ } as ft1), ft2)) -> let%bind amount = let%map fee = Currency.Fee.add constraint_constants.account_creation_fee fee @@ -174,14 +175,14 @@ let create_coinbase two_parts amount (Some ft1) ft2 let sum_fees xs ~f = - with_return (fun {return} -> + with_return (fun { return } 
-> Ok (List.fold ~init:Currency.Fee.zero xs ~f:(fun acc x -> match Currency.Fee.add acc (f x) with | None -> return (Or_error.error_string "Fee overflow") | Some res -> - res )) ) + res))) let to_staged_ledger_or_error = Result.map_error ~f:(fun error -> Error.Unexpected error) @@ -190,11 +191,12 @@ let fee_remainder (type c) (commands : c With_status.t list) completed_works coinbase_fee ~forget = let open Result.Let_syntax in let%bind budget = - sum_fees commands ~f:(fun {data= t; _} -> User_command.fee_exn (forget t)) + sum_fees commands ~f:(fun { data = t; _ } -> + User_command.fee_exn (forget t)) |> to_staged_ledger_or_error in let%bind work_fee = - sum_fees completed_works ~f:(fun {Transaction_snark_work.fee; _} -> fee) + sum_fees completed_works ~f:(fun { Transaction_snark_work.fee; _ } -> fee) |> to_staged_ledger_or_error in let total_work_fee = @@ -209,22 +211,22 @@ let fee_remainder (type c) (commands : c With_status.t list) completed_works let create_fee_transfers completed_works delta public_key coinbase_fts = let open Result.Let_syntax in let singles = - (if Currency.Fee.(equal zero delta) then [] else [(public_key, delta)]) + (if Currency.Fee.(equal zero delta) then [] else [ (public_key, delta) ]) @ List.filter_map completed_works - ~f:(fun {Transaction_snark_work.fee; prover; _} -> + ~f:(fun { Transaction_snark_work.fee; prover; _ } -> if Currency.Fee.equal fee Currency.Fee.zero then None - else Some (prover, fee) ) + else Some (prover, fee)) in let%bind singles_map = Or_error.try_with (fun () -> Public_key.Compressed.Map.of_alist_reduce singles ~f:(fun f1 f2 -> - Option.value_exn (Currency.Fee.add f1 f2) ) ) + Option.value_exn (Currency.Fee.add f1 f2))) |> to_staged_ledger_or_error in (* deduct the coinbase work fee from the singles_map. 
It is already part of the coinbase *) Or_error.try_with (fun () -> List.fold coinbase_fts ~init:singles_map - ~f:(fun accum {Coinbase.Fee_transfer.receiver_pk; fee= cb_fee} -> + ~f:(fun accum { Coinbase.Fee_transfer.receiver_pk; fee = cb_fee } -> match Public_key.Compressed.Map.find accum receiver_pk with | None -> accum @@ -232,32 +234,33 @@ let create_fee_transfers completed_works delta public_key coinbase_fts = let new_fee = Option.value_exn (Currency.Fee.sub fee cb_fee) in if Currency.Fee.(new_fee > Currency.Fee.zero) then Public_key.Compressed.Map.update accum receiver_pk ~f:(fun _ -> - new_fee ) - else Public_key.Compressed.Map.remove accum receiver_pk ) + new_fee) + else Public_key.Compressed.Map.remove accum receiver_pk) (* TODO: This creates a weird incentive to have a small public_key *) |> Map.to_alist ~key_order:`Increasing |> List.map ~f:(fun (receiver_pk, fee) -> Fee_transfer.Single.create ~receiver_pk ~fee - ~fee_token:Token_id.default ) + ~fee_token:Token_id.default) |> One_or_two.group_list |> List.map ~f:Fee_transfer.of_singles - |> Or_error.all ) + |> Or_error.all) |> Or_error.join |> to_staged_ledger_or_error module Transaction_data = struct type 'a t = - { commands: 'a With_status.t list - ; coinbases: Coinbase.t list - ; fee_transfers: Fee_transfer.t list } + { commands : 'a With_status.t list + ; coinbases : Coinbase.t list + ; fee_transfers : Fee_transfer.t list + } end -let get_transaction_data (type c) ~constraint_constants coinbase_parts - ~receiver ~coinbase_amount commands completed_works ~(forget : c -> _) = +let get_transaction_data (type c) ~constraint_constants coinbase_parts ~receiver + ~coinbase_amount commands completed_works ~(forget : c -> _) = let open Result.Let_syntax in let%bind coinbases = O1trace.measure "create_coinbase" (fun () -> create_coinbase ~constraint_constants coinbase_parts ~receiver - ~coinbase_amount ) + ~coinbase_amount) in let coinbase_fts = List.concat_map coinbases ~f:(fun cb -> Option.to_list 
cb.fee_transfer) @@ -266,8 +269,8 @@ let get_transaction_data (type c) ~constraint_constants coinbase_parts sum_fees ~f:Coinbase.Fee_transfer.fee coinbase_fts |> Or_error.ok_exn in let txn_works_others = - List.filter completed_works ~f:(fun {Transaction_snark_work.prover; _} -> - not (Public_key.Compressed.equal receiver prover) ) + List.filter completed_works ~f:(fun { Transaction_snark_work.prover; _ } -> + not (Public_key.Compressed.equal receiver prover)) in let%bind delta = fee_remainder commands txn_works_others coinbase_work_fees ~forget @@ -275,14 +278,16 @@ let get_transaction_data (type c) ~constraint_constants coinbase_parts let%map fee_transfers = create_fee_transfers txn_works_others delta receiver coinbase_fts in - {Transaction_data.commands; coinbases; fee_transfers} + { Transaction_data.commands; coinbases; fee_transfers } let get_individual_info (type c) ~constraint_constants coinbase_parts ~receiver ~coinbase_amount ~internal_command_balances commands completed_works ~(forget : c -> _) = let open Result.Let_syntax in - let%bind {Transaction_data.commands; coinbases= coinbase_parts; fee_transfers} - = + let%bind { Transaction_data.commands + ; coinbases = coinbase_parts + ; fee_transfers + } = get_transaction_data ~constraint_constants coinbase_parts ~receiver ~coinbase_amount commands completed_works ~forget in @@ -302,8 +307,8 @@ let get_individual_info (type c) ~constraint_constants coinbase_parts ~receiver Transaction_status.Coinbase_balance_data.to_balance_data balances in - { With_status.data= cmd - ; status= + { With_status.data = cmd + ; status = Applied (Transaction_status.Auxiliary_data.empty, balances) } | ( Transaction.Fee_transfer _ @@ -313,13 +318,13 @@ let get_individual_info (type c) ~constraint_constants coinbase_parts ~receiver Transaction_status.Fee_transfer_balance_data.to_balance_data balances in - { With_status.data= cmd - ; status= + { With_status.data = cmd + ; status = Applied (Transaction_status.Auxiliary_data.empty, 
balances) } | _ -> (* Caught by [try_with] above, it doesn't matter what we throw. *) - assert false ) ) + assert false)) |> Result.map_error ~f:(fun _ -> Error.Internal_command_status_mismatch) in let transactions = @@ -327,27 +332,28 @@ let get_individual_info (type c) ~constraint_constants coinbase_parts ~receiver @ internal_commands_with_statuses in { transactions - ; work= completed_works - ; commands_count= List.length commands - ; coinbases= List.map coinbase_parts ~f:(fun Coinbase.{amount; _} -> amount) + ; work = completed_works + ; commands_count = List.length commands + ; coinbases = + List.map coinbase_parts ~f:(fun Coinbase.{ amount; _ } -> amount) } let generate_statuses (type c) ~constraint_constants coinbase_parts ~receiver ~coinbase_amount commands completed_works ~(forget : c -> _) ~generate_status = let open Result.Let_syntax in - let%bind {Transaction_data.commands; coinbases; fee_transfers} = + let%bind { Transaction_data.commands; coinbases; fee_transfers } = get_transaction_data ~constraint_constants coinbase_parts ~receiver ~coinbase_amount commands completed_works ~forget in let%bind transactions = Or_error.try_with (fun () -> List.map commands ~f:(fun cmd -> - { With_status.data= cmd.With_status.data - ; status= + { With_status.data = cmd.With_status.data + ; status = Or_error.ok_exn - (generate_status (Transaction.Command (forget cmd.data))) } - ) ) + (generate_status (Transaction.Command (forget cmd.data))) + })) |> Result.map_error ~f:(fun err -> Error.Unexpected err) in let%map internal_command_balances = @@ -360,7 +366,7 @@ let generate_statuses (type c) ~constraint_constants coinbase_parts ~receiver let open Transaction_status in Internal_command_balance_data.Coinbase (Coinbase_balance_data.of_balance_data_exn - (balance_data status)) ) + (balance_data status))) in let fee_transfers = List.map fee_transfers ~f:(fun t -> @@ -370,9 +376,9 @@ let generate_statuses (type c) ~constraint_constants coinbase_parts ~receiver let open 
Transaction_status in Internal_command_balance_data.Fee_transfer (Fee_transfer_balance_data.of_balance_data_exn - (balance_data status)) ) + (balance_data status))) in - coinbases @ fee_transfers ) + coinbases @ fee_transfers) |> Result.map_error ~f:(fun err -> Error.Unexpected err) in (transactions, internal_command_balances) @@ -383,7 +389,7 @@ let check_coinbase (diff : _ Pre_diff_two.t * _ Pre_diff_one.t option) = match ( (fst diff).coinbase , Option.value_map ~default:At_most_one.Zero (snd diff) ~f:(fun d -> - d.coinbase ) ) + d.coinbase) ) with | Zero, Zero | Zero, One _ | One _, Zero | Two _, Zero -> Ok () @@ -403,13 +409,7 @@ let compute_statuses (type c) let get_statuses_pre_diff_with_at_most_two (t1 : (_, c With_status.t) Pre_diff_two.t) = let coinbase_parts = - match t1.coinbase with - | Zero -> - `Zero - | One x -> - `One x - | Two x -> - `Two x + match t1.coinbase with Zero -> `Zero | One x -> `One x | Two x -> `Two x in let%map commands, internal_command_balances = generate_statuses ~constraint_constants ~generate_status coinbase_parts @@ -417,9 +417,10 @@ let compute_statuses (type c) ~coinbase_amount ~forget in ( { commands - ; completed_works= t1.completed_works - ; coinbase= t1.coinbase - ; internal_command_balances } + ; completed_works = t1.completed_works + ; coinbase = t1.coinbase + ; internal_command_balances + } : _ Pre_diff_two.t ) in let get_statuses_pre_diff_with_at_most_one @@ -434,9 +435,10 @@ let compute_statuses (type c) in Some ( { commands - ; completed_works= t2.completed_works - ; coinbase= t2.coinbase - ; internal_command_balances } + ; completed_works = t2.completed_works + ; coinbase = t2.coinbase + ; internal_command_balances + } : _ Pre_diff_one.t ) in let%bind p1 = get_statuses_pre_diff_with_at_most_two (fst diff) in @@ -463,31 +465,23 @@ let get' (type c) constraint_constants.coinbase_amount))) ~f:(fun x -> Ok x) in - let apply_pre_diff_with_at_most_two - (t1 : (_, c With_status.t) Pre_diff_two.t) = + let 
apply_pre_diff_with_at_most_two (t1 : (_, c With_status.t) Pre_diff_two.t) + = let coinbase_parts = - match t1.coinbase with - | Zero -> - `Zero - | One x -> - `One x - | Two x -> - `Two x + match t1.coinbase with Zero -> `Zero | One x -> `One x | Two x -> `Two x in get_individual_info coinbase_parts ~receiver:coinbase_receiver t1.commands - t1.completed_works - ~internal_command_balances:t1.internal_command_balances ~coinbase_amount - ~forget + t1.completed_works ~internal_command_balances:t1.internal_command_balances + ~coinbase_amount ~forget in - let apply_pre_diff_with_at_most_one - (t2 : (_, c With_status.t) Pre_diff_one.t) = + let apply_pre_diff_with_at_most_one (t2 : (_, c With_status.t) Pre_diff_one.t) + = let coinbase_added = match t2.coinbase with Zero -> `Zero | One x -> `One x in get_individual_info coinbase_added ~receiver:coinbase_receiver t2.commands - t2.completed_works - ~internal_command_balances:t2.internal_command_balances ~coinbase_amount - ~forget + t2.completed_works ~internal_command_balances:t2.internal_command_balances + ~coinbase_amount ~forget in let%bind () = check_coinbase diff in let%bind p1 = @@ -498,7 +492,7 @@ let get' (type c) ~f:(fun d -> apply_pre_diff_with_at_most_one ~constraint_constants d) (snd diff) ~default: - (Ok {transactions= []; work= []; commands_count= 0; coinbases= []}) + (Ok { transactions = []; work = []; commands_count = 0; coinbases = [] }) in ( p1.transactions @ p2.transactions , p1.work @ p2.work @@ -521,8 +515,8 @@ let get ~check ~constraint_constants ~coinbase_receiver ~supercharge_coinbase t (Staged_ledger_diff.With_valid_signatures.coinbase ~constraint_constants ~supercharge_coinbase diff) -let get_unchecked ~constraint_constants ~coinbase_receiver - ~supercharge_coinbase (t : With_valid_signatures_and_proofs.t) = +let get_unchecked ~constraint_constants ~coinbase_receiver ~supercharge_coinbase + (t : With_valid_signatures_and_proofs.t) = let t = forget_proof_checks t in get' ~constraint_constants 
~diff:t.diff ~coinbase_receiver ~forget:User_command.forget_check @@ -537,7 +531,7 @@ let get_transactions ~constraint_constants ~coinbase_receiver get' ~constraint_constants ~diff:sl_diff.diff ~coinbase_receiver ~forget:Fn.id ~coinbase_amount: - (Staged_ledger_diff.coinbase ~constraint_constants - ~supercharge_coinbase sl_diff) + (Staged_ledger_diff.coinbase ~constraint_constants ~supercharge_coinbase + sl_diff) in transactions diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml index 2c2c0015d9b..33357ba2cf3 100644 --- a/src/lib/staged_ledger/staged_ledger.ml +++ b/src/lib/staged_ledger/staged_ledger.ml @@ -1,5 +1,4 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] (* Only show stdout for failed inline tests. *) open Inline_test_quiet_logs @@ -59,7 +58,7 @@ module T = struct ( s , Transaction_snark.Statement.hash s , Yojson.Safe.to_string - @@ Public_key.Compressed.to_yojson m.prover ) )) + @@ Public_key.Compressed.to_yojson m.prover ))) | Insufficient_work str -> str | Mismatched_statuses (transaction, status) -> @@ -90,7 +89,7 @@ module T = struct let statements () = `List (List.map proofs ~f:(fun (_, s, _) -> - Transaction_snark.Statement.to_yojson s )) + Transaction_snark.Statement.to_yojson s)) in let log_error err_str ~metadata = [%log warn] @@ -100,7 +99,8 @@ module T = struct ; ( "sok_messages" , `List (List.map proofs ~f:(fun (_, _, m) -> Sok_message.to_yojson m)) - ) ] + ) + ] @ metadata ) "Invalid transaction snark for statement $statement: $error" ; Deferred.return (Or_error.error_string err_str) @@ -110,7 +110,7 @@ module T = struct not (Transaction_snark.Statement.equal (Ledger_proof.statement proof) - statement) ) + statement)) then log_error "Statement and proof do not match" ~metadata: @@ -118,7 +118,8 @@ module T = struct , `List (List.map proofs ~f:(fun (p, _, _) -> Transaction_snark.Statement.to_yojson - (Ledger_proof.statement p) )) ) ] + (Ledger_proof.statement p))) ) + ] else let start 
= Time.now () in match%map @@ -132,8 +133,9 @@ module T = struct [ ( "work_id" , `List (List.map proofs ~f:(fun (_, s, _) -> - `Int (Transaction_snark.Statement.hash s) )) ) - ; ("time", `Float time_ms) ] + `Int (Transaction_snark.Statement.hash s))) ) + ; ("time", `Float time_ms) + ] "Verification in apply_diff for work $work_id took $time ms" ; Ok b | Error e -> @@ -142,24 +144,25 @@ module T = struct [ ( "statement" , `List (List.map proofs ~f:(fun (_, s, _) -> - Transaction_snark.Statement.to_yojson s )) ) - ; ("error", Error_json.error_to_yojson e) ] + Transaction_snark.Statement.to_yojson s)) ) + ; ("error", Error_json.error_to_yojson e) + ] "Verifier error when checking transaction snark for statement \ $statement: $error" ; Error e let map_opt xs ~f = - with_return (fun {return} -> + with_return (fun { return } -> Some (List.map xs ~f:(fun x -> - match f x with Some y -> y | None -> return None )) ) + match f x with Some y -> y | None -> return None))) let verify ~logger ~verifier job_msg_proofs = let open Deferred.Let_syntax in match map_opt job_msg_proofs ~f:(fun (job, msg, proof) -> Option.map (Scan_state.statement_of_job job) ~f:(fun s -> - (proof, s, msg) ) ) + (proof, s, msg))) with | None -> Deferred.return @@ -187,9 +190,9 @@ module T = struct end module Statement_scanner_proof_verifier = struct - type t = {logger: Logger.t; verifier: Verifier.t} + type t = { logger : Logger.t; verifier : Verifier.t } - let verify ~verifier:{logger; verifier} ts = + let verify ~verifier:{ logger; verifier } ts = verify_proofs ~logger ~verifier (List.map ts ~f:(fun (p, m) -> (p, Ledger_proof.statement p, m))) end @@ -200,20 +203,22 @@ module T = struct (Statement_scanner_proof_verifier) type t = - { scan_state: Scan_state.t - ; ledger: + { scan_state : Scan_state.t + ; ledger : ((* Invariant: this is the ledger after having applied all the - transactions in the above state. 
*) - Ledger.attached_mask[@sexp.opaque]) - ; constraint_constants: Genesis_constants.Constraint_constants.t - ; pending_coinbase_collection: Pending_coinbase.t } + transactions in the above state. *) + Ledger.attached_mask + [@sexp.opaque]) + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; pending_coinbase_collection : Pending_coinbase.t + } [@@deriving sexp] let proof_txns_with_state_hashes t = Scan_state.latest_ledger_proof t.scan_state |> Option.bind ~f:(Fn.compose Non_empty_list.of_list_opt snd) - let scan_state {scan_state; _} = scan_state + let scan_state { scan_state; _ } = scan_state let all_work_pairs t ~(get_state : State_hash.t -> Mina_state.Protocol_state.value Or_error.t) @@ -223,11 +228,11 @@ module T = struct let all_work_statements_exn t = Scan_state.all_work_statements_exn t.scan_state - let pending_coinbase_collection {pending_coinbase_collection; _} = + let pending_coinbase_collection { pending_coinbase_collection; _ } = pending_coinbase_collection let get_target ((proof, _), _) = - let {Transaction_snark.Statement.target; _} = + let { Transaction_snark.Statement.target; _ } = Ledger_proof.statement proof in target @@ -238,7 +243,7 @@ module T = struct "Error verifying the parallel scan state after applying the diff." 
in let next_available_token_begin ((proof, _), _) = - let {Transaction_snark.Statement.next_available_token_after; _} = + let { Transaction_snark.Statement.next_available_token_after; _ } = Ledger_proof.statement proof in next_available_token_after @@ -271,7 +276,7 @@ module T = struct let of_scan_state_and_ledger_unchecked ~ledger ~scan_state ~constraint_constants ~pending_coinbase_collection = - {ledger; scan_state; constraint_constants; pending_coinbase_collection} + { ledger; scan_state; constraint_constants; pending_coinbase_collection } let of_scan_state_and_ledger ~logger ~(constraint_constants : Genesis_constants.Constraint_constants.t) @@ -285,7 +290,7 @@ module T = struct let%bind () = Statement_scanner_with_proofs.check_invariants ~constraint_constants scan_state - ~verifier:{Statement_scanner_proof_verifier.logger; verifier} + ~verifier:{ Statement_scanner_proof_verifier.logger; verifier } ~error_prefix:"Staged_ledger.of_scan_state_and_ledger" ~ledger_hash_end: (Frozen_ledger_hash.of_ledger_hash (Ledger.merkle_root ledger)) @@ -301,7 +306,7 @@ module T = struct ~pending_coinbase_collection = let open Deferred.Or_error.Let_syntax in let t = - {ledger; scan_state; constraint_constants; pending_coinbase_collection} + { ledger; scan_state; constraint_constants; pending_coinbase_collection } in let%bind () = Statement_scanner.check_invariants ~constraint_constants scan_state @@ -347,7 +352,7 @@ module T = struct Or_error.errorf !"Mismatched user command status. 
Expected: %{sexp: \ Transaction_status.t} Got: %{sexp: Transaction_status.t}" - tx.status computed_status ) + tx.status computed_status) in let%bind () = let staged_ledger_hash = Ledger.merkle_root snarked_ledger in @@ -366,23 +371,27 @@ module T = struct ~pending_coinbase_collection:pending_coinbases let copy - {scan_state; ledger; constraint_constants; pending_coinbase_collection} = + { scan_state; ledger; constraint_constants; pending_coinbase_collection } + = let new_mask = Ledger.Mask.create ~depth:(Ledger.depth ledger) () in { scan_state - ; ledger= Ledger.register_mask ledger new_mask + ; ledger = Ledger.register_mask ledger new_mask ; constraint_constants - ; pending_coinbase_collection } + ; pending_coinbase_collection + } let hash - {scan_state; ledger; constraint_constants= _; pending_coinbase_collection} - : Staged_ledger_hash.t = + { scan_state + ; ledger + ; constraint_constants = _ + ; pending_coinbase_collection + } : Staged_ledger_hash.t = Staged_ledger_hash.of_aux_ledger_and_coinbase_hash (Scan_state.hash scan_state) (Ledger.merkle_root ledger) pending_coinbase_collection - [%%if - call_logger] + [%%if call_logger] let hash t = Mina_debug.Call_logger.record_call "Staged_ledger.hash" ; @@ -390,16 +399,17 @@ module T = struct [%%endif] - let ledger {ledger; _} = ledger + let ledger { ledger; _ } = ledger let create_exn ~constraint_constants ~ledger : t = - { scan_state= Scan_state.empty ~constraint_constants () + { scan_state = Scan_state.empty ~constraint_constants () ; ledger ; constraint_constants - ; pending_coinbase_collection= + ; pending_coinbase_collection = Pending_coinbase.create ~depth:constraint_constants.pending_coinbase_depth () - |> Or_error.ok_exn } + |> Or_error.ok_exn + } let current_ledger_proof t = Option.map @@ -411,17 +421,17 @@ module T = struct ~message:"Cannot replace ledger since merkle_root differs" ~expect:(Ledger.merkle_root t.ledger) (Ledger.merkle_root ledger) ; - {t with ledger} + { t with ledger } let sum_fees xs ~f 
= - with_return (fun {return} -> + with_return (fun { return } -> Ok (List.fold ~init:Fee.zero xs ~f:(fun acc x -> match Fee.add acc (f x) with | None -> return (Or_error.error_string "Fee overflow") | Some res -> - res )) ) + res))) let working_stack pending_coinbase_collection ~is_new_stack = to_staged_ledger_or_error @@ -439,8 +449,9 @@ module T = struct module Stack_state_with_init_stack = struct type t = - { pc: Transaction_snark.Pending_coinbase_stack_state.t - ; init_stack: Pending_coinbase.Stack.t } + { pc : Transaction_snark.Pending_coinbase_stack_state.t + ; init_stack : Pending_coinbase.Stack.t + } end let coinbase_amount ~supercharge_coinbase @@ -492,21 +503,24 @@ module T = struct let next_available_token_after = Ledger.next_available_token ledger in ( applied_txn , { Transaction_snark.Statement.source - ; target= Ledger.merkle_root ledger |> Frozen_ledger_hash.of_ledger_hash + ; target = Ledger.merkle_root ledger |> Frozen_ledger_hash.of_ledger_hash ; fee_excess ; next_available_token_before ; next_available_token_after ; supply_increase - ; pending_coinbase_stack_state= - {pending_coinbase_stack_state.pc with target= pending_coinbase_target} - ; sok_digest= () } - , { Stack_state_with_init_stack.pc= - {source= pending_coinbase_target; target= pending_coinbase_target} - ; init_stack= new_init_stack } ) + ; pending_coinbase_stack_state = + { pending_coinbase_stack_state.pc with + target = pending_coinbase_target + } + ; sok_digest = () + } + , { Stack_state_with_init_stack.pc = + { source = pending_coinbase_target; target = pending_coinbase_target } + ; init_stack = new_init_stack + } ) let apply_transaction_and_get_witness ~constraint_constants ledger - pending_coinbase_stack_state s status txn_state_view state_and_body_hash - = + pending_coinbase_stack_state s status txn_state_view state_and_body_hash = let account_ids : Transaction.t -> _ = function | Fee_transfer t -> Fee_transfer.receivers t @@ -523,17 +537,15 @@ module T = struct in let 
ledger_witness = measure "sparse ledger" (fun () -> - Sparse_ledger.of_ledger_subset_exn ledger (account_ids s) ) + Sparse_ledger.of_ledger_subset_exn ledger (account_ids s)) in let r = measure "apply+stmt" (fun () -> apply_transaction_and_get_statement ~constraint_constants ledger - pending_coinbase_stack_state s txn_state_view ) + pending_coinbase_stack_state s txn_state_view) in let open Result.Let_syntax in - let%bind applied_txn, statement, updated_pending_coinbase_stack_state = - r - in + let%bind applied_txn, statement, updated_pending_coinbase_stack_state = r in let%map () = match status with | None -> @@ -547,14 +559,15 @@ module T = struct else Result.fail (Staged_ledger_error.Mismatched_statuses - ({With_status.data= s; status}, got_status)) + ({ With_status.data = s; status }, got_status)) in - ( { Scan_state.Transaction_with_witness.transaction_with_info= applied_txn - ; state_hash= state_and_body_hash - ; state_view= txn_state_view + ( { Scan_state.Transaction_with_witness.transaction_with_info = applied_txn + ; state_hash = state_and_body_hash + ; state_view = txn_state_view ; ledger_witness - ; init_stack= Base pending_coinbase_stack_state.init_stack - ; statement } + ; init_stack = Base pending_coinbase_stack_state.init_stack + ; statement + } , updated_pending_coinbase_stack_state ) let rec partition size = function @@ -572,8 +585,9 @@ module T = struct in let%map res_rev, pending_coinbase_stack_state = let pending_coinbase_stack_state : Stack_state_with_init_stack.t = - { pc= {source= current_stack; target= current_stack_with_state} - ; init_stack= current_stack } + { pc = { source = current_stack; target = current_stack_with_state } + ; init_stack = current_stack + } in let exception Exit of Staged_ledger_error.t in Async.try_with ~extract_exn:true (fun () -> @@ -588,7 +602,7 @@ module T = struct List.find (Transaction.public_keys t.With_status.data) ~f:(fun pk -> Option.is_none - (Signature_lib.Public_key.decompress pk) ) + 
(Signature_lib.Public_key.decompress pk)) with | None -> () @@ -602,12 +616,12 @@ module T = struct | Ok (res, updated_pending_coinbase_stack_state) -> (res :: acc, updated_pending_coinbase_stack_state) | Error err -> - raise (Exit err) ) ) ) + raise (Exit err)))) |> Deferred.Result.map_error ~f:(function | Exit err -> err | exn -> - raise exn ) + raise exn) in (List.rev res_rev, pending_coinbase_stack_state.pc.target) @@ -624,7 +638,7 @@ module T = struct One_or_two.( to_list (map (zip_exn jobs work.proofs) ~f:(fun (job, proof) -> - (job, message, proof) ))) ) + (job, message, proof))))) in verify jmps ~logger ~verifier @@ -641,7 +655,7 @@ module T = struct let t = d.transaction_with_info |> Ledger.Transaction_applied.transaction in - t :: acc ) + t :: acc) in let total_fee_excess txns = List.fold_until txns ~init:Fee_excess.empty ~finish:Or_error.return @@ -654,7 +668,7 @@ module T = struct | Ok fee_excess -> Continue fee_excess | Error _ as err -> - Stop err ) + Stop err) |> to_staged_ledger_or_error in let open Result.Let_syntax in @@ -674,12 +688,13 @@ module T = struct let%map x = f () in [%log debug] ~metadata: - [("time_elapsed", `Float Core.Time.(Span.to_ms @@ diff (now ()) start))] + [ ("time_elapsed", `Float Core.Time.(Span.to_ms @@ diff (now ()) start)) + ] "%s took $time_elapsed" label ; x - let update_coinbase_stack_and_get_data ~constraint_constants scan_state - ledger pending_coinbase_collection transactions current_state_view + let update_coinbase_stack_and_get_data ~constraint_constants scan_state ledger + pending_coinbase_collection transactions current_state_view state_and_body_hash = let open Deferred.Result.Let_syntax in let coinbase_exists txns = @@ -689,18 +704,18 @@ module T = struct | Transaction.Coinbase _ -> Stop true | _ -> - Continue acc ) + Continue acc) ~finish:Fn.id in - let {Scan_state.Space_partition.first= slots, _; second} = + let { Scan_state.Space_partition.first = slots, _; second } = Scan_state.partition_if_overflowing 
scan_state in if List.length transactions > 0 then match second with | None -> (*Single partition: - 1.Check if a new stack is required and get a working stack [working_stack] - 2.create data for enqueuing onto the scan state *) + 1.Check if a new stack is required and get a working stack [working_stack] + 2.create data for enqueuing onto the scan state *) let is_new_stack = Scan_state.next_on_new_tree scan_state in let%bind working_stack = working_stack pending_coinbase_collection ~is_new_stack @@ -716,12 +731,12 @@ module T = struct , `Update_one updated_stack ) | Some _ -> (*Two partition: - Assumption: Only one of the partition will have coinbase transaction(s)in it. - 1. Get the latest stack for coinbase in the first set of transactions - 2. get the first set of scan_state data[data1] - 3. get a new stack for the second partion because the second set of transactions would start from the begining of the next tree in the scan_state - 4. Initialize the new stack with the state from the first stack - 5. get the second set of scan_state data[data2]*) + Assumption: Only one of the partition will have coinbase transaction(s)in it. + 1. Get the latest stack for coinbase in the first set of transactions + 2. get the first set of scan_state data[data1] + 3. get a new stack for the second partion because the second set of transactions would start from the begining of the next tree in the scan_state + 4. Initialize the new stack with the state from the first stack + 5. 
get the second set of scan_state data[data2]*) let txns_for_partition1 = List.take transactions slots in let coinbase_in_first_partition = coinbase_exists txns_for_partition1 @@ -754,11 +769,11 @@ module T = struct (*updated_stack2 does not have coinbase and but has the state from the previous stack*) | true, false -> (*updated_stack1 has some new coinbase but parition 2 has no - data and so we have only one stack to update*) + data and so we have only one stack to update*) (Update_one, `Update_one updated_stack1) | false, true -> (*updated_stack1 just has the new state. [updated stack2] might have coinbase, definitely has some - data and therefore will have a non-dummy state.*) + data and therefore will have a non-dummy state.*) ( Update_two_coinbase_in_second , `Update_two (updated_stack1, updated_stack2) ) | false, false -> @@ -768,8 +783,7 @@ module T = struct (false, data1 @ data2, pending_coinbase_action, stack_update) else Deferred.return - (Ok - (false, [], Pending_coinbase.Update.Action.Update_none, `Update_none)) + (Ok (false, [], Pending_coinbase.Update.Action.Update_none, `Update_none)) (*update the pending_coinbase tree with the updated/new stack and delete the oldest stack if a proof was emitted*) let update_pending_coinbase_collection ~depth pending_coinbase_collection @@ -788,8 +802,8 @@ module T = struct (Ledger_proof.statement proof).pending_coinbase_stack_state.target in let%map () = - if Pending_coinbase.Stack.equal oldest_stack ledger_proof_stack - then Ok () + if Pending_coinbase.Stack.equal oldest_stack ledger_proof_stack then + Ok () else Error (Staged_ledger_error.Unexpected @@ -823,9 +837,9 @@ module T = struct let coinbase_for_blockchain_snark = function | [] -> Ok Currency.Amount.zero - | [amount] -> + | [ amount ] -> Ok amount - | [amount1; _] -> + | [ amount1; _ ] -> Ok amount1 | _ -> Error @@ -850,13 +864,11 @@ module T = struct time ~logger "update_coinbase_stack_start_time" (fun () -> update_coinbase_stack_and_get_data 
~constraint_constants t.scan_state new_ledger t.pending_coinbase_collection transactions - current_state_view state_and_body_hash ) + current_state_view state_and_body_hash) in let slots = List.length data in let work_count = List.length works in - let required_pairs = - Scan_state.work_statements_for_new_diff t.scan_state - in + let required_pairs = Scan_state.work_statements_for_new_diff t.scan_state in let%bind () = time ~logger "sufficient work check" (fun () -> let required = List.length required_pairs in @@ -872,7 +884,7 @@ module T = struct !"Insufficient number of transaction snark work (slots \ occupying: %d) required %d, got %d" slots required work_count))) - else Deferred.return (Ok ()) ) + else Deferred.return (Ok ())) in let%bind () = Deferred.return (check_zero_fee_excess t.scan_state data) in let%bind res_opt, scan_state' = @@ -885,8 +897,9 @@ module T = struct let data_json = `List (List.map data - ~f:(fun {Scan_state.Transaction_with_witness.statement; _} - -> Transaction_snark.Statement.to_yojson statement )) + ~f:(fun + { Scan_state.Transaction_with_witness.statement; _ } + -> Transaction_snark.Statement.to_yojson statement)) in [%log error] ~metadata: @@ -894,10 +907,11 @@ module T = struct , `String (Scan_state.snark_job_list_json t.scan_state) ) ; ("data", data_json) ; ("error", Error_json.error_to_yojson e) - ; ("prefix", `String log_prefix) ] + ; ("prefix", `String log_prefix) + ] !"$prefix: Unexpected error when applying diff data $data to \ - the scan_state $scan_state: $error" ) ; - Deferred.return (to_staged_ledger_or_error r) ) + the scan_state $scan_state: $error") ; + Deferred.return (to_staged_ledger_or_error r)) in let%bind updated_pending_coinbase_collection' = time ~logger "update_pending_coinbase_collection" (fun () -> @@ -905,7 +919,7 @@ module T = struct ~depth:t.constraint_constants.pending_coinbase_depth t.pending_coinbase_collection stack_update ~is_new_stack ~ledger_proof:res_opt - |> Deferred.return ) + |> Deferred.return) 
in let%bind coinbase_amount = coinbase_for_blockchain_snark coinbases |> Deferred.return @@ -922,7 +936,7 @@ module T = struct (Frozen_ledger_hash.of_ledger_hash (Ledger.merkle_root new_ledger)) scan_state' - >>| to_staged_ledger_or_error) ) + >>| to_staged_ledger_or_error)) in [%log debug] ~metadata: @@ -931,32 +945,35 @@ module T = struct ; ("spots_available", `Int spots_available) ; ("proof_bundles_waiting", `Int proofs_waiting) ; ("work_count", `Int (List.length works)) - ; ("prefix", `String log_prefix) ] + ; ("prefix", `String log_prefix) + ] "$prefix: apply_diff block info: No of transactions \ included:$user_command_count\n\ \ Coinbase parts:$coinbase_count Spots\n\ \ available:$spots_available Pending work in the \ scan-state:$proof_bundles_waiting Work included:$work_count" ; let new_staged_ledger = - { scan_state= scan_state' - ; ledger= new_ledger - ; constraint_constants= t.constraint_constants - ; pending_coinbase_collection= updated_pending_coinbase_collection' } + { scan_state = scan_state' + ; ledger = new_ledger + ; constraint_constants = t.constraint_constants + ; pending_coinbase_collection = updated_pending_coinbase_collection' + } in ( `Hash_after_applying (hash new_staged_ledger) , `Ledger_proof res_opt , `Staged_ledger new_staged_ledger , `Pending_coinbase_update ( is_new_stack - , { Pending_coinbase.Update.Poly.action= stack_update_in_snark - ; coinbase_amount } ) ) + , { Pending_coinbase.Update.Poly.action = stack_update_in_snark + ; coinbase_amount + } ) ) let update_metrics (t : t) (witness : Staged_ledger_diff.t) = let open Or_error.Let_syntax in let commands = Staged_ledger_diff.commands witness in let work = Staged_ledger_diff.completed_works witness in let%bind total_txn_fee = - sum_fees commands ~f:(fun {data= cmd; _} -> User_command.fee_exn cmd) + sum_fees commands ~f:(fun { data = cmd; _ } -> User_command.fee_exn cmd) in let%bind total_snark_fee = sum_fees work ~f:Transaction_snark_work.fee in let%bind () = 
Scan_state.update_metrics t.scan_state in @@ -970,15 +987,13 @@ module T = struct (Float.of_int @@ List.length work) ; Gauge.set Scan_state_metrics.snark_work_required (Float.of_int - (List.length (Scan_state.all_work_statements_exn t.scan_state))) - ) + (List.length (Scan_state.all_work_statements_exn t.scan_state)))) - let forget_prediff_info - ((a : Transaction.Valid.t With_status.t list), b, c, d) = + let forget_prediff_info ((a : Transaction.Valid.t With_status.t list), b, c, d) + = ((a :> Transaction.t With_status.t list), b, c, d) - [%%if - feature_snapps] + [%%if feature_snapps] let check_commands ledger ~verifier (cs : User_command.t list) = match @@ -986,8 +1001,7 @@ module T = struct List.map cs ~f: (let open Ledger in - User_command.to_verifiable_exn ~ledger ~get ~location_of_account) - ) + User_command.to_verifiable_exn ~ledger ~get ~location_of_account)) with | Error e -> Deferred.return (Error e) @@ -1005,17 +1019,17 @@ module T = struct | `Valid_assuming _ -> Error (Verifier.Failure.Verification_failed - (Error.of_string "batch verification failed")) )) + (Error.of_string "batch verification failed")))) [%%else] (* imeckler: added this version because the call to the verifier was - causing super catchup to proceed more slowly than it could have otherwise. + causing super catchup to proceed more slowly than it could have otherwise. - The reason is as follows: catchup would have, say 100 blocks in the "to verify" - queue and 20 in the "already verified, to apply" queue. Those 20 would be - processed very slowly because each one would have to call the verifier, which - the other queue was trying to call as well. *) + The reason is as follows: catchup would have, say 100 blocks in the "to verify" + queue and 20 in the "already verified, to apply" queue. Those 20 would be + processed very slowly because each one would have to call the verifier, which + the other queue was trying to call as well. 
*) let check_commands _ledger ~verifier:_ (cs : User_command.t list) : (User_command.Valid.t list, _) result Deferred.Or_error.t = Result.all @@ -1025,13 +1039,13 @@ module T = struct (Verifier.Failure.Verification_failed (Error.of_string "check_commands: snapp commands disabled")) | Signed_command c -> ( - match Signed_command.check c with - | Some c -> - Ok (User_command.Signed_command c) - | None -> - Error - (Verifier.Failure.Verification_failed - (Error.of_string "signature failed to verify")) ) )) + match Signed_command.check c with + | Some c -> + Ok (User_command.Signed_command c) + | None -> + Error + (Verifier.Failure.Verification_failed + (Error.of_string "signature failed to verify")) ))) |> Deferred.Or_error.return [%%endif] @@ -1047,7 +1061,7 @@ module T = struct | Some `All | Some `Proofs -> return () | None -> - check_completed_works ~logger ~verifier t.scan_state work ) + check_completed_works ~logger ~verifier t.scan_state work) in let%bind prediff = Pre_diff_info.get witness ~constraint_constants ~coinbase_receiver @@ -1056,13 +1070,13 @@ module T = struct |> Deferred.map ~f: (Result.map_error ~f:(fun error -> - Staged_ledger_error.Pre_diff error )) + Staged_ledger_error.Pre_diff error)) in let apply_diff_start_time = Core.Time.now () in let%map ((_, _, `Staged_ledger new_staged_ledger, _) as res) = apply_diff ~skip_verification: - ([%equal: [`All | `Proofs] option] skip_verification (Some `All)) + ([%equal: [ `All | `Proofs ] option] skip_verification (Some `All)) ~constraint_constants t (forget_prediff_info prediff) ~logger ~current_state_view ~state_and_body_hash @@ -1071,17 +1085,16 @@ module T = struct [%log debug] ~metadata: [ ( "time_elapsed" - , `Float - Core.Time.(Span.to_ms @@ diff (now ()) apply_diff_start_time) ) + , `Float Core.Time.(Span.to_ms @@ diff (now ()) apply_diff_start_time) + ) ] "Staged_ledger.apply_diff take $time_elapsed" ; let () = Or_error.iter_error (update_metrics new_staged_ledger witness) ~f:(fun e -> [%log error] 
- ~metadata:[("error", Error_json.error_to_yojson e)] - !"Error updating metrics after applying staged_ledger diff: $error" - ) + ~metadata:[ ("error", Error_json.error_to_yojson e) ] + !"Error updating metrics after applying staged_ledger diff: $error") in res @@ -1104,42 +1117,45 @@ module T = struct module Resources = struct module Discarded = struct type t = - { commands_rev: User_command.Valid.t With_status.t Sequence.t - ; completed_work: Transaction_snark_work.Checked.t Sequence.t } + { commands_rev : User_command.Valid.t With_status.t Sequence.t + ; completed_work : Transaction_snark_work.Checked.t Sequence.t + } [@@deriving sexp_of] let add_user_command t uc = { t with - commands_rev= Sequence.append t.commands_rev (Sequence.singleton uc) + commands_rev = Sequence.append t.commands_rev (Sequence.singleton uc) } let add_completed_work t cw = { t with - completed_work= - Sequence.append (Sequence.singleton cw) t.completed_work } + completed_work = + Sequence.append (Sequence.singleton cw) t.completed_work + } end type t = - { max_space: int (*max space available currently*) - ; max_jobs: int + { max_space : int (*max space available currently*) + ; max_jobs : int (*Required amount of work for max_space that can be purchased*) - ; commands_rev: User_command.Valid.t With_status.t Sequence.t - ; completed_work_rev: Transaction_snark_work.Checked.t Sequence.t - ; fee_transfers: Fee.t Public_key.Compressed.Map.t - ; add_coinbase: bool - ; coinbase: Coinbase.Fee_transfer.t Staged_ledger_diff.At_most_two.t - ; supercharge_coinbase: bool - ; receiver_pk: Public_key.Compressed.t - ; budget: Fee.t Or_error.t - ; discarded: Discarded.t - ; is_coinbase_receiver_new: bool - ; logger: (Logger.t[@sexp.opaque]) } + ; commands_rev : User_command.Valid.t With_status.t Sequence.t + ; completed_work_rev : Transaction_snark_work.Checked.t Sequence.t + ; fee_transfers : Fee.t Public_key.Compressed.Map.t + ; add_coinbase : bool + ; coinbase : Coinbase.Fee_transfer.t 
Staged_ledger_diff.At_most_two.t + ; supercharge_coinbase : bool + ; receiver_pk : Public_key.Compressed.t + ; budget : Fee.t Or_error.t + ; discarded : Discarded.t + ; is_coinbase_receiver_new : bool + ; logger : (Logger.t[@sexp.opaque]) + } [@@deriving sexp_of] let coinbase_ft (cw : Transaction_snark_work.t) = (* Here we could not add the fee transfer if the prover=receiver_pk but - retaining it to preserve that information in the - staged_ledger_diff. It will be checked in apply_diff before adding*) + retaining it to preserve that information in the + staged_ledger_diff. It will be checked in apply_diff before adding*) Option.some_if Fee.(cw.fee > Fee.zero) (Coinbase.Fee_transfer.create ~receiver_pk:cw.prover ~fee:cw.fee) @@ -1156,7 +1172,7 @@ module T = struct | Some x, Some y -> if Currency.Fee.compare w.fee x.fee < 0 then (Some w, w1) else if Currency.Fee.compare w.fee y.fee < 0 then (w1, Some w) - else (w1, w2) ) + else (w1, w2)) let coinbase_work ~(constraint_constants : Genesis_constants.Constraint_constants.t) @@ -1169,7 +1185,7 @@ module T = struct Sequence.mem ws' (Transaction_snark_work.statement w) ~equal:Transaction_snark_work.Statement.equal - |> not ) + |> not) in let%bind coinbase_amount = coinbase_amount ~supercharge_coinbase ~constraint_constants @@ -1193,7 +1209,7 @@ module T = struct Staged_ledger_diff.At_most_two.Two (Option.map (coinbase_ft w) ~f:(fun ft -> (ft, None))) in - Some (cb, diff works (Sequence.of_list [stmt w])) + Some (cb, diff works (Sequence.of_list [ stmt w ])) else let cb = Staged_ledger_diff.At_most_two.Two None in Some (cb, works) @@ -1203,22 +1219,22 @@ module T = struct let cb = Staged_ledger_diff.At_most_two.Two (Option.map (coinbase_ft w1) ~f:(fun ft -> - (ft, coinbase_ft w2) )) + (ft, coinbase_ft w2))) (*Why add work without checking if work constraints are - satisfied? 
If we reach here then it means that we are trying to - fill the last two slots of the tree with coinbase trnasactions - and if there's any work in [works] then that has to be included, - either in the coinbase or as fee transfers that gets paid by - the transaction fees. So having it as coinbase ft will at least - reduce the slots occupied by fee transfers*) + satisfied? If we reach here then it means that we are trying to + fill the last two slots of the tree with coinbase trnasactions + and if there's any work in [works] then that has to be included, + either in the coinbase or as fee transfers that gets paid by + the transaction fees. So having it as coinbase ft will at least + reduce the slots occupied by fee transfers*) in - (cb, diff works (Sequence.of_list [stmt w1; stmt w2])) + (cb, diff works (Sequence.of_list [ stmt w1; stmt w2 ])) else if Amount.(of_fee w1.fee <= coinbase_amount) then let cb = Staged_ledger_diff.At_most_two.Two (Option.map (coinbase_ft w1) ~f:(fun ft -> (ft, None))) in - (cb, diff works (Sequence.of_list [stmt w1])) + (cb, diff works (Sequence.of_list [ stmt w1 ])) else let cb = Staged_ledger_diff.At_most_two.Two None in (cb, works) @@ -1226,10 +1242,10 @@ module T = struct Option.map min1 ~f:(fun w -> if Amount.(of_fee w.fee <= budget) then let cb = Staged_ledger_diff.At_most_two.One (coinbase_ft w) in - (cb, diff works (Sequence.of_list [stmt w])) + (cb, diff works (Sequence.of_list [ stmt w ])) else let cb = Staged_ledger_diff.At_most_two.One None in - (cb, works) ) + (cb, works)) let init_coinbase_and_fee_transfers ~constraint_constants cw_seq ~add_coinbase ~job_count ~slots ~is_coinbase_receiver_new @@ -1240,8 +1256,8 @@ module T = struct let coinbase, rem_cw = match ( add_coinbase - , coinbase_work ~constraint_constants cw_seq - ~is_coinbase_receiver_new ~supercharge_coinbase ) + , coinbase_work ~constraint_constants cw_seq ~is_coinbase_receiver_new + ~supercharge_coinbase ) with | true, Some (ft, rem_cw) -> (ft, rem_cw) @@ -1257,8 
+1273,8 @@ module T = struct let rem_cw = cw_unchecked rem_cw in let singles = Sequence.filter_map rem_cw - ~f:(fun {Transaction_snark_work.fee; prover; _} -> - if Fee.equal fee Fee.zero then None else Some (prover, fee) ) + ~f:(fun { Transaction_snark_work.fee; prover; _ } -> + if Fee.equal fee Fee.zero then None else Some (prover, fee)) |> Sequence.to_list_rev in (coinbase, singles) @@ -1285,30 +1301,32 @@ module T = struct in let fee_transfers = Public_key.Compressed.Map.of_alist_reduce singles ~f:(fun f1 f2 -> - Option.value_exn (Fee.add f1 f2) ) + Option.value_exn (Fee.add f1 f2)) in let budget = Or_error.map2 (sum_fees (Sequence.to_list uc_seq) ~f:(fun t -> - User_command.fee_exn (t.data :> User_command.t) )) + User_command.fee_exn (t.data :> User_command.t))) (sum_fees (List.filter ~f:(fun (k, _) -> - not (Public_key.Compressed.equal k receiver_pk) ) + not (Public_key.Compressed.equal k receiver_pk)) singles) ~f:snd) ~f:(fun r c -> option "budget did not suffice" (Fee.sub r c)) |> Or_error.join in let discarded = - {Discarded.completed_work= Sequence.empty; commands_rev= Sequence.empty} + { Discarded.completed_work = Sequence.empty + ; commands_rev = Sequence.empty + } in - { max_space= slots - ; max_jobs= job_count - ; commands_rev= + { max_space = slots + ; max_jobs = job_count + ; commands_rev = uc_seq (*Completed work in reverse order for faster removal of proofs if budget doesn't suffice*) - ; completed_work_rev= seq_rev cw_seq + ; completed_work_rev = seq_rev cw_seq ; fee_transfers ; add_coinbase ; supercharge_coinbase @@ -1317,7 +1335,8 @@ module T = struct ; budget ; discarded ; is_coinbase_receiver_new - ; logger } + ; logger + } let reselect_coinbase_work ~constraint_constants t = let cw_unchecked work = @@ -1328,58 +1347,59 @@ module T = struct | Staged_ledger_diff.At_most_two.Zero -> (t.coinbase, t.completed_work_rev) | One _ -> ( - match - coinbase_work ~constraint_constants t.completed_work_rev - 
~is_coinbase_receiver_new:t.is_coinbase_receiver_new - ~supercharge_coinbase:t.supercharge_coinbase - with - | None -> - (One None, t.completed_work_rev) - | Some (ft, rem_cw) -> - (ft, rem_cw) ) + match + coinbase_work ~constraint_constants t.completed_work_rev + ~is_coinbase_receiver_new:t.is_coinbase_receiver_new + ~supercharge_coinbase:t.supercharge_coinbase + with + | None -> + (One None, t.completed_work_rev) + | Some (ft, rem_cw) -> + (ft, rem_cw) ) | Two _ -> ( - match - coinbase_work ~constraint_constants t.completed_work_rev - ~is_two:true ~is_coinbase_receiver_new:t.is_coinbase_receiver_new - ~supercharge_coinbase:t.supercharge_coinbase - with - | None -> - (Two None, t.completed_work_rev) - (*Check for work constraint will be done in [check_constraints_and_update]*) - | Some (fts', rem_cw) -> - (fts', rem_cw) ) + match + coinbase_work ~constraint_constants t.completed_work_rev + ~is_two:true + ~is_coinbase_receiver_new:t.is_coinbase_receiver_new + ~supercharge_coinbase:t.supercharge_coinbase + with + | None -> + (Two None, t.completed_work_rev) + (*Check for work constraint will be done in [check_constraints_and_update]*) + | Some (fts', rem_cw) -> + (fts', rem_cw) ) in let rem_cw = cw_unchecked rem_cw in let singles = Sequence.filter_map rem_cw - ~f:(fun {Transaction_snark_work.fee; prover; _} -> - if Fee.equal fee Fee.zero then None else Some (prover, fee) ) + ~f:(fun { Transaction_snark_work.fee; prover; _ } -> + if Fee.equal fee Fee.zero then None else Some (prover, fee)) |> Sequence.to_list_rev in let fee_transfers = Public_key.Compressed.Map.of_alist_reduce singles ~f:(fun f1 f2 -> - Option.value_exn (Fee.add f1 f2) ) + Option.value_exn (Fee.add f1 f2)) in - {t with coinbase; fee_transfers} + { t with coinbase; fee_transfers } let rebudget t = (*get the correct coinbase and calculate the fee transfers*) let open Or_error.Let_syntax in let payment_fees = sum_fees (Sequence.to_list t.commands_rev) ~f:(fun t -> - User_command.fee_exn (t.data :> 
User_command.t) ) + User_command.fee_exn (t.data :> User_command.t)) in let prover_fee_others = Public_key.Compressed.Map.fold t.fee_transfers ~init:(Ok Fee.zero) ~f:(fun ~key ~data fees -> let%bind others = fees in if Public_key.Compressed.equal t.receiver_pk key then Ok others - else option "Fee overflow" (Fee.add others data) ) + else option "Fee overflow" (Fee.add others data)) in let revenue = payment_fees in let cost = prover_fee_others in Or_error.map2 revenue cost ~f:(fun r c -> - option "budget did not suffice" (Fee.sub r c) ) + option "budget did not suffice" (Fee.sub r c)) |> Or_error.join let budget_sufficient t = @@ -1448,7 +1468,7 @@ module T = struct let discarded = Discarded.add_completed_work t.discarded w in let new_t = reselect_coinbase_work ~constraint_constants - {t with completed_work_rev= rem_seq; discarded} + { t with completed_work_rev = rem_seq; discarded } in let budget = match t.budget with @@ -1457,7 +1477,7 @@ module T = struct | _ -> rebudget new_t in - ({new_t with budget}, Some w) + ({ new_t with budget }, Some w) let discard_user_command t = let decr_coinbase t = @@ -1468,20 +1488,20 @@ module T = struct ~f:(fun _ -> ft.fee) in let new_t = - {t with coinbase; fee_transfers= updated_fee_transfers} + { t with coinbase; fee_transfers = updated_fee_transfers } in let updated_budget = rebudget new_t in - {new_t with budget= updated_budget} + { new_t with budget = updated_budget } in match t.coinbase with | Staged_ledger_diff.At_most_two.Zero -> t | One None -> - {t with coinbase= Staged_ledger_diff.At_most_two.Zero} + { t with coinbase = Staged_ledger_diff.At_most_two.Zero } | Two None -> - {t with coinbase= One None} + { t with coinbase = One None } | Two (Some (ft, None)) -> - {t with coinbase= One (Some ft)} + { t with coinbase = One (Some ft) } | One (Some ft) -> update_fee_transfers t ft Zero | Two (Some (ft1, Some ft2)) -> @@ -1493,7 +1513,7 @@ module T = struct (decr_coinbase t, None) | Some (uc, rem_seq) -> let discarded = 
Discarded.add_user_command t.discarded uc in - let new_t = {t with commands_rev= rem_seq; discarded} in + let new_t = { t with commands_rev = rem_seq; discarded } in let budget = match t.budget with | Ok b -> @@ -1502,7 +1522,7 @@ module T = struct | _ -> rebudget new_t in - ({new_t with budget}, Some uc) + ({ new_t with budget }, Some uc) let worked_more ~constraint_constants resources = (*Is the work constraint satisfied even after discarding a work bundle? @@ -1535,16 +1555,17 @@ module T = struct let%map coinbase = incr res.coinbase in let res' = { res with - completed_work_rev= + completed_work_rev = Sequence.append (Sequence.singleton w) res.completed_work_rev - ; discarded= {res.discarded with completed_work= rem_work} - ; coinbase } + ; discarded = { res.discarded with completed_work = rem_work } + ; coinbase + } in reselect_coinbase_work ~constraint_constants res' | None -> let%bind coinbase = incr res.coinbase in - let res = {res with coinbase} in + let res = { res with coinbase } in if work_done res then Ok res else Or_error.error_string @@ -1556,7 +1577,7 @@ module T = struct res'' | Error e -> [%log' error t.logger] "Error when increasing coinbase: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] ; + ~metadata:[ ("error", Error_json.error_to_yojson e) ] ; res in match count with `One -> by_one t | `Two -> by_one (by_one t) @@ -1577,8 +1598,7 @@ module T = struct in check_constraints_and_update ~constraint_constants resources' (Option.value_map work_opt ~default:log ~f:(fun work -> - Diff_creation_log.discard_completed_work `Extra_work work log - )) + Diff_creation_log.discard_completed_work `Extra_work work log)) else if Resources.space_constraint_satisfied resources then (resources, log) else @@ -1588,7 +1608,7 @@ module T = struct (Option.value_map uc_opt ~default:log ~f:(fun uc -> Diff_creation_log.discard_command `No_space (uc.data :> User_command.t) - log )) + log)) else (* insufficient budget; reduce the cost*) let resources', 
work_opt = @@ -1597,7 +1617,7 @@ module T = struct check_constraints_and_update ~constraint_constants resources' (Option.value_map work_opt ~default:log ~f:(fun work -> Diff_creation_log.discard_completed_work `Insufficient_fees work - log )) + log)) else (* There isn't enough work for the transactions. Discard a transaction and check again *) let resources', uc_opt = Resources.discard_user_command resources in @@ -1605,7 +1625,7 @@ module T = struct (Option.value_map uc_opt ~default:log ~f:(fun uc -> Diff_creation_log.discard_command `No_work (uc.data :> User_command.t) - log )) + log)) let one_prediff ~constraint_constants cw_seq ts_seq ~receiver ~add_coinbase slot_job_count logger ~is_coinbase_receiver_new partition @@ -1613,8 +1633,8 @@ module T = struct O1trace.measure "one_prediff" (fun () -> let init_resources = Resources.init ~constraint_constants ts_seq cw_seq slot_job_count - ~receiver_pk:receiver ~add_coinbase logger - ~is_coinbase_receiver_new ~supercharge_coinbase + ~receiver_pk:receiver ~add_coinbase logger ~is_coinbase_receiver_new + ~supercharge_coinbase in let log = Diff_creation_log.init @@ -1624,8 +1644,7 @@ module T = struct ~available_slots:(fst slot_job_count) ~required_work_count:(snd slot_job_count) in - check_constraints_and_update ~constraint_constants init_resources log - ) + check_constraints_and_update ~constraint_constants init_resources log) let generate ~constraint_constants logger cw_seq ts_seq ~receiver ~is_coinbase_receiver_new ~supercharge_coinbase @@ -1646,22 +1665,24 @@ module T = struct Zero in (* We have to reverse here because we only know they work in THIS order *) - { Staged_ledger_diff.Pre_diff_one.commands= + { Staged_ledger_diff.Pre_diff_one.commands = Sequence.to_list_rev res.commands_rev - ; completed_works= Sequence.to_list_rev res.completed_work_rev - ; coinbase= to_at_most_one res.coinbase - ; internal_command_balances= - (* These will get filled in by the caller. 
*) [] } ) + ; completed_works = Sequence.to_list_rev res.completed_work_rev + ; coinbase = to_at_most_one res.coinbase + ; internal_command_balances = + (* These will get filled in by the caller. *) [] + }) in let pre_diff_with_two (res : Resources.t) : Staged_ledger_diff.With_valid_signatures_and_proofs .pre_diff_with_at_most_two_coinbase = (* We have to reverse here because we only know they work in THIS order *) - { commands= Sequence.to_list_rev res.commands_rev - ; completed_works= Sequence.to_list_rev res.completed_work_rev - ; coinbase= res.coinbase - ; internal_command_balances= - (* These will get filled in by the caller. *) [] } + { commands = Sequence.to_list_rev res.commands_rev + ; completed_works = Sequence.to_list_rev res.completed_work_rev + ; coinbase = res.coinbase + ; internal_command_balances = + (* These will get filled in by the caller. *) [] + } in let end_log ((res : Resources.t), (log : Diff_creation_log.t)) = Diff_creation_log.end_log log ~completed_work:res.completed_work_rev @@ -1670,9 +1691,9 @@ module T = struct let make_diff res1 = function | Some res2 -> ( (pre_diff_with_two (fst res1), Some (pre_diff_with_one (fst res2))) - , List.map ~f:end_log [res1; res2] ) + , List.map ~f:end_log [ res1; res2 ] ) | None -> - ((pre_diff_with_two (fst res1), None), [end_log res1]) + ((pre_diff_with_two (fst res1), None), [ end_log res1 ]) in let has_no_commands (res : Resources.t) = Sequence.length res.commands_rev = 0 @@ -1690,8 +1711,8 @@ module T = struct | None -> let res, log = one_prediff ~constraint_constants cw_seq ts_seq ~receiver - partitions.first ~add_coinbase:true logger - ~is_coinbase_receiver_new ~supercharge_coinbase `First + partitions.first ~add_coinbase:true logger ~is_coinbase_receiver_new + ~supercharge_coinbase `First in make_diff (res, log) None | Some y -> @@ -1727,8 +1748,8 @@ module T = struct in let try_with_coinbase () = one_prediff ~constraint_constants cw_seq_1 ts_seq ~receiver - partitions.first ~add_coinbase:true 
logger - ~is_coinbase_receiver_new ~supercharge_coinbase `First + partitions.first ~add_coinbase:true logger ~is_coinbase_receiver_new + ~supercharge_coinbase `First in let res1, res2 = if Sequence.is_empty res.commands_rev then @@ -1802,16 +1823,15 @@ module T = struct match get_completed_work w with | Some cw_checked -> (*If new provers can't pay the account-creation-fee then discard - their work unless their fee is zero in which case their account - won't be created. This is to encourage using an existing accounts - for snarking. - This also imposes new snarkers to have a min fee until one of - their snarks are purchased and their accounts get created*) + their work unless their fee is zero in which case their account + won't be created. This is to encourage using an existing accounts + for snarking. + This also imposes new snarkers to have a min fee until one of + their snarks are purchased and their accounts get created*) if Currency.Fee.(cw_checked.fee = zero) || Currency.Fee.( - cw_checked.fee - >= constraint_constants.account_creation_fee) + cw_checked.fee >= constraint_constants.account_creation_fee) || not (is_new_account cw_checked.prover) then Continue @@ -1827,7 +1847,8 @@ module T = struct ; ("snark_fee", Currency.Fee.to_yojson cw_checked.fee) ; ( "account_creation_fee" , Currency.Fee.to_yojson - constraint_constants.account_creation_fee ) ] + constraint_constants.account_creation_fee ) + ] !"Staged_ledger_diff creation: Snark fee $snark_fee \ insufficient to create the snark worker account" ; Stop (seq, count) ) @@ -1835,11 +1856,11 @@ module T = struct [%log debug] ~metadata: [ ("statement", Transaction_snark_work.Statement.to_yojson w) - ; ( "work_ids" - , Transaction_snark_work.Statement.compact_json w ) ] + ; ("work_ids", Transaction_snark_work.Statement.compact_json w) + ] !"Staged_ledger_diff creation: No snark work found for \ $statement" ; - Stop (seq, count) ) + Stop (seq, count)) ~finish:Fn.id in O1trace.trace_event "found completed work" ; 
@@ -1851,31 +1872,32 @@ module T = struct O1trace.measure "validate txn" (fun () -> Transaction_validator.apply_transaction ~constraint_constants validating_ledger ~txn_state_view:current_state_view - (Command (txn :> User_command.t)) ) + (Command (txn :> User_command.t))) with | Error e -> [%log error] ~metadata: [ ("user_command", User_command.Valid.to_yojson txn) - ; ("error", Error_json.error_to_yojson e) ] + ; ("error", Error_json.error_to_yojson e) + ] "Staged_ledger_diff creation: Skipping user command: \ $user_command due to error: $error" ; Continue (seq, count) | Ok status -> - let txn_with_status = {With_status.data= txn; status} in + let txn_with_status = { With_status.data = txn; status } in let seq' = Sequence.append (Sequence.singleton txn_with_status) seq in let count' = count + 1 in if count' >= Scan_state.free_space t.scan_state then Stop seq' - else Continue (seq', count') ) + else Continue (seq', count')) ~finish:fst in let diff, log = O1trace.measure "generate diff" (fun () -> generate ~constraint_constants logger completed_works_seq valid_on_this_ledger ~receiver:coinbase_receiver - ~is_coinbase_receiver_new ~supercharge_coinbase partitions ) + ~is_coinbase_receiver_new ~supercharge_coinbase partitions) in let%map diff = (* Fill in the statuses for commands. 
*) @@ -1884,7 +1906,7 @@ module T = struct fun txn -> O1trace.measure "get txn status" (fun () -> Transaction_validator.apply_transaction ~constraint_constants - status_ledger ~txn_state_view:current_state_view txn ) + status_ledger ~txn_state_view:current_state_view txn) in Pre_diff_info.compute_statuses ~constraint_constants ~diff ~coinbase_amount: @@ -1899,15 +1921,17 @@ module T = struct ~metadata: [ ("proof_count", `Int proof_count) ; ("txn_count", `Int (Sequence.length valid_on_this_ledger)) - ; ("diff_log", Diff_creation_log.summary_list_to_yojson summaries) ] ; + ; ("diff_log", Diff_creation_log.summary_list_to_yojson summaries) + ] ; if log_block_creation then [%log debug] "Detailed diff creation log: $diff_log" ~metadata: [ ( "diff_log" , Diff_creation_log.detail_list_to_yojson - (List.map ~f:List.rev detailed) ) ] ; + (List.map ~f:List.rev detailed) ) + ] ; trace_event "prediffs done" ; - {Staged_ledger_diff.With_valid_signatures_and_proofs.diff} + { Staged_ledger_diff.With_valid_signatures_and_proofs.diff } end include T @@ -1935,13 +1959,14 @@ let%test_module "test" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) let supercharge_coinbase ~ledger ~winner ~global_slot = (*using staged ledger to confirm coinbase amount is correctly generated*) let epoch_ledger = Sparse_ledger.of_ledger_subset_exn ledger - (List.map [winner] ~f:(fun k -> Account_id.create k Token_id.default)) + (List.map [ winner ] ~f:(fun k -> + Account_id.create k Token_id.default)) in Sl.can_apply_supercharged_coinbase_exn ~winner ~global_slot ~epoch_ledger @@ -2004,7 +2029,8 @@ let%test_module "test" = compile_time_genesis.data |> Mina_state.Protocol_state.body in { (Mina_state.Protocol_state.Body.view state_body) with - global_slot_since_genesis } + global_slot_since_genesis + } let 
create_and_apply ?(coinbase_receiver = coinbase_receiver) ?(winner = self_pk) sl txns stmt_to_work = @@ -2021,7 +2047,7 @@ let%test_module "test" = ledger and a separate test ledger, after applying the given init_state to both. In the below tests we apply the same commands to the staged and test ledgers, and verify they are in the same state. - *) + *) let async_with_ledgers ledger_init_state (f : Sl.t ref -> Ledger.Mask.Attached.t -> unit Deferred.t) = Ledger.with_ephemeral_ledger ~depth:constraint_constants.ledger_depth @@ -2034,8 +2060,7 @@ let%test_module "test" = in let sl = ref @@ Sl.create_exn ~constraint_constants ~ledger in Async.Thread_safe.block_on_async_exn (fun () -> f sl test_mask) ; - ignore @@ Ledger.Maskable.unregister_mask_exn ~loc:__LOC__ test_mask - ) + ignore @@ Ledger.Maskable.unregister_mask_exn ~loc:__LOC__ test_mask) (* Assert the given staged ledger is in the correct state after applying the first n user commands passed to the given base ledger. Checks the @@ -2089,7 +2114,7 @@ let%test_module "test" = List.iter pks_to_check ~f:(fun pk -> let expect = get_account_exn test_ledger pk in let actual = get_account_exn (Sl.ledger staged_ledger) pk in - [%test_result: Account.t] ~expect actual ) ; + [%test_result: Account.t] ~expect actual) ; (* We only test that the block producer got the coinbase reward here, since calculating the exact correct amount depends on the snark fees and tx fees. 
*) let producer_balance_with_coinbase = (let open Option.Let_syntax in @@ -2121,7 +2146,7 @@ let%test_module "test" = fun stmts -> let prover_seed = One_or_two.fold stmts ~init:"P" ~f:(fun p stmt -> - p ^ Frozen_ledger_hash.to_bytes stmt.target ) + p ^ Frozen_ledger_hash.to_bytes stmt.target) in Quickcheck.random_value ~seed:(`Deterministic prover_seed) Public_key.Compressed.gen @@ -2130,23 +2155,25 @@ let%test_module "test" = let sok_digest = Sok_message.Digest.default in One_or_two.map stmts ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:Proof.transaction_dummy) let stmt_to_work_random_prover (stmts : Transaction_snark_work.Statement.t) : Transaction_snark_work.Checked.t option = let prover = stmt_to_prover stmts in Some - { Transaction_snark_work.Checked.fee= work_fee - ; proofs= proofs stmts - ; prover } + { Transaction_snark_work.Checked.fee = work_fee + ; proofs = proofs stmts + ; prover + } let stmt_to_work_zero_fee ~prover (stmts : Transaction_snark_work.Statement.t) : Transaction_snark_work.Checked.t option = Some - { Transaction_snark_work.Checked.fee= Currency.Fee.zero - ; proofs= proofs stmts - ; prover } + { Transaction_snark_work.Checked.fee = Currency.Fee.zero + ; proofs = proofs stmts + ; prover + } (* Fixed public key for when there is only one snark worker. 
*) let snark_worker_pk = @@ -2155,7 +2182,7 @@ let%test_module "test" = let stmt_to_work_one_prover (stmts : Transaction_snark_work.Statement.t) : Transaction_snark_work.Checked.t option = - Some {fee= work_fee; proofs= proofs stmts; prover= snark_worker_pk} + Some { fee = work_fee; proofs = proofs stmts; prover = snark_worker_pk } let coinbase_first_prediff = function | Staged_ledger_diff.At_most_two.Zero -> @@ -2163,13 +2190,13 @@ let%test_module "test" = | One None -> (1, []) | One (Some ft) -> - (1, [ft]) + (1, [ ft ]) | Two None -> (2, []) | Two (Some (ft, None)) -> - (2, [ft]) + (2, [ ft ]) | Two (Some (ft1, Some ft2)) -> - (2, [ft1; ft2]) + (2, [ ft1; ft2 ]) let coinbase_second_prediff = function | Staged_ledger_diff.At_most_one.Zero -> @@ -2177,21 +2204,21 @@ let%test_module "test" = | One None -> (1, []) | One (Some ft) -> - (1, [ft]) + (1, [ ft ]) let coinbase_count (sl_diff : Staged_ledger_diff.t) = (coinbase_first_prediff (fst sl_diff.diff).coinbase |> fst) + Option.value_map ~default:0 (snd sl_diff.diff) ~f:(fun d -> - coinbase_second_prediff d.coinbase |> fst ) + coinbase_second_prediff d.coinbase |> fst) let coinbase_cost (sl_diff : Staged_ledger_diff.t) = let coinbase_fts = (coinbase_first_prediff (fst sl_diff.diff).coinbase |> snd) @ Option.value_map ~default:[] (snd sl_diff.diff) ~f:(fun d -> - coinbase_second_prediff d.coinbase |> snd ) + coinbase_second_prediff d.coinbase |> snd) in List.fold coinbase_fts ~init:Currency.Fee.zero ~f:(fun total ft -> - Currency.Fee.add total ft.fee |> Option.value_exn ) + Currency.Fee.add total ft.fee |> Option.value_exn) let () = Async.Scheduler.set_record_backtraces true ; @@ -2208,13 +2235,13 @@ let%test_module "test" = |> Sequence.map ~f:(fun (kp, _, _, _) -> Account_id.create (Public_key.compress kp.public_key) - Token_id.default ) + Token_id.default) |> Sequence.to_list (* Fee excess at top level ledger proofs should always be zero *) let assert_fee_excess : - (Ledger_proof.t * (Transaction.t With_status.t * 
_) list) option - -> unit = + (Ledger_proof.t * (Transaction.t With_status.t * _) list) option -> unit + = fun proof_opt -> let fee_excess = Option.value_map ~default:Fee_excess.zero proof_opt @@ -2268,10 +2295,9 @@ let%test_module "test" = -> User_command.Valid.t list -> int option list -> Sl.t ref - -> ?expected_proof_count:int option - (*Number of ledger proofs expected*) + -> ?expected_proof_count:int option (*Number of ledger proofs expected*) -> Ledger.Mask.Attached.t - -> [`One_prover | `Many_provers] + -> [ `One_prover | `Many_provers ] -> ( Transaction_snark_work.Statement.t -> Transaction_snark_work.Checked.t option) -> unit Deferred.t = @@ -2299,12 +2325,11 @@ let%test_module "test" = ( match count_opt with | Some _ -> (* There is an edge case where cmds_applied_this_iter = 0, when - there is only enough space for coinbase transactions. *) - assert ( - cmds_applied_this_iter <= Sequence.length cmds_this_iter ) ; + there is only enough space for coinbase transactions. *) + assert (cmds_applied_this_iter <= Sequence.length cmds_this_iter) ; [%test_eq: User_command.t list] (List.map (Staged_ledger_diff.commands diff) - ~f:(fun {With_status.data; _} -> data)) + ~f:(fun { With_status.data; _ } -> data)) ( Sequence.take cmds_this_iter cmds_applied_this_iter |> Sequence.to_list :> User_command.t list ) @@ -2313,10 +2338,10 @@ let%test_module "test" = let coinbase_cost = coinbase_cost diff in assert_ledger test_mask ~coinbase_cost !sl cmds_left cmds_applied_this_iter (init_pks init_state) ; - return (diff, proof_count') ) + return (diff, proof_count')) in (*Should have enough blocks to generate at least expected_proof_count - proofs*) + proofs*) if Option.is_some expected_proof_count then assert (total_ledger_proofs = Option.value_exn expected_proof_count) @@ -2328,7 +2353,7 @@ let%test_module "test" = + 1 (* n-1 extra blocks for n ledger proofs since we are already producing one - proof *) + proof *) let max_blocks_for_coverage n = 
min_blocks_for_first_snarked_ledger_generic + n - 1 @@ -2349,7 +2374,7 @@ let%test_module "test" = return (ledger_init_state, cmds, List.init iters ~f:(Fn.const None)) (*Same as gen_at_capacity except that the number of iterations[iters] is - the function of [extra_block_count] and is same for all generated values*) + the function of [extra_block_count] and is same for all generated values*) let gen_at_capacity_fixed_blocks extra_block_count : (Ledger.init_state * User_command.Valid.t list * int option list) Quickcheck.Generator.t = @@ -2401,7 +2426,7 @@ let%test_module "test" = async_with_ledgers ledger_init_state (fun sl test_mask -> test_simple ledger_init_state cmds iters sl ~expected_proof_count:(Some expected_proof_count) test_mask - `Many_provers stmt_to_work_random_prover ) ) + `Many_provers stmt_to_work_random_prover)) let%test_unit "Max throughput" = Quickcheck.test gen_at_capacity @@ -2413,28 +2438,29 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_simple ledger_init_state cmds iters sl test_mask - `Many_provers stmt_to_work_random_prover ) ) + `Many_provers stmt_to_work_random_prover)) let%test_unit "Be able to include random number of commands" = Quickcheck.test (gen_below_capacity ()) ~trials:20 ~f:(fun (ledger_init_state, cmds, iters) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_simple ledger_init_state cmds iters sl test_mask - `Many_provers stmt_to_work_random_prover ) ) + `Many_provers stmt_to_work_random_prover)) let%test_unit "Be able to include random number of commands (One prover)" = Quickcheck.test (gen_below_capacity ()) ~trials:20 ~f:(fun (ledger_init_state, cmds, iters) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_simple ledger_init_state cmds iters sl test_mask `One_prover - stmt_to_work_one_prover ) ) + stmt_to_work_one_prover)) let%test_unit "Zero proof-fee should not create a fee transfer" = let 
stmt_to_work_zero_fee stmts = Some - { Transaction_snark_work.Checked.fee= Currency.Fee.zero - ; proofs= proofs stmts - ; prover= snark_worker_pk } + { Transaction_snark_work.Checked.fee = Currency.Fee.zero + ; proofs = proofs stmts + ; prover = snark_worker_pk + } in let expected_proof_count = 3 in Quickcheck.test (gen_at_capacity_fixed_blocks expected_proof_count) @@ -2448,8 +2474,7 @@ let%test_module "test" = assert ( Option.is_none (Mina_base.Ledger.location_of_account test_mask - (Account_id.create snark_worker_pk Token_id.default)) ) ) - ) + (Account_id.create snark_worker_pk Token_id.default)) ))) let compute_statuses ~ledger ~coinbase_amount diff = let generate_status = @@ -2457,7 +2482,7 @@ let%test_module "test" = fun txn -> O1trace.measure "get txn status" (fun () -> Transaction_validator.apply_transaction ~constraint_constants - status_ledger ~txn_state_view:(dummy_state_view ()) txn ) + status_ledger ~txn_state_view:(dummy_state_view ()) txn) in Pre_diff_info.compute_statuses ~constraint_constants ~diff ~coinbase_amount ~coinbase_receiver ~generate_status ~forget:Fn.id @@ -2472,37 +2497,43 @@ let%test_module "test" = let slots, job_count1 = partition.first in match partition.second with | None -> - { diff= + { diff = compute_statuses ~ledger ~coinbase_amount - @@ ( { completed_works= List.take completed_works job_count1 - ; commands= List.take txns slots - ; coinbase= Zero - ; internal_command_balances= [] } - , None ) } + @@ ( { completed_works = List.take completed_works job_count1 + ; commands = List.take txns slots + ; coinbase = Zero + ; internal_command_balances = [] + } + , None ) + } | Some (_, _) -> let txns_in_second_diff = List.drop txns slots in let diff : Staged_ledger_diff.Diff.t = - ( { completed_works= List.take completed_works job_count1 - ; commands= List.take txns slots - ; coinbase= Zero - ; internal_command_balances= [] } + ( { completed_works = List.take completed_works job_count1 + ; commands = List.take txns slots + ; coinbase = 
Zero + ; internal_command_balances = [] + } , Some - { completed_works= + { completed_works = ( if List.is_empty txns_in_second_diff then [] else List.drop completed_works job_count1 ) - ; commands= txns_in_second_diff - ; coinbase= Zero - ; internal_command_balances= [] } ) + ; commands = txns_in_second_diff + ; coinbase = Zero + ; internal_command_balances = [] + } ) in - {diff= compute_statuses ~ledger ~coinbase_amount diff} + { diff = compute_statuses ~ledger ~coinbase_amount diff } in let empty_diff : Staged_ledger_diff.t = - { diff= - ( { completed_works= [] - ; commands= [] - ; coinbase= Staged_ledger_diff.At_most_two.Zero - ; internal_command_balances= [] } - , None ) } + { diff = + ( { completed_works = [] + ; commands = [] + ; coinbase = Staged_ledger_diff.At_most_two.Zero + ; internal_command_balances = [] + } + , None ) + } in Quickcheck.test (gen_below_capacity ()) ~sexp_of: @@ -2523,20 +2554,21 @@ let%test_module "test" = let work_done = List.map ~f:(fun stmts -> - { Transaction_snark_work.Checked.fee= Fee.zero - ; proofs= proofs stmts - ; prover= snark_worker_pk } ) + { Transaction_snark_work.Checked.fee = Fee.zero + ; proofs = proofs stmts + ; prover = snark_worker_pk + }) work in let cmds_this_iter = cmds_this_iter |> Sequence.to_list |> List.map ~f:(fun cmd -> - { With_status.data= (cmd :> User_command.t) - ; status= + { With_status.data = (cmd :> User_command.t) + ; status = Applied ( Transaction_status.Auxiliary_data.empty - , Transaction_status.Balance_data.empty ) } - ) + , Transaction_status.Balance_data.empty ) + }) in let diff = create_diff_with_non_zero_fee_excess @@ -2569,10 +2601,10 @@ let%test_module "test" = sl := sl' ; (false, diff) in - return (diff', checked || checked') ) + return (diff', checked || checked')) in (*Note: if this fails, try increasing the number of trials*) - assert checked ) ) + assert checked)) let%test_unit "Provers can't pay the account creation fee" = let no_work_included (diff : Staged_ledger_diff.t) = @@ 
-2581,10 +2613,11 @@ let%test_module "test" = let stmt_to_work stmts = let prover = stmt_to_prover stmts in Some - { Transaction_snark_work.Checked.fee= + { Transaction_snark_work.Checked.fee = Currency.Fee.(sub work_fee (of_int 1)) |> Option.value_exn - ; proofs= proofs stmts - ; prover } + ; proofs = proofs stmts + ; prover + } in Quickcheck.test (gen_below_capacity ()) ~sexp_of: @@ -2596,8 +2629,8 @@ let%test_module "test" = Sequence.singleton ( init_state , List.take cmds (List.length cmds - transaction_capacity) - , [None] ) - else Sequence.empty )) + , [ None ] ) + else Sequence.empty)) ~trials:1 ~f:(fun (ledger_init_state, cmds, iters) -> async_with_ledgers ledger_init_state (fun sl _test_mask -> @@ -2619,7 +2652,7 @@ let%test_module "test" = in (*No proofs were purchased since the fee for the proofs are not sufficient to pay for account creation*) assert (no_work_included diff) ; - Deferred.return (diff, ()) ) ) ) + Deferred.return (diff, ())))) let stmt_to_work_restricted work_list provers (stmts : Transaction_snark_work.Statement.t) : @@ -2634,12 +2667,13 @@ let%test_module "test" = if Option.is_some (List.find work_list ~f:(fun s -> - Transaction_snark_work.Statement.compare s stmts = 0 )) + Transaction_snark_work.Statement.compare s stmts = 0)) then Some - { Transaction_snark_work.Checked.fee= work_fee - ; proofs= proofs stmts - ; prover } + { Transaction_snark_work.Checked.fee = work_fee + ; proofs = proofs stmts + ; prover + } else None (** Like test_simple but with a random number of completed jobs available. 
@@ -2652,7 +2686,7 @@ let%test_module "test" = -> int list -> Sl.t ref -> Ledger.Mask.Attached.t - -> [`One_prover | `Many_provers] + -> [ `One_prover | `Many_provers ] -> unit Deferred.t = fun init_state cmds cmd_iters proofs_available sl test_mask provers -> let%map proofs_available_left = @@ -2685,7 +2719,7 @@ let%test_module "test" = let coinbase_cost = coinbase_cost diff in assert_ledger test_mask ~coinbase_cost !sl cmds_left cmds_applied_this_iter (init_pks init_state) ; - (diff, List.tl_exn proofs_available_left) ) + (diff, List.tl_exn proofs_available_left)) in assert (List.is_empty proofs_available_left) @@ -2704,7 +2738,7 @@ let%test_module "test" = number of commands) works. I make it twice as many for simplicity and to cover coinbases. *) Quickcheck_lib.map_gens iters ~f:(fun _ -> - Int.gen_incl 0 (transaction_capacity * 2) ) + Int.gen_incl 0 (transaction_capacity * 2)) in return (ledger_init_state, cmds, iters, proofs_available) in @@ -2712,7 +2746,7 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters, proofs_available) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_random_number_of_proofs ledger_init_state cmds iters - proofs_available sl test_mask `Many_provers ) ) + proofs_available sl test_mask `Many_provers)) let%test_unit "random no of transactions-random number of proofs-worst \ case provers" = @@ -2723,7 +2757,7 @@ let%test_module "test" = in let%bind proofs_available = Quickcheck_lib.map_gens iters ~f:(fun cmds_opt -> - Int.gen_incl 0 (3 * Option.value_exn cmds_opt) ) + Int.gen_incl 0 (3 * Option.value_exn cmds_opt)) in return (ledger_init_state, cmds, iters, proofs_available) in @@ -2748,8 +2782,8 @@ let%test_module "test" = if iter_count > 2 then Some (mod_iters (all_but_last iters)) else None in - List.filter_map [half_iters; one_less_iters] ~f:Fn.id - |> Sequence.of_list ) + List.filter_map [ half_iters; one_less_iters ] ~f:Fn.id + |> Sequence.of_list) in Quickcheck.test g ~shrinker 
~shrink_attempts:`Exhaustive ~sexp_of: @@ -2761,7 +2795,7 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters, proofs_available) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_random_number_of_proofs ledger_init_state cmds iters - proofs_available sl test_mask `Many_provers ) ) + proofs_available sl test_mask `Many_provers)) let%test_unit "Random number of commands-random number of proofs-one \ prover)" = @@ -2772,7 +2806,7 @@ let%test_module "test" = in let%bind proofs_available = Quickcheck_lib.map_gens iters ~f:(fun cmds_opt -> - Int.gen_incl 0 (3 * Option.value_exn cmds_opt) ) + Int.gen_incl 0 (3 * Option.value_exn cmds_opt)) in return (ledger_init_state, cmds, iters, proofs_available) in @@ -2780,7 +2814,7 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters, proofs_available) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_random_number_of_proofs ledger_init_state cmds iters - proofs_available sl test_mask `One_prover ) ) + proofs_available sl test_mask `One_prover)) let stmt_to_work_random_fee work_list provers (stmts : Transaction_snark_work.Statement.t) : @@ -2794,9 +2828,9 @@ let%test_module "test" = in Option.map (List.find work_list ~f:(fun (s, _) -> - Transaction_snark_work.Statement.compare s stmts = 0 )) + Transaction_snark_work.Statement.compare s stmts = 0)) ~f:(fun (_, fee) -> - {Transaction_snark_work.Checked.fee; proofs= proofs stmts; prover} ) + { Transaction_snark_work.Checked.fee; proofs = proofs stmts; prover }) (** Like test_random_number_of_proofs but with random proof fees. 
*) @@ -2807,7 +2841,7 @@ let%test_module "test" = -> (int * Fee.t list) list -> Sl.t ref -> Ledger.Mask.Attached.t - -> [`One_prover | `Many_provers] + -> [ `One_prover | `Many_provers ] -> unit Deferred.t = fun _init_state cmds cmd_iters proofs_available sl _test_mask provers -> let%map proofs_available_left = @@ -2831,18 +2865,18 @@ let%test_module "test" = (pre_diff : Staged_ledger_diff.Pre_diff_with_at_most_two_coinbase.t) = List.sort pre_diff.completed_works ~compare:(fun w w' -> - Fee.compare w.fee w'.fee ) + Fee.compare w.fee w'.fee) in let sorted_work_from_diff2 (pre_diff : - Staged_ledger_diff.Pre_diff_with_at_most_one_coinbase.t - option) = + Staged_ledger_diff.Pre_diff_with_at_most_one_coinbase.t option) + = Option.value_map pre_diff ~default:[] ~f:(fun p -> List.sort p.completed_works ~compare:(fun w w' -> - Fee.compare w.fee w'.fee ) ) + Fee.compare w.fee w'.fee)) in let () = - let assert_same_fee {Coinbase.Fee_transfer.fee; _} fee' = + let assert_same_fee { Coinbase.Fee_transfer.fee; _ } fee' = assert (Fee.equal fee fee') in let first_pre_diff, second_pre_diff_opt = diff.diff in @@ -2850,7 +2884,7 @@ let%test_module "test" = ( first_pre_diff.coinbase , Option.value_map second_pre_diff_opt ~default:Staged_ledger_diff.At_most_one.Zero ~f:(fun d -> - d.coinbase ) ) + d.coinbase) ) with | ( Staged_ledger_diff.At_most_two.Zero , Staged_ledger_diff.At_most_one.Zero ) @@ -2862,15 +2896,14 @@ let%test_module "test" = List.hd_exn (sorted_work_from_diff1 first_pre_diff) |> Transaction_snark_work.forget in - assert_same_fee single work.fee ) + assert_same_fee single work.fee) | Zero, One ft_opt -> Option.value_map ft_opt ~default:() ~f:(fun single -> let work = - List.hd_exn - (sorted_work_from_diff2 second_pre_diff_opt) + List.hd_exn (sorted_work_from_diff2 second_pre_diff_opt) |> Transaction_snark_work.forget in - assert_same_fee single work.fee ) + assert_same_fee single work.fee) | Two (Some (ft, ft_opt)), Zero -> let work_done = sorted_work_from_diff1 
first_pre_diff in let work = @@ -2882,7 +2915,7 @@ let%test_module "test" = List.hd_exn (List.drop work_done 1) |> Transaction_snark_work.forget in - assert_same_fee single work.fee ) + assert_same_fee single work.fee) | _ -> failwith (sprintf @@ -2890,7 +2923,7 @@ let%test_module "test" = Staged_ledger_diff.t}" diff) in - (diff, List.tl_exn proofs_available_left) ) + (diff, List.tl_exn proofs_available_left)) in assert (List.is_empty proofs_available_left) @@ -2910,7 +2943,7 @@ let%test_module "test" = Quickcheck.Generator.list_with_length number_of_proofs Fee.(gen_incl (of_int 1) (of_int 20)) in - (number_of_proofs, fees) ) + (number_of_proofs, fees)) in return (ledger_init_state, cmds, iters, proofs_available) in @@ -2918,7 +2951,7 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters, proofs_available) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_random_proof_fee ledger_init_state cmds iters - proofs_available sl test_mask `Many_provers ) ) + proofs_available sl test_mask `Many_provers)) let%test_unit "Max throughput-random fee" = let g = @@ -2934,7 +2967,7 @@ let%test_module "test" = Quickcheck.Generator.list_with_length number_of_proofs Fee.(gen_incl (of_int 1) (of_int 20)) in - (number_of_proofs, fees) ) + (number_of_proofs, fees)) in return (ledger_init_state, cmds, iters, proofs_available) in @@ -2948,7 +2981,7 @@ let%test_module "test" = ~f:(fun (ledger_init_state, cmds, iters, proofs_available) -> async_with_ledgers ledger_init_state (fun sl test_mask -> test_random_proof_fee ledger_init_state cmds iters - proofs_available sl test_mask `Many_provers ) ) + proofs_available sl test_mask `Many_provers)) let check_pending_coinbase ~supercharge_coinbase proof ~sl_before ~sl_after (_state_hash, state_body_hash) pc_update ~is_new_stack = @@ -3005,16 +3038,17 @@ let%test_module "test" = -> Mina_base.Snapp_predicate.Protocol_state.View.t -> Sl.t ref -> Ledger.Mask.Attached.t - -> [`One_prover | `Many_provers] + -> [ `One_prover 
| `Many_provers ] -> unit Deferred.t = fun init_state cmds cmd_iters proofs_available state_body_hashes current_state_view sl test_mask provers -> let%map proofs_available_left, _state_body_hashes_left = iter_cmds_acc cmds cmd_iters (proofs_available, state_body_hashes) - (fun cmds_left - _count_opt - cmds_this_iter - (proofs_available_left, state_body_hashes) + (fun + cmds_left + _count_opt + cmds_this_iter + (proofs_available_left, state_body_hashes) -> let work_list : Transaction_snark_work.Statement.t list = Sl.Scan_state.all_work_statements_exn !sl.scan_state @@ -3024,8 +3058,7 @@ let%test_module "test" = in let sl_before = !sl in let state_body_hash = List.hd_exn state_body_hashes in - let%map proof, diff, is_new_stack, pc_update, supercharge_coinbase - = + let%map proof, diff, is_new_stack, pc_update, supercharge_coinbase = create_and_apply_with_state_body_hash ~current_state_view ~state_and_body_hash:state_body_hash sl cmds_this_iter (stmt_to_work_restricted @@ -3050,7 +3083,7 @@ let%test_module "test" = cmds_applied_this_iter (init_pks init_state) ; ( diff , (List.tl_exn proofs_available_left, List.tl_exn state_body_hashes) - ) ) + )) in assert (List.is_empty proofs_available_left) @@ -3062,27 +3095,28 @@ let%test_module "test" = in let%bind state_body_hashes = Quickcheck_lib.map_gens iters ~f:(fun _ -> - Quickcheck.Generator.tuple2 State_hash.gen State_body_hash.gen ) + Quickcheck.Generator.tuple2 State_hash.gen State_body_hash.gen) in let%bind proofs_available = Quickcheck_lib.map_gens iters ~f:(fun cmds_opt -> - Int.gen_incl 0 (3 * Option.value_exn cmds_opt) ) + Int.gen_incl 0 (3 * Option.value_exn cmds_opt)) in return (ledger_init_state, cmds, iters, proofs_available, state_body_hashes) in let current_state_view = dummy_state_view () in Quickcheck.test g ~trials:5 - ~f:(fun ( ledger_init_state - , cmds - , iters - , proofs_available - , state_body_hashes ) + ~f:(fun + ( ledger_init_state + , cmds + , iters + , proofs_available + , state_body_hashes ) 
-> async_with_ledgers ledger_init_state (fun sl test_mask -> test_pending_coinbase ledger_init_state cmds iters proofs_available state_body_hashes current_state_view sl - test_mask prover ) ) + test_mask prover)) let%test_unit "Validate pending coinbase for random number of \ commands-random number of proofs-one prover)" = @@ -3106,8 +3140,7 @@ let%test_module "test" = let balance = Balance.of_int 100_000_000_000 in (*Should fully vest by slot = 7*) let acc = - Account.create_timed account_id balance - ~initial_minimum_balance:balance + Account.create_timed account_id balance ~initial_minimum_balance:balance ~cliff_time:(Mina_numbers.Global_slot.of_int 4) ~cliff_amount:Amount.zero ~vesting_period:(Mina_numbers.Global_slot.of_int 2) @@ -3162,7 +3195,7 @@ let%test_module "test" = (stmt_to_work_zero_fee ~prover:self.public_key) in check_receiver_account !sl block_count ; - return () ) + return ()) let normal_coinbase = constraint_constants.coinbase_amount @@ -3203,12 +3236,13 @@ let%test_module "test" = [| ( keypair_self , Balance.to_amount self.balance , self.nonce - , self.timing ) |] + , self.timing ) + |] ledger_init_state in async_with_ledgers ledger_init_state (fun sl _test_mask -> supercharge_coinbase_test ~self ~delegator:self ~block_count - f_expected_balance sl ) ) + f_expected_balance sl)) let%test_unit "Supercharged coinbase - unlocked account delegating to \ locked account" = @@ -3234,13 +3268,13 @@ let%test_module "test" = ; ( keypair_delegator , Balance.to_amount unlocked_delegator.balance , unlocked_delegator.nonce - , unlocked_delegator.timing ) |] + , unlocked_delegator.timing ) + |] ledger_init_state in async_with_ledgers ledger_init_state (fun sl _test_mask -> supercharge_coinbase_test ~self:locked_self - ~delegator:unlocked_delegator ~block_count f_expected_balance - sl ) ) + ~delegator:unlocked_delegator ~block_count f_expected_balance sl)) let%test_unit "Supercharged coinbase - locked account delegating to \ unlocked account" = @@ -3277,16 
+3311,16 @@ let%test_module "test" = ; ( keypair_delegator , Balance.to_amount locked_delegator.balance , locked_delegator.nonce - , locked_delegator.timing ) |] + , locked_delegator.timing ) + |] ledger_init_state in async_with_ledgers ledger_init_state (fun sl _test_mask -> supercharge_coinbase_test ~self:unlocked_self - ~delegator:locked_delegator ~block_count f_expected_balance sl - ) ) + ~delegator:locked_delegator ~block_count f_expected_balance sl)) - let%test_unit "Supercharged coinbase - locked account delegating to \ - locked account" = + let%test_unit "Supercharged coinbase - locked account delegating to locked \ + account" = let keypair_self, locked_self = timed_account 1 in let keypair_delegator, locked_delegator = timed_account 2 in let slots_with_locked_tokens = @@ -3309,11 +3343,11 @@ let%test_module "test" = ; ( keypair_delegator , Balance.to_amount locked_delegator.balance , locked_delegator.nonce - , locked_delegator.timing ) |] + , locked_delegator.timing ) + |] ledger_init_state in async_with_ledgers ledger_init_state (fun sl _test_mask -> supercharge_coinbase_test ~self:locked_self - ~delegator:locked_delegator ~block_count f_expected_balance sl - ) ) + ~delegator:locked_delegator ~block_count f_expected_balance sl)) end ) diff --git a/src/lib/staged_ledger/staged_ledger.mli b/src/lib/staged_ledger/staged_ledger.mli index 358c8e3ebb4..73212eff6a7 100644 --- a/src/lib/staged_ledger/staged_ledger.mli +++ b/src/lib/staged_ledger/staged_ledger.mli @@ -20,7 +20,8 @@ module Scan_state : sig end module Space_partition : sig - type t = {first: int * int; second: (int * int) option} [@@deriving sexp] + type t = { first : int * int; second : (int * int) option } + [@@deriving sexp] end val hash : t -> Staged_ledger_hash.Aux_hash.t @@ -115,7 +116,7 @@ val copy : t -> t val hash : t -> Staged_ledger_hash.t val apply : - ?skip_verification:[`Proofs | `All] + ?skip_verification:[ `Proofs | `All ] -> 
constraint_constants:Genesis_constants.Constraint_constants.t -> t -> Staged_ledger_diff.t @@ -125,12 +126,12 @@ val apply : -> state_and_body_hash:State_hash.t * State_body_hash.t -> coinbase_receiver:Public_key.Compressed.t -> supercharge_coinbase:bool - -> ( [`Hash_after_applying of Staged_ledger_hash.t] + -> ( [ `Hash_after_applying of Staged_ledger_hash.t ] * [ `Ledger_proof of (Ledger_proof.t * (Transaction.t With_status.t * State_hash.t) list) option ] - * [`Staged_ledger of t] - * [`Pending_coinbase_update of bool * Pending_coinbase.Update.t] + * [ `Staged_ledger of t ] + * [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ] , Staged_ledger_error.t ) Deferred.Result.t @@ -143,12 +144,12 @@ val apply_diff_unchecked : -> state_and_body_hash:State_hash.t * State_body_hash.t -> coinbase_receiver:Public_key.Compressed.t -> supercharge_coinbase:bool - -> ( [`Hash_after_applying of Staged_ledger_hash.t] + -> ( [ `Hash_after_applying of Staged_ledger_hash.t ] * [ `Ledger_proof of (Ledger_proof.t * (Transaction.t With_status.t * State_hash.t) list) option ] - * [`Staged_ledger of t] - * [`Pending_coinbase_update of bool * Pending_coinbase.Update.t] + * [ `Staged_ledger of t ] + * [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ] , Staged_ledger_error.t ) Deferred.Result.t @@ -164,8 +165,9 @@ val create_diff : -> logger:Logger.t -> current_state_view:Snapp_predicate.Protocol_state.View.t -> transactions_by_fee:User_command.Valid.t Sequence.t - -> get_completed_work:( Transaction_snark_work.Statement.t - -> Transaction_snark_work.Checked.t option) + -> get_completed_work: + ( Transaction_snark_work.Statement.t + -> Transaction_snark_work.Checked.t option) -> supercharge_coinbase:bool -> ( Staged_ledger_diff.With_valid_signatures_and_proofs.t , Pre_diff_info.Error.t ) @@ -180,7 +182,7 @@ val can_apply_supercharged_coinbase_exn : val statement_exn : constraint_constants:Genesis_constants.Constraint_constants.t -> t - -> [`Non_empty of 
Transaction_snark.Statement.t | `Empty] Deferred.t + -> [ `Non_empty of Transaction_snark.Statement.t | `Empty ] Deferred.t val of_scan_state_pending_coinbases_and_snarked_ledger : logger:Logger.t diff --git a/src/lib/staged_ledger_diff/staged_ledger_diff.ml b/src/lib/staged_ledger_diff/staged_ledger_diff.ml index aac66d92225..14dd847691f 100644 --- a/src/lib/staged_ledger_diff/staged_ledger_diff.ml +++ b/src/lib/staged_ledger_diff/staged_ledger_diff.ml @@ -22,13 +22,13 @@ module At_most_two = struct match (t, ws) with | Zero, [] -> Ok (One None) - | Zero, [a] -> + | Zero, [ a ] -> Ok (One (Some a)) | One _, [] -> Ok (Two None) - | One _, [a] -> + | One _, [ a ] -> Ok (Two (Some (a, None))) - | One _, [a; a'] -> + | One _, [ a; a' ] -> Ok (Two (Some (a', Some a))) | _ -> Or_error.error_string "Error incrementing coinbase parts" @@ -51,7 +51,7 @@ module At_most_one = struct match (t, ws) with | Zero, [] -> Ok (One None) - | Zero, [a] -> + | Zero, [ a ] -> Ok (One (Some a)) | _ -> Or_error.error_string "Error incrementing coinbase parts" @@ -80,10 +80,10 @@ module Pre_diff_two = struct module V1 = struct type ('a, 'b) t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Ft.Stable.V1.t At_most_two.Stable.V1.t - ; internal_command_balances: + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Ft.Stable.V1.t At_most_two.Stable.V1.t + ; internal_command_balances : Transaction_status.Internal_command_balance_data.Stable.V1.t list } [@@deriving compare, sexp, yojson] @@ -91,11 +91,12 @@ module Pre_diff_two = struct end] type ('a, 'b) t = ('a, 'b) Stable.Latest.t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Ft.t At_most_two.t - ; internal_command_balances: - Transaction_status.Internal_command_balance_data.t list } + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Ft.t At_most_two.t + ; internal_command_balances : + Transaction_status.Internal_command_balance_data.t list + } [@@deriving compare, sexp, 
yojson] end @@ -106,10 +107,10 @@ module Pre_diff_one = struct module V1 = struct type ('a, 'b) t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Ft.Stable.V1.t At_most_one.Stable.V1.t - ; internal_command_balances: + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Ft.Stable.V1.t At_most_one.Stable.V1.t + ; internal_command_balances : Transaction_status.Internal_command_balance_data.Stable.V1.t list } [@@deriving compare, sexp, yojson] @@ -117,11 +118,12 @@ module Pre_diff_one = struct end] type ('a, 'b) t = ('a, 'b) Stable.Latest.t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Ft.t At_most_one.t - ; internal_command_balances: - Transaction_status.Internal_command_balance_data.t list } + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Ft.t At_most_one.t + ; internal_command_balances : + Transaction_status.Internal_command_balance_data.t list + } [@@deriving compare, sexp, yojson] end @@ -186,13 +188,13 @@ module Stable = struct [@@@no_toplevel_latest_type] module V1 = struct - type t = {diff: Diff.Stable.V1.t} [@@deriving compare, sexp, yojson] + type t = { diff : Diff.Stable.V1.t } [@@deriving compare, sexp, yojson] let to_latest = Fn.id end end] -type t = Stable.Latest.t = {diff: Diff.t} +type t = Stable.Latest.t = { diff : Diff.t } [@@deriving compare, sexp, yojson, fields] module With_valid_signatures_and_proofs = struct @@ -213,15 +215,17 @@ module With_valid_signatures_and_proofs = struct * pre_diff_with_at_most_one_coinbase option [@@deriving compare, sexp, to_yojson] - type t = {diff: diff} [@@deriving compare, sexp, to_yojson] + type t = { diff : diff } [@@deriving compare, sexp, to_yojson] let empty_diff : t = - { diff= - ( { completed_works= [] - ; commands= [] - ; coinbase= At_most_two.Zero - ; internal_command_balances= [] } - , None ) } + { diff = + ( { completed_works = [] + ; commands = [] + ; coinbase = At_most_two.Zero + ; internal_command_balances = [] + } + , None ) + } 
let commands t = (fst t.diff).commands @@ -272,7 +276,7 @@ module With_valid_signatures = struct * pre_diff_with_at_most_one_coinbase option [@@deriving compare, sexp, to_yojson] - type t = {diff: diff} [@@deriving compare, sexp, to_yojson] + type t = { diff : diff } [@@deriving compare, sexp, to_yojson] let coinbase ~(constraint_constants : Genesis_constants.Constraint_constants.t) @@ -302,73 +306,80 @@ let validate_commands (t : t) map (check (List.map cs ~f:With_status.data)) ~f: - (List.map2_exn cs ~f:(fun c data -> {With_status.data; status= c.status})) + (List.map2_exn cs ~f:(fun c data -> + { With_status.data; status = c.status })) in let d1, d2 = t.diff in map (validate - ( d1.commands - @ Option.value_map d2 ~default:[] ~f:(fun d2 -> d2.commands) )) + (d1.commands @ Option.value_map d2 ~default:[] ~f:(fun d2 -> d2.commands))) ~f:(fun commands_all -> let commands1, commands2 = List.split_n commands_all (List.length d1.commands) in let p1 : With_valid_signatures.pre_diff_with_at_most_two_coinbase = - { completed_works= d1.completed_works - ; commands= commands1 - ; coinbase= d1.coinbase - ; internal_command_balances= d1.internal_command_balances } + { completed_works = d1.completed_works + ; commands = commands1 + ; coinbase = d1.coinbase + ; internal_command_balances = d1.internal_command_balances + } in let p2 = Option.value_map ~default:None d2 ~f:(fun d2 -> Some - { Pre_diff_one.completed_works= d2.completed_works - ; commands= commands2 - ; coinbase= d2.coinbase - ; internal_command_balances= d2.internal_command_balances } ) + { Pre_diff_one.completed_works = d2.completed_works + ; commands = commands2 + ; coinbase = d2.coinbase + ; internal_command_balances = d2.internal_command_balances + }) in - ({diff= (p1, p2)} : With_valid_signatures.t) ) + ({ diff = (p1, p2) } : With_valid_signatures.t)) let forget_proof_checks (d : With_valid_signatures_and_proofs.t) : With_valid_signatures.t = let d1 = fst d.diff in let p1 : 
With_valid_signatures.pre_diff_with_at_most_two_coinbase = - { completed_works= forget_cw d1.completed_works - ; commands= d1.commands - ; coinbase= d1.coinbase - ; internal_command_balances= d1.internal_command_balances } + { completed_works = forget_cw d1.completed_works + ; commands = d1.commands + ; coinbase = d1.coinbase + ; internal_command_balances = d1.internal_command_balances + } in let p2 = - Option.map (snd d.diff) ~f:(fun d2 -> - ( { completed_works= forget_cw d2.completed_works - ; commands= d2.commands - ; coinbase= d2.coinbase - ; internal_command_balances= d2.internal_command_balances } - : With_valid_signatures.pre_diff_with_at_most_one_coinbase ) ) + Option.map (snd d.diff) + ~f:(fun d2 : With_valid_signatures.pre_diff_with_at_most_one_coinbase -> + { completed_works = forget_cw d2.completed_works + ; commands = d2.commands + ; coinbase = d2.coinbase + ; internal_command_balances = d2.internal_command_balances + }) in - {diff= (p1, p2)} + { diff = (p1, p2) } let forget_pre_diff_with_at_most_two (pre_diff : With_valid_signatures_and_proofs.pre_diff_with_at_most_two_coinbase) : Pre_diff_with_at_most_two_coinbase.t = - { completed_works= forget_cw pre_diff.completed_works - ; commands= (pre_diff.commands :> User_command.t With_status.t list) - ; coinbase= pre_diff.coinbase - ; internal_command_balances= pre_diff.internal_command_balances } + { completed_works = forget_cw pre_diff.completed_works + ; commands = (pre_diff.commands :> User_command.t With_status.t list) + ; coinbase = pre_diff.coinbase + ; internal_command_balances = pre_diff.internal_command_balances + } let forget_pre_diff_with_at_most_one (pre_diff : With_valid_signatures_and_proofs.pre_diff_with_at_most_one_coinbase) = - { Pre_diff_one.completed_works= forget_cw pre_diff.completed_works - ; commands= (pre_diff.commands :> User_command.t With_status.t list) - ; coinbase= pre_diff.coinbase - ; internal_command_balances= pre_diff.internal_command_balances } + { 
Pre_diff_one.completed_works = forget_cw pre_diff.completed_works + ; commands = (pre_diff.commands :> User_command.t With_status.t list) + ; coinbase = pre_diff.coinbase + ; internal_command_balances = pre_diff.internal_command_balances + } let forget (t : With_valid_signatures_and_proofs.t) = - { diff= + { diff = ( forget_pre_diff_with_at_most_two (fst t.diff) - , Option.map (snd t.diff) ~f:forget_pre_diff_with_at_most_one ) } + , Option.map (snd t.diff) ~f:forget_pre_diff_with_at_most_one ) + } let commands (t : t) = (fst t.diff).commands @@ -390,19 +401,21 @@ let net_return (commands t) ~f:(fun sum cmd -> let%bind sum = sum in - Fee.( + ) sum (User_command.fee_exn (With_status.data cmd)) ) + Fee.( + ) sum (User_command.fee_exn (With_status.data cmd))) in let%bind completed_works_fees = List.fold ~init:(Some Fee.zero) (completed_works t) ~f:(fun sum work -> let%bind sum = sum in - Fee.( + ) sum work.Transaction_snark_work.fee ) + Fee.( + ) sum work.Transaction_snark_work.fee) in Amount.(of_fee total_reward - of_fee completed_works_fees) let empty_diff : t = - { diff= - ( { completed_works= [] - ; commands= [] - ; coinbase= At_most_two.Zero - ; internal_command_balances= [] } - , None ) } + { diff = + ( { completed_works = [] + ; commands = [] + ; coinbase = At_most_two.Zero + ; internal_command_balances = [] + } + , None ) + } diff --git a/src/lib/staged_ledger_diff/staged_ledger_diff.mli b/src/lib/staged_ledger_diff/staged_ledger_diff.mli index a5bb213743c..23cb73a7f53 100644 --- a/src/lib/staged_ledger_diff/staged_ledger_diff.mli +++ b/src/lib/staged_ledger_diff/staged_ledger_diff.mli @@ -5,13 +5,12 @@ module At_most_two : sig type 'a t = Zero | One of 'a option | Two of ('a * 'a option) option [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type 'a t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type 'a t [@@deriving compare, sexp, yojson, bin_io, version] end - with type 'a 
V1.t = 'a t + end + with type 'a V1.t = 'a t val increase : 'a t -> 'a list -> 'a t Or_error.t end @@ -19,51 +18,50 @@ end module At_most_one : sig type 'a t = Zero | One of 'a option [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type 'a t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type 'a t [@@deriving compare, sexp, yojson, bin_io, version] end - with type 'a V1.t = 'a t + end + with type 'a V1.t = 'a t val increase : 'a t -> 'a list -> 'a t Or_error.t end module Pre_diff_two : sig type ('a, 'b) t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Coinbase.Fee_transfer.t At_most_two.t - ; internal_command_balances: - Transaction_status.Internal_command_balance_data.t list } + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Coinbase.Fee_transfer.t At_most_two.t + ; internal_command_balances : + Transaction_status.Internal_command_balance_data.t list + } [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type ('a, 'b) t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type ('a, 'b) t [@@deriving compare, sexp, yojson, bin_io, version] end - with type ('a, 'b) V1.t = ('a, 'b) t + end + with type ('a, 'b) V1.t = ('a, 'b) t end module Pre_diff_one : sig type ('a, 'b) t = - { completed_works: 'a list - ; commands: 'b list - ; coinbase: Coinbase.Fee_transfer.t At_most_one.t - ; internal_command_balances: - Transaction_status.Internal_command_balance_data.t list } + { completed_works : 'a list + ; commands : 'b list + ; coinbase : Coinbase.Fee_transfer.t At_most_one.t + ; internal_command_balances : + Transaction_status.Internal_command_balance_data.t list + } [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type ('a, 'b) t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type ('a, 'b) t 
[@@deriving compare, sexp, yojson, bin_io, version] end - with type ('a, 'b) V1.t = ('a, 'b) t + end + with type ('a, 'b) V1.t = ('a, 'b) t end module Pre_diff_with_at_most_two_coinbase : sig @@ -71,13 +69,12 @@ module Pre_diff_with_at_most_two_coinbase : sig (Transaction_snark_work.t, User_command.t With_status.t) Pre_diff_two.t [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type t [@@deriving compare, sexp, yojson, bin_io, version] end - with type V1.t = t + end + with type V1.t = t end module Pre_diff_with_at_most_one_coinbase : sig @@ -85,13 +82,12 @@ module Pre_diff_with_at_most_one_coinbase : sig (Transaction_snark_work.t, User_command.t With_status.t) Pre_diff_one.t [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type t [@@deriving compare, sexp, yojson, bin_io, version] - end + module Stable : sig + module V1 : sig + type t [@@deriving compare, sexp, yojson, bin_io, version] end - with type V1.t = t + end + with type V1.t = t end module Diff : sig @@ -100,27 +96,25 @@ module Diff : sig * Pre_diff_with_at_most_one_coinbase.t option [@@deriving compare, sexp, yojson] - module Stable : - sig - module V1 : sig - type t [@@deriving compare, sexp, bin_io, yojson, version] - end + module Stable : sig + module V1 : sig + type t [@@deriving compare, sexp, bin_io, yojson, version] end - with type V1.t = t + end + with type V1.t = t end -type t = {diff: Diff.t} [@@deriving compare, sexp, compare, yojson, fields] +type t = { diff : Diff.t } [@@deriving compare, sexp, compare, yojson, fields] -module Stable : - sig - module V1 : sig - type t = {diff: Diff.t} - [@@deriving compare, sexp, compare, yojson, bin_io, version] - end - - module Latest = V1 +module Stable : sig + module V1 : sig + type t = { diff : Diff.t } + [@@deriving compare, sexp, compare, yojson, bin_io, version] end - with type V1.t = t + 
+ module Latest = V1 +end +with type V1.t = t module With_valid_signatures_and_proofs : sig type pre_diff_with_at_most_two_coinbase = @@ -140,7 +134,7 @@ module With_valid_signatures_and_proofs : sig * pre_diff_with_at_most_one_coinbase option [@@deriving compare, sexp, to_yojson] - type t = {diff: diff} [@@deriving compare, sexp, to_yojson] + type t = { diff : diff } [@@deriving compare, sexp, to_yojson] val empty_diff : t @@ -165,7 +159,7 @@ module With_valid_signatures : sig * pre_diff_with_at_most_one_coinbase option [@@deriving compare, sexp, to_yojson] - type t = {diff: diff} [@@deriving compare, sexp, to_yojson] + type t = { diff : diff } [@@deriving compare, sexp, to_yojson] val coinbase : constraint_constants:Genesis_constants.Constraint_constants.t @@ -179,9 +173,9 @@ val forget_proof_checks : val validate_commands : t - -> check:( User_command.t list - -> (User_command.Valid.t list, 'e) Result.t - Async.Deferred.Or_error.t) + -> check: + ( User_command.t list + -> (User_command.Valid.t list, 'e) Result.t Async.Deferred.Or_error.t) -> (With_valid_signatures.t, 'e) Result.t Async.Deferred.Or_error.t val forget : With_valid_signatures_and_proofs.t -> t diff --git a/src/lib/state_or_error/state_or_error.ml b/src/lib/state_or_error/state_or_error.ml index 4366555962c..197aa6bd780 100644 --- a/src/lib/state_or_error/state_or_error.ml +++ b/src/lib/state_or_error/state_or_error.ml @@ -32,24 +32,24 @@ end module S3_to_S2 (X : State_or_error_intf.S3) : State_or_error_intf.S2 - with type ('a, 'b) t := ('a, 'b, unit) X.t - and type 'a state := ('a, unit) X.state = struct + with type ('a, 'b) t := ('a, 'b, unit) X.t + and type 'a state := ('a, unit) X.state = struct include ( X : State_or_error_intf.S3 - with type ('a, 'b, 'e) t := ('a, 'b, 'e) X.t - and type ('b, 'e) state := ('b, 'e) X.state ) + with type ('a, 'b, 'e) t := ('a, 'b, 'e) X.t + and type ('b, 'e) state := ('b, 'e) X.state ) end module S2_to_S (X : State_or_error_intf.S2) : State_or_error_intf.S - with 
type 'a t := ('a, unit) X.t - and type state := unit X.state = struct + with type 'a t := ('a, unit) X.t + and type state := unit X.state = struct include ( X : State_or_error_intf.S2 - with type ('a, 'b) t := ('a, 'b) X.t - and type 'a state := 'a X.state ) + with type ('a, 'b) t := ('a, 'b) X.t + and type 'a state := 'a X.state ) end module Make2 (State : State_or_error_intf.State_intf1) : diff --git a/src/lib/storage/checked_data.ml b/src/lib/storage/checked_data.ml index ffa7bac8162..07c62e2e814 100644 --- a/src/lib/storage/checked_data.ml +++ b/src/lib/storage/checked_data.ml @@ -3,15 +3,14 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type 'a t = {checksum: Core_kernel.Md5.Stable.V1.t; data: 'a} + type 'a t = { checksum : Core_kernel.Md5.Stable.V1.t; data : 'a } end end] -let md5 (tc : 'a Binable.m) data = - Md5.digest_string (Binable.to_string tc data) +let md5 (tc : 'a Binable.m) data = Md5.digest_string (Binable.to_string tc data) let wrap tc data : string t = let data = Binable.to_string tc data in - {checksum= Md5.digest_string data; data} + { checksum = Md5.digest_string data; data } -let valid {checksum; data} = Md5.(equal (digest_string data) checksum) +let valid { checksum; data } = Md5.(equal (digest_string data) checksum) diff --git a/src/lib/storage/disk.ml b/src/lib/storage/disk.ml index fd2bfcfca5f..530f7abfea5 100644 --- a/src/lib/storage/disk.ml +++ b/src/lib/storage/disk.ml @@ -7,9 +7,9 @@ type location = Location.t [@@deriving sexp] type 'a t = 'a Checked_data.t module Controller = struct - type nonrec 'a t = {logger: Logger.t; tc: 'a Binable.m} + type nonrec 'a t = { logger : Logger.t; tc : 'a Binable.m } - let create ~logger tc = {logger; tc} + let create ~logger tc = { logger; tc } end let load_with_checksum (type a) (c : a Controller.t) location = @@ -25,7 +25,7 @@ let load_with_checksum (type a) (c : a Controller.t) location = if Checked_data.valid t then Ok Checked_data. 
- {checksum= t.checksum; data= Binable.of_string c.tc t.data} + { checksum = t.checksum; data = Binable.of_string c.tc t.data } else Error `Checksum_no_match | Error e -> Error (`IO_error e) ) diff --git a/src/lib/storage/list.ml b/src/lib/storage/list.ml index 59d2cc640f6..4a09a86bfaf 100644 --- a/src/lib/storage/list.ml +++ b/src/lib/storage/list.ml @@ -16,7 +16,7 @@ module Make (M : Storage_intf.With_checksum_intf) : | `IO_error e -> e | `No_exist -> - Error.of_string "No_exist" ) + Error.of_string "No_exist") |> Error.of_list let first_success ~f = diff --git a/src/lib/storage/memory.ml b/src/lib/storage/memory.ml index 9160db6d5dc..0858307e61d 100644 --- a/src/lib/storage/memory.ml +++ b/src/lib/storage/memory.ml @@ -8,9 +8,9 @@ include Checked_data module Controller = struct type nonrec 'a t = - {logger: Logger.t; tc: 'a Binable.m; mem: 'a t Location.Table.t} + { logger : Logger.t; tc : 'a Binable.m; mem : 'a t Location.Table.t } - let create ~logger tc = {logger; tc; mem= Location.Table.create ()} + let create ~logger tc = { logger; tc; mem = Location.Table.create () } end let load_with_checksum (type a) (c : a Controller.t) location = @@ -27,7 +27,7 @@ let load c location = let store_with_checksum (type a) (c : a Controller.t) location (data : a) = let checksum = md5 c.tc data in Deferred.return - ( Location.Table.set c.mem ~key:location ~data:{checksum; data} ; + ( Location.Table.set c.mem ~key:location ~data:{ checksum; data } ; checksum ) let store (c : 'a Controller.t) location data : unit Deferred.t = diff --git a/src/lib/storage/storage_intf.ml b/src/lib/storage/storage_intf.ml index aeb33ff3d66..d968e56ba62 100644 --- a/src/lib/storage/storage_intf.ml +++ b/src/lib/storage/storage_intf.ml @@ -16,14 +16,14 @@ module type With_checksum_intf = sig 'a Controller.t -> location -> ( 'a - , [> `Checksum_no_match | `IO_error of Error.t | `No_exist] ) + , [> `Checksum_no_match | `IO_error of Error.t | `No_exist ] ) Deferred.Result.t val load_with_checksum : 
'a Controller.t -> location -> ( 'a Checked_data.t - , [> `Checksum_no_match | `IO_error of Error.t | `No_exist] ) + , [> `Checksum_no_match | `IO_error of Error.t | `No_exist ] ) Deferred.Result.t val store : 'a Controller.t -> location -> 'a -> unit Deferred.t diff --git a/src/lib/structured_log_events/examples/event_declarations.ml b/src/lib/structured_log_events/examples/event_declarations.ml index dd5d9680434..f856ac2142c 100644 --- a/src/lib/structured_log_events/examples/event_declarations.ml +++ b/src/lib/structured_log_events/examples/event_declarations.ml @@ -8,12 +8,12 @@ open Core_kernel (* implicit log message *) type Structured_log_events.t += - | Reached_block_height of {height: int; time: string} + | Reached_block_height of { height : int; time : string } [@@deriving register_event] (* explicit log message *) -type Structured_log_events.t += Proof_failure of {why: string} - [@@deriving register_event {msg= "Proof failed because $why"}] +type Structured_log_events.t += Proof_failure of { why : string } + [@@deriving register_event { msg = "Proof failed because $why" }] (* constructor without record argument *) type Structured_log_events.t += Block_finalized [@@deriving register_event] @@ -22,4 +22,4 @@ type Structured_log_events.t += Block_finalized [@@deriving register_event] type Structured_log_events.t += Donuts_are_ready [@@warning "-22"] [@@deriving - register_event {msg= sprintf "My favorite flavor is: %s" "maple glazed"}] + register_event { msg = sprintf "My favorite flavor is: %s" "maple glazed" }] diff --git a/src/lib/structured_log_events/structured_log_events.ml b/src/lib/structured_log_events/structured_log_events.ml index f79b867b970..d70612f7d9f 100644 --- a/src/lib/structured_log_events/structured_log_events.ml +++ b/src/lib/structured_log_events/structured_log_events.ml @@ -9,11 +9,12 @@ let id_of_string s = s let string_of_id s = s type repr = - { id: id - ; event_name: string - ; arguments: String.Set.t - ; log: t -> (string * 
(string * Yojson.Safe.t) list) option - ; parse: (string * Yojson.Safe.t) list -> t option } + { id : id + ; event_name : string + ; arguments : String.Set.t + ; log : t -> (string * (string * Yojson.Safe.t) list) option + ; parse : (string * Yojson.Safe.t) list -> t option + } module Registry = struct let reprs : repr list ref = ref [] @@ -30,10 +31,10 @@ let parse_exn id json_pairs = message. *) List.filter json_pairs ~f:(fun (field_name, _) -> - Set.mem repr.arguments field_name ) + Set.mem repr.arguments field_name) in repr.parse json_pairs - else None ) + else None) in match result with | Some data -> @@ -44,8 +45,7 @@ let parse_exn id json_pairs = let log t = let result = List.find_map !Registry.reprs ~f:(fun repr -> - Option.map (repr.log t) ~f:(fun (msg, fields) -> (msg, repr.id, fields)) - ) + Option.map (repr.log t) ~f:(fun (msg, fields) -> (msg, repr.id, fields))) in match result with | Some data -> @@ -59,8 +59,8 @@ let log t = let register_constructor = Registry.register_constructor let dump_registered_events () = - List.map !Registry.reprs ~f:(fun {event_name; id; arguments; _} -> - (event_name, id, Set.to_list arguments) ) + List.map !Registry.reprs ~f:(fun { event_name; id; arguments; _ } -> + (event_name, id, Set.to_list arguments)) let check_interpolations_exn ~msg_loc msg label_names = (* don't use Logproc_lib, which depends on C++ code @@ -78,8 +78,8 @@ let check_interpolations_exn ~msg_loc msg label_names = when not (List.mem ~equal:String.equal label_names interp) -> failwithf "%s\n\ - The structured log message contains interpolation point \ - \"$%s\" which is not a field in the record" + The structured log message contains interpolation point \"$%s\" \ + which is not a field in the record" msg_loc interp () | _ -> - () ) + ()) diff --git a/src/lib/structured_log_events/structured_log_events.mli b/src/lib/structured_log_events/structured_log_events.mli index 7d56f86dbd4..34a942b7b41 100644 --- 
a/src/lib/structured_log_events/structured_log_events.mli +++ b/src/lib/structured_log_events/structured_log_events.mli @@ -35,11 +35,12 @@ val string_of_id : id -> string This is automatically generated by the [@@deriving register_event] ppx. *) type repr = - { id: id - ; event_name: string - ; arguments: String.Set.t - ; log: t -> (string * (string * Yojson.Safe.t) list) option - ; parse: (string * Yojson.Safe.t) list -> t option } + { id : id + ; event_name : string + ; arguments : String.Set.t + ; log : t -> (string * (string * Yojson.Safe.t) list) option + ; parse : (string * Yojson.Safe.t) list -> t option + } (** Register a structured log event's representation. This is for internal use by the [@@deriving register_event] ppx. diff --git a/src/lib/sync_handler/sync_handler.ml b/src/lib/sync_handler/sync_handler.ml index 29975bc5168..63f89182937 100644 --- a/src/lib/sync_handler/sync_handler.ml +++ b/src/lib/sync_handler/sync_handler.ml @@ -10,12 +10,12 @@ module type Inputs_intf = sig module Best_tip_prover : Mina_intf.Best_tip_prover_intf - with type transition_frontier := Transition_frontier.t + with type transition_frontier := Transition_frontier.t end module Make (Inputs : Inputs_intf) : Mina_intf.Sync_handler_intf - with type transition_frontier := Inputs.Transition_frontier.t = struct + with type transition_frontier := Inputs.Transition_frontier.t = struct open Inputs let find_in_root_history frontier state_hash = @@ -55,8 +55,8 @@ module Make (Inputs : Inputs_intf) : staking_epoch_ledger) then match staking_epoch_ledger with - | Consensus.Data.Local_state.Snapshot.Ledger_snapshot - .Genesis_epoch_ledger _ -> + | Consensus.Data.Local_state.Snapshot.Ledger_snapshot.Genesis_epoch_ledger + _ -> None | Ledger_db ledger -> Some (Ledger.Any_ledger.cast (module Ledger.Db) ledger) @@ -66,8 +66,8 @@ module Make (Inputs : Inputs_intf) : next_epoch_ledger) then match next_epoch_ledger with - | Consensus.Data.Local_state.Snapshot.Ledger_snapshot - 
.Genesis_epoch_ledger _ -> + | Consensus.Data.Local_state.Snapshot.Ledger_snapshot.Genesis_epoch_ledger + _ -> None | Ledger_db ledger -> Some (Ledger.Any_ledger.cast (module Ledger.Db) ledger) @@ -91,8 +91,7 @@ module Make (Inputs : Inputs_intf) : in Sync_ledger.Any_ledger.Responder.answer_query responder query - let get_staged_ledger_aux_and_pending_coinbases_at_hash ~frontier state_hash - = + let get_staged_ledger_aux_and_pending_coinbases_at_hash ~frontier state_hash = let open Option.Let_syntax in let protocol_states scan_state = Staged_ledger.Scan_state.required_state_hashes scan_state @@ -107,7 +106,7 @@ module Make (Inputs : Inputs_intf) : | None -> Stop None | Some acc' -> - Continue (Some acc') ) + Continue (Some acc')) ~finish:Fn.id in match @@ -145,7 +144,7 @@ module Make (Inputs : Inputs_intf) : if requested <= Transition_frontier.max_catchup_chunk_length then Some () else ( [%log' trace (Logger.create ())] - ~metadata:[("n", `Int requested)] + ~metadata:[ ("n", `Int requested) ] "get_transition_chain requested $n > %d hashes" Transition_frontier.max_catchup_chunk_length ; None ) @@ -183,15 +182,13 @@ module Make (Inputs : Inputs_intf) : module Root = struct let prove ~logger ~consensus_constants ~frontier seen_consensus_state = let open Option.Let_syntax in - let%bind best_tip_with_witness = - Best_tip_prover.prove ~logger frontier - in + let%bind best_tip_with_witness = Best_tip_prover.prove ~logger frontier in let is_tip_better = Consensus.Hooks.equal_select_status (Consensus.Hooks.select ~constants:consensus_constants ~logger: (Logger.extend logger - [("selection_context", `String "Root.prove")]) + [ ("selection_context", `String "Root.prove") ]) ~existing: (With_hash.map ~f:External_transition.consensus_state best_tip_with_witness.data) @@ -200,7 +197,8 @@ module Make (Inputs : Inputs_intf) : in let%map () = Option.some_if is_tip_better () in { best_tip_with_witness with - data= With_hash.data best_tip_with_witness.data } + data = With_hash.data 
best_tip_with_witness.data + } let verify ~logger ~verifier ~consensus_constants ~genesis_constants ~precomputed_values observed_state peer_root = @@ -215,7 +213,7 @@ module Make (Inputs : Inputs_intf) : (Consensus.Hooks.select ~constants:consensus_constants ~logger: (Logger.extend logger - [("selection_context", `String "Root.verify")]) + [ ("selection_context", `String "Root.verify") ]) ~existing: (With_hash.map ~f:External_transition.consensus_state best_tip_transition) diff --git a/src/lib/sync_status/sync_status.ml b/src/lib/sync_status/sync_status.ml index 505ca7ce943..cafe0399d98 100644 --- a/src/lib/sync_status/sync_status.ml +++ b/src/lib/sync_status/sync_status.ml @@ -52,7 +52,7 @@ module T = struct module Stable = struct module V1 = struct type t = - [`Connecting | `Listening | `Offline | `Bootstrap | `Synced | `Catchup] + [ `Connecting | `Listening | `Offline | `Bootstrap | `Synced | `Catchup ] [@@deriving sexp, hash, compare, equal, enumerate] let to_latest = Fn.id @@ -77,9 +77,9 @@ include Hashable.Make (T) let check_conv to_repr of_repr ok_or_fail = List.for_all - [`Offline; `Bootstrap; `Synced; `Connecting; `Listening; `Catchup] + [ `Offline; `Bootstrap; `Synced; `Connecting; `Listening; `Catchup ] ~f:(fun sync_status -> - equal sync_status (of_repr (to_repr sync_status) |> ok_or_fail) ) + equal sync_status (of_repr (to_repr sync_status) |> ok_or_fail)) let%test "of_string (to_string x) == x" = check_conv to_string of_string Or_error.ok_exn diff --git a/src/lib/syncable_ledger/syncable_ledger.ml b/src/lib/syncable_ledger/syncable_ledger.ml index 1f6ab5cd25f..e6ca6a2dfa1 100644 --- a/src/lib/syncable_ledger/syncable_ledger.ml +++ b/src/lib/syncable_ledger/syncable_ledger.ml @@ -4,7 +4,7 @@ open Pipe_lib open Network_peer type Structured_log_events.t += Snarked_ledger_synced - [@@deriving register_event {msg= "Snarked database sync'd. All done"}] + [@@deriving register_event { msg = "Snarked database sync'd. 
All done" }] (** Run f recursively n times, starting with value r. e.g. funpow 3 f r = f (f (f r)) *) @@ -62,10 +62,10 @@ module type Inputs_intf = sig module MT : Merkle_ledger.Syncable_intf.S - with type hash := Hash.t - and type root_hash := Root_hash.t - and type addr := Addr.t - and type account := Account.t + with type hash := Hash.t + and type root_hash := Root_hash.t + and type addr := Addr.t + and type account := Account.t val account_subtree_height : int end @@ -123,7 +123,7 @@ module type S = sig -> root_hash -> data:'a -> equal:('a -> 'a -> bool) - -> [`Repeat | `New | `Update_data] + -> [ `Repeat | `New | `Update_data ] val peek_valid_tree : 'a t -> merkle_tree option @@ -132,7 +132,7 @@ module type S = sig val wait_until_valid : 'a t -> root_hash - -> [`Ok of merkle_tree | `Target_changed of root_hash option * root_hash] + -> [ `Ok of merkle_tree | `Target_changed of root_hash option * root_hash ] Deferred.t val fetch : @@ -140,7 +140,7 @@ module type S = sig -> root_hash -> data:'a -> equal:('a -> 'a -> bool) - -> [`Ok of merkle_tree | `Target_changed of root_hash option * root_hash] + -> [ `Ok of merkle_tree | `Target_changed of root_hash option * root_hash ] Deferred.t val apply_or_queue_diff : 'a t -> diff -> unit @@ -194,14 +194,14 @@ module Make (Inputs : Inputs_intf) : sig include S - with type merkle_tree := MT.t - and type hash := Hash.t - and type root_hash := Root_hash.t - and type addr := Addr.t - and type merkle_path := MT.path - and type account := Account.t - and type query := Addr.t Query.t - and type answer := (Hash.t, Account.t) Answer.t + with type merkle_tree := MT.t + and type hash := Hash.t + and type root_hash := Root_hash.t + and type addr := Addr.t + and type merkle_path := MT.path + and type account := Account.t + and type query := Addr.t Query.t + and type answer := (Hash.t, Account.t) Answer.t end = struct open Inputs @@ -215,10 +215,11 @@ end = struct module Responder = struct type t = - { mt: MT.t - ; f: query -> unit - ; 
logger: Logger.t - ; trust_system: Trust_system.t } + { mt : MT.t + ; f : query -> unit + ; logger : Logger.t + ; trust_system : Trust_system.t + } let create : MT.t @@ -226,11 +227,11 @@ end = struct -> logger:Logger.t -> trust_system:Trust_system.t -> t = - fun mt f ~logger ~trust_system -> {mt; f; logger; trust_system} + fun mt f ~logger ~trust_system -> { mt; f; logger; trust_system } let answer_query : t -> query Envelope.Incoming.t -> answer option Deferred.t = - fun {mt; f; logger; trust_system} query_envelope -> + fun { mt; f; logger; trust_system } query_envelope -> let open Trust_system in let ledger_depth = MT.depth mt in let sender = Envelope.Incoming.sender query_envelope in @@ -239,39 +240,39 @@ end = struct let response_or_punish = match query with | What_child_hashes a -> ( - match - let open Or_error.Let_syntax in - let%bind lchild = Addr.child ~ledger_depth a Direction.Left in - let%bind rchild = Addr.child ~ledger_depth a Direction.Right in - Or_error.try_with (fun () -> - Answer.Child_hashes_are - ( MT.get_inner_hash_at_addr_exn mt lchild - , MT.get_inner_hash_at_addr_exn mt rchild ) ) - with - | Ok answer -> - Either.First answer - | Error e -> - let logger = Logger.create () in - [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] - "When handling What_child_hashes request, the following error \ - happended: $error" ; - Either.Second - ( Actions.Violated_protocol - , Some - ( "invalid address $addr in What_child_hashes request" - , [("addr", Addr.to_yojson a)] ) ) ) + match + let open Or_error.Let_syntax in + let%bind lchild = Addr.child ~ledger_depth a Direction.Left in + let%bind rchild = Addr.child ~ledger_depth a Direction.Right in + Or_error.try_with (fun () -> + Answer.Child_hashes_are + ( MT.get_inner_hash_at_addr_exn mt lchild + , MT.get_inner_hash_at_addr_exn mt rchild )) + with + | Ok answer -> + Either.First answer + | Error e -> + let logger = Logger.create () in + [%log error] + ~metadata:[ ("error", 
Error_json.error_to_yojson e) ] + "When handling What_child_hashes request, the following \ + error happended: $error" ; + Either.Second + ( Actions.Violated_protocol + , Some + ( "invalid address $addr in What_child_hashes request" + , [ ("addr", Addr.to_yojson a) ] ) ) ) | What_contents a -> if Addr.height ~ledger_depth a > account_subtree_height then Either.Second ( Actions.Violated_protocol , Some ( "requested too big of a subtree at once: $addr" - , [("addr", Addr.to_yojson a)] ) ) + , [ ("addr", Addr.to_yojson a) ] ) ) else let addresses_and_accounts = List.sort ~compare:(fun (addr1, _) (addr2, _) -> - Addr.compare addr1 addr2 ) + Addr.compare addr1 addr2) @@ MT.get_all_accounts_rooted_at_exn mt a (* can't actually throw *) in @@ -283,7 +284,7 @@ end = struct ( Actions.Violated_protocol , Some ( "Requested empty subtree: $addr" - , [("addr", Addr.to_yojson a)] ) ) + , [ ("addr", Addr.to_yojson a) ] ) ) else let first_address, rest_address = (List.hd_exn addresses, List.tl_exn addresses) @@ -297,7 +298,7 @@ end = struct && [%equal: Addr.t option] expected_address (Some actual_address) then (Addr.next actual_address, true) - else (expected_address, false) ) + else (expected_address, false)) in if not is_compact then ( (* indicates our ledger is invalid somehow. 
*) @@ -311,7 +312,9 @@ end = struct ~f:(fun (addr, account) -> `Tuple [ Addr.to_yojson addr - ; Account.to_yojson account ] )) ) ] + ; Account.to_yojson account + ])) ) + ] "Missing an account at address: $missing_address inside \ the list: $addresses_and_accounts" ; assert false ) @@ -341,29 +344,30 @@ end = struct end type 'a t = - { mutable desired_root: Root_hash.t option - ; mutable auxiliary_data: 'a option - ; tree: MT.t - ; logger: Logger.t - ; trust_system: Trust_system.t - ; answers: + { mutable desired_root : Root_hash.t option + ; mutable auxiliary_data : 'a option + ; tree : MT.t + ; logger : Logger.t + ; trust_system : Trust_system.t + ; answers : (Root_hash.t * query * answer Envelope.Incoming.t) Linear_pipe.Reader.t - ; answer_writer: + ; answer_writer : (Root_hash.t * query * answer Envelope.Incoming.t) Linear_pipe.Writer.t - ; queries: (Root_hash.t * query) Linear_pipe.Writer.t - ; query_reader: (Root_hash.t * query) Linear_pipe.Reader.t - ; waiting_parents: Hash.t Addr.Table.t + ; queries : (Root_hash.t * query) Linear_pipe.Writer.t + ; query_reader : (Root_hash.t * query) Linear_pipe.Reader.t + ; waiting_parents : Hash.t Addr.Table.t (** Addresses we are waiting for the children of, and the expected hash of the node with the address. 
*) - ; waiting_content: Hash.t Addr.Table.t - ; mutable validity_listener: - [`Ok | `Target_changed of Root_hash.t option * Root_hash.t] Ivar.t } + ; waiting_content : Hash.t Addr.Table.t + ; mutable validity_listener : + [ `Ok | `Target_changed of Root_hash.t option * Root_hash.t ] Ivar.t + } let t_of_sexp _ = failwith "t_of_sexp: not implemented" let sexp_of_t _ = failwith "sexp_of_t: not implemented" - let desired_root_exn {desired_root; _} = desired_root |> Option.value_exn + let desired_root_exn { desired_root; _ } = desired_root |> Option.value_exn let destroy t = Linear_pipe.close_read t.answers ; @@ -378,7 +382,8 @@ end = struct [%log' trace t.logger] ~metadata: [ ("parent_address", Addr.to_yojson parent_addr) - ; ("hash", Hash.to_yojson expected) ] + ; ("hash", Hash.to_yojson expected) + ] "Expecting children parent $parent_address, expected: $hash" ; Addr.Table.add_exn t.waiting_parents ~key:parent_addr ~data:expected @@ -386,7 +391,7 @@ end = struct fun t addr expected -> [%log' trace t.logger] ~metadata: - [("address", Addr.to_yojson addr); ("hash", Hash.to_yojson expected)] + [ ("address", Addr.to_yojson addr); ("hash", Hash.to_yojson expected) ] "Expecting content addr $address, expected: $hash" ; Addr.Table.add_exn t.waiting_content ~key:addr ~data:expected @@ -448,7 +453,7 @@ end = struct not @@ Hash.equal (MT.get_inner_hash_at_addr_exn t.tree addr) hash in let subtrees_to_fetch = - [(la, lh); (ra, rh)] + [ (la, lh); (ra, rh) ] |> List.filter ~f:(Tuple2.uncurry should_fetch_children) in Addr.Table.remove t.waiting_parents parent_addr ; @@ -499,7 +504,8 @@ end = struct (** Handle the initial Num_accounts message, starting the main syncing process. 
*) let handle_num_accounts : - 'a t -> int -> Hash.t -> [`Success | `Hash_mismatch of Hash.t * Hash.t] = + 'a t -> int -> Hash.t -> [ `Success | `Hash_mismatch of Hash.t * Hash.t ] + = fun t n content_hash -> let rh = Root_hash.to_hash (desired_root_exn t) in let height = Int.ceil_log2 n in @@ -526,24 +532,22 @@ end = struct the t and the underlying ledger can change while processing is happening. *) let already_done = - match Ivar.peek t.validity_listener with - | Some `Ok -> - true - | _ -> - false + match Ivar.peek t.validity_listener with Some `Ok -> true | _ -> false in let sender = Envelope.Incoming.sender env in let answer = Envelope.Incoming.data env in [%log' trace t.logger] ~metadata: [ ("root_hash", Root_hash.to_yojson root_hash) - ; ("query", Query.to_yojson Addr.to_yojson query) ] + ; ("query", Query.to_yojson Addr.to_yojson query) + ] "Handle answer for $root_hash" ; if not (Root_hash.equal root_hash (desired_root_exn t)) then ( [%log' trace t.logger] ~metadata: [ ("desired_hash", Root_hash.to_yojson (desired_root_exn t)) - ; ("ignored_hash", Root_hash.to_yojson root_hash) ] + ; ("ignored_hash", Root_hash.to_yojson root_hash) + ] "My desired root was $desired_hash, so I'm ignoring $ignored_hash" ; Deferred.unit ) else if already_done then ( @@ -557,75 +561,77 @@ end = struct (* If a peer misbehaves we still need the information we asked them for, so requeue in that case. 
*) let requeue_query () = - Linear_pipe.write_without_pushback_if_open t.queries - (root_hash, query) + Linear_pipe.write_without_pushback_if_open t.queries (root_hash, query) in let credit_fulfilled_request () = record_envelope_sender t.trust_system t.logger sender ( Actions.Fulfilled_request , Some ( "sync ledger query $query" - , [("query", Query.to_yojson Addr.to_yojson query)] ) ) + , [ ("query", Query.to_yojson Addr.to_yojson query) ] ) ) in let%bind _ = match (query, answer) with | Query.What_child_hashes addr, Answer.Child_hashes_are (lh, rh) -> ( - match add_child_hashes_to t addr lh rh with - | `Hash_mismatch (expected, actual) -> - let%map () = - record_envelope_sender t.trust_system t.logger sender - ( Actions.Sent_bad_hash - , Some - ( "sent child hashes $lhash and $rhash for address \ - $addr, they merge hash to $actualmerge but we \ - expected $expectedmerge" - , [ ("lhash", Hash.to_yojson lh) - ; ("rhash", Hash.to_yojson rh) - ; ("actualmerge", Hash.to_yojson actual) - ; ("expectedmerge", Hash.to_yojson expected) ] ) ) - in - requeue_query () - | `Good children_to_verify -> - (* TODO #312: Make sure we don't write too much *) - List.iter children_to_verify ~f:(fun (addr, hash) -> - handle_node t addr hash ) ; - credit_fulfilled_request () ) + match add_child_hashes_to t addr lh rh with + | `Hash_mismatch (expected, actual) -> + let%map () = + record_envelope_sender t.trust_system t.logger sender + ( Actions.Sent_bad_hash + , Some + ( "sent child hashes $lhash and $rhash for address \ + $addr, they merge hash to $actualmerge but we \ + expected $expectedmerge" + , [ ("lhash", Hash.to_yojson lh) + ; ("rhash", Hash.to_yojson rh) + ; ("actualmerge", Hash.to_yojson actual) + ; ("expectedmerge", Hash.to_yojson expected) + ] ) ) + in + requeue_query () + | `Good children_to_verify -> + (* TODO #312: Make sure we don't write too much *) + List.iter children_to_verify ~f:(fun (addr, hash) -> + handle_node t addr hash) ; + credit_fulfilled_request () ) | 
Query.What_contents addr, Answer.Contents_are leaves -> ( - match add_content t addr leaves with - | `Success -> - credit_fulfilled_request () - | `Hash_mismatch (expected, actual) -> - let%map () = - record_envelope_sender t.trust_system t.logger sender - ( Actions.Sent_bad_hash - , Some - ( "sent accounts $accounts for address $addr, they \ - hash to $actual but we expected $expected" - , [ ( "accounts" - , `List (List.map ~f:Account.to_yojson leaves) ) - ; ("addr", Addr.to_yojson addr) - ; ("actual", Hash.to_yojson actual) - ; ("expected", Hash.to_yojson expected) ] ) ) - in - requeue_query () ) + match add_content t addr leaves with + | `Success -> + credit_fulfilled_request () + | `Hash_mismatch (expected, actual) -> + let%map () = + record_envelope_sender t.trust_system t.logger sender + ( Actions.Sent_bad_hash + , Some + ( "sent accounts $accounts for address $addr, they \ + hash to $actual but we expected $expected" + , [ ( "accounts" + , `List (List.map ~f:Account.to_yojson leaves) ) + ; ("addr", Addr.to_yojson addr) + ; ("actual", Hash.to_yojson actual) + ; ("expected", Hash.to_yojson expected) + ] ) ) + in + requeue_query () ) | Query.Num_accounts, Answer.Num_accounts (count, content_root) -> ( - match handle_num_accounts t count content_root with - | `Success -> - credit_fulfilled_request () - | `Hash_mismatch (expected, actual) -> - let%map () = - record_envelope_sender t.trust_system t.logger sender - ( Actions.Sent_bad_hash - , Some - ( "Claimed num_accounts $count, content root hash \ - $content_root_hash, that implies a root hash of \ - $actual, we expected $expected" - , [ ("count", `Int count) - ; ("content_root_hash", Hash.to_yojson content_root) - ; ("actual", Hash.to_yojson actual) - ; ("expected", Hash.to_yojson expected) ] ) ) - in - requeue_query () ) + match handle_num_accounts t count content_root with + | `Success -> + credit_fulfilled_request () + | `Hash_mismatch (expected, actual) -> + let%map () = + record_envelope_sender 
t.trust_system t.logger sender + ( Actions.Sent_bad_hash + , Some + ( "Claimed num_accounts $count, content root hash \ + $content_root_hash, that implies a root hash of \ + $actual, we expected $expected" + , [ ("count", `Int count) + ; ("content_root_hash", Hash.to_yojson content_root) + ; ("actual", Hash.to_yojson actual) + ; ("expected", Hash.to_yojson expected) + ] ) ) + in + requeue_query () ) | query, answer -> let%map () = record_envelope_sender t.trust_system t.logger sender @@ -636,7 +642,8 @@ end = struct , [ ("query", Query.to_yojson Addr.to_yojson query) ; ( "answer" , Answer.to_yojson Hash.to_yojson Account.to_yojson - answer ) ] ) ) + answer ) + ] ) ) in requeue_query () in @@ -664,9 +671,9 @@ end = struct [%log' debug t.logger] ~metadata: [ ("old_root_hash", Root_hash.to_yojson root_hash) - ; ("new_root_hash", Root_hash.to_yojson h) ] - "New_goal: changing target from $old_root_hash to $new_root_hash" - ) ; + ; ("new_root_hash", Root_hash.to_yojson h) + ] + "New_goal: changing target from $old_root_hash to $new_root_hash") ; Ivar.fill_if_empty t.validity_listener (`Target_changed (t.desired_root, h)) ; t.validity_listener <- Ivar.create () ; @@ -676,7 +683,7 @@ end = struct `New ) else if Option.fold t.auxiliary_data ~init:false ~f:(fun _ saved_data -> - equal data saved_data ) + equal data saved_data) then ( [%log' debug t.logger] "New_goal to same hash, not doing anything" ; `Repeat ) @@ -696,7 +703,7 @@ end = struct | `Ok -> Some t.tree | `Target_changed _ -> - None ) + None) let wait_until_valid t h = if not (Root_hash.equal h (desired_root_exn t)) then @@ -706,28 +713,29 @@ end = struct | `Target_changed payload -> `Target_changed payload | `Ok -> - `Ok t.tree ) + `Ok t.tree) let fetch t rh ~data ~equal = - ignore (new_goal t rh ~data ~equal : [`New | `Repeat | `Update_data]) ; + ignore (new_goal t rh ~data ~equal : [ `New | `Repeat | `Update_data ]) ; wait_until_valid t rh let create mt ~logger ~trust_system = let qr, qw = Linear_pipe.create 
() in let ar, aw = Linear_pipe.create () in let t = - { desired_root= None - ; auxiliary_data= None - ; tree= mt + { desired_root = None + ; auxiliary_data = None + ; tree = mt ; logger ; trust_system - ; answers= ar - ; answer_writer= aw - ; queries= qw - ; query_reader= qr - ; waiting_parents= Addr.Table.create () - ; waiting_content= Addr.Table.create () - ; validity_listener= Ivar.create () } + ; answers = ar + ; answer_writer = aw + ; queries = qw + ; query_reader = qr + ; waiting_parents = Addr.Table.create () + ; waiting_content = Addr.Table.create () + ; validity_listener = Ivar.create () + } in don't_wait_for (main_loop t) ; t diff --git a/src/lib/syncable_ledger/test.ml b/src/lib/syncable_ledger/test.ml index 6760b9aee2c..f897559be12 100644 --- a/src/lib/syncable_ledger/test.ml +++ b/src/lib/syncable_ledger/test.ml @@ -20,24 +20,24 @@ module type Input_intf = sig module Ledger : Ledger_intf - with type root_hash := Root_hash.t - and type account_id := Merkle_ledger_tests.Test_stubs.Account_id.t + with type root_hash := Root_hash.t + and type account_id := Merkle_ledger_tests.Test_stubs.Account_id.t module Sync_ledger : Syncable_ledger.S - with type merkle_tree := Ledger.t - and type hash := Ledger.hash - and type root_hash := Root_hash.t - and type addr := Ledger.addr - and type merkle_path := Ledger.path - and type account := Ledger.account - and type query := Ledger.addr Syncable_ledger.Query.t - and type answer := (Root_hash.t, Ledger.account) Syncable_ledger.Answer.t + with type merkle_tree := Ledger.t + and type hash := Ledger.hash + and type root_hash := Root_hash.t + and type addr := Ledger.addr + and type merkle_path := Ledger.path + and type account := Ledger.account + and type query := Ledger.addr Syncable_ledger.Query.t + and type answer := (Root_hash.t, Ledger.account) Syncable_ledger.Answer.t end module Make_test (Input : Input_intf) (Input' : sig - val num_accts : int + val num_accts : int end) = struct open Input @@ -85,12 +85,11 @@ struct 
~percent:(Percent.of_percentage 20.)) else Deferred.unit in - Linear_pipe.write aw (root_hash, query, Envelope.Incoming.local answ) - )) ; + Linear_pipe.write aw (root_hash, query, Envelope.Incoming.local answ))) ; match Async.Thread_safe.block_on_async_exn (fun () -> Sync_ledger.fetch lsync desired_root ~data:() ~equal:(fun () () -> - true ) ) + true)) with | `Ok mt -> total_queries := Some (List.length !seen_queries) ; @@ -128,7 +127,7 @@ struct ignore ( Sync_ledger.new_goal lsync !desired_root ~data:() ~equal:(fun () () -> true) - : [`New | `Repeat | `Update_data] ) ; + : [ `New | `Repeat | `Update_data ] ) ; Deferred.unit ) else let%bind answ_opt = @@ -142,24 +141,24 @@ struct (!desired_root, query, Envelope.Incoming.local answ) in ctr := !ctr + 1 ; - res )) ; + res)) ; match Async.Thread_safe.block_on_async_exn (fun () -> Sync_ledger.fetch lsync !desired_root ~data:() ~equal:(fun () () -> - true ) ) + true)) with | `Ok _ -> failwith "shouldn't happen" | `Target_changed _ -> ( - match - Async.Thread_safe.block_on_async_exn (fun () -> - Sync_ledger.wait_until_valid lsync !desired_root ) - with - | `Ok mt -> - [%test_result: Root_hash.t] ~expect:(Ledger.merkle_root l3) - (Ledger.merkle_root mt) - | `Target_changed _ -> - failwith "the target changed again" ) + match + Async.Thread_safe.block_on_async_exn (fun () -> + Sync_ledger.wait_until_valid lsync !desired_root) + with + | `Ok mt -> + [%test_result: Root_hash.t] ~expect:(Ledger.merkle_root l3) + (Ledger.merkle_root mt) + | `Target_changed _ -> + failwith "the target changed again" ) end module Root_hash = struct @@ -233,7 +232,7 @@ module Db = struct let account = Account.create aid currency_balance in ignore ( get_or_create_account ledger aid account |> Or_error.ok_exn - : [`Added | `Existed] * Location.t ) ) ; + : [ `Added | `Existed ] * Location.t )) ; (ledger, account_ids) end @@ -339,7 +338,7 @@ module Mask = struct Maskable.get_or_create_account maskable account_id account |> Or_error.ok_exn in - assert 
([%equal: [`Added | `Existed]] action `Added) ) ; + assert ([%equal: [ `Added | `Existed ]] action `Added)) ; let mask = Mask.create ~depth:Input.depth () in let attached_mask = Maskable.register_mask maskable mask in (* On the mask, all the children will have different values *) @@ -352,9 +351,7 @@ module Mask = struct Any_base.cast (module Mask.Attached) parent_mask in let child_mask = Mask.create ~depth:Input.depth () in - let attached_mask = - Maskable.register_mask parent_base child_mask - in + let attached_mask = Maskable.register_mask parent_base child_mask in List.iter account_ids ~f:(fun account_id -> let account = Account.create account_id @@ -369,9 +366,8 @@ module Mask = struct | `Existed -> Mask.Attached.set attached_mask location account | `Added -> - failwith "Expected to re-use an existing account" ) ; - construct_layered_masks (iter - 1) (child_balance / 2) - attached_mask + failwith "Expected to re-use an existing account") ; + construct_layered_masks (iter - 1) (child_balance / 2) attached_mask in ( construct_layered_masks Input.mask_layers initial_balance_multiplier attached_mask diff --git a/src/lib/test_genesis_ledger/test_genesis_ledger.ml b/src/lib/test_genesis_ledger/test_genesis_ledger.ml index 12a5bf88e48..3708a28ae87 100644 --- a/src/lib/test_genesis_ledger/test_genesis_ledger.ml +++ b/src/lib/test_genesis_ledger/test_genesis_ledger.ml @@ -1,11 +1,8 @@ -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%if -defined genesis_ledger] +[%%if defined genesis_ledger] -[%%inject -"genesis_ledger", genesis_ledger] +[%%inject "genesis_ledger", genesis_ledger] include Genesis_ledger.Make (struct include (val Genesis_ledger.fetch_ledger_exn genesis_ledger) @@ -17,7 +14,6 @@ end) [%%else] -[%%optcomp.error -"\"genesis_ledger\" not set in config.mlh"] +[%%optcomp.error "\"genesis_ledger\" not set in config.mlh"] [%%endif] diff --git a/src/lib/test_util/test_util.ml b/src/lib/test_util/test_util.ml index a1ed88267ab..1fb1fce9bf9 100644 
--- a/src/lib/test_util/test_util.ml +++ b/src/lib/test_util/test_util.ml @@ -6,7 +6,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let to_string b = if b then "1" else "0" in String.concat ~sep:" " (List.map trips ~f:(fun (b1, b2, b3) -> - to_string b1 ^ to_string b2 ^ to_string b3 )) + to_string b1 ^ to_string b2 ^ to_string b3)) let checked_to_unchecked typ1 typ2 checked input = let open Impl in @@ -47,7 +47,7 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) = struct let arbitrary_string ~len = String.init (Random.int len) ~f:(fun _ -> - Char.of_int_exn (Random.int_incl 0 255) ) + Char.of_int_exn (Random.int_incl 0 255)) let with_randomness r f = let s = Caml.Random.get_state () in diff --git a/src/lib/time_simulator/time_simulator.ml b/src/lib/time_simulator/time_simulator.ml index 5b129e37782..59becce152b 100644 --- a/src/lib/time_simulator/time_simulator.ml +++ b/src/lib/time_simulator/time_simulator.ml @@ -14,9 +14,9 @@ let modulus = Int64.( % ) let ( < ) = Int64.( < ) module Action = struct - type nonrec t = {at: t; perform: t Ivar.t; afterwards: unit Deferred.t} + type nonrec t = { at : t; perform : t Ivar.t; afterwards : unit Deferred.t } - let compare {at; _} {at= at'; _} = compare at at' + let compare { at; _ } { at = at'; _ } = compare at at' end (* Seconds in floating point *) @@ -30,14 +30,16 @@ end module Controller = struct type nonrec t = - { mutable last_time: t - ; mutable last_snapshot: Time.t - ; actions: Action.t Pairing_heap.t } + { mutable last_time : t + ; mutable last_snapshot : Time.t + ; actions : Action.t Pairing_heap.t + } let create () = - { last_time= Int64.zero - ; last_snapshot= Time.now () - ; actions= Pairing_heap.create ~cmp:Action.compare () } + { last_time = Int64.zero + ; last_snapshot = Time.now () + ; actions = Pairing_heap.create ~cmp:Action.compare () + } let fast_forward t time = if time < t.last_time then () @@ -73,7 +75,7 @@ module Controller = struct *) let tick t = let exec t = - let 
{Action.perform; at; afterwards} = Pairing_heap.pop_exn t.actions in + let { Action.perform; at; afterwards } = Pairing_heap.pop_exn t.actions in fast_forward t at ; Ivar.fill_if_empty perform t.last_time ; afterwards @@ -83,7 +85,7 @@ module Controller = struct | Some _, `First -> let%bind () = exec t in go `No - | Some {Action.at; _}, `No when at < t.last_time -> + | Some { Action.at; _ }, `No when at < t.last_time -> let%bind () = exec t in go `No | Some _, `No -> @@ -98,25 +100,26 @@ let now = Controller.now module Timeout = struct type 'a t = - {d: 'a Deferred.t; elt: Action.t Pairing_heap.Elt.t; cancel: 'a Ivar.t} + { d : 'a Deferred.t; elt : Action.t Pairing_heap.Elt.t; cancel : 'a Ivar.t } let create (ctrl : Controller.t) span ~f = let ivar = Ivar.create () in let cancel = Ivar.create () in - let d = Deferred.any [Ivar.read ivar >>| f; Ivar.read cancel] in + let d = Deferred.any [ Ivar.read ivar >>| f; Ivar.read cancel ] in let elt = Pairing_heap.add_removable ctrl.actions - { Action.at= add (now ctrl) span - ; perform= ivar - ; afterwards= d >>| ignore } + { Action.at = add (now ctrl) span + ; perform = ivar + ; afterwards = d >>| ignore + } in - {d; elt; cancel} + { d; elt; cancel } - let to_deferred {d; _} = d + let to_deferred { d; _ } = d - let peek {d; _} = Deferred.peek d + let peek { d; _ } = Deferred.peek d - let cancel (ctrl : Controller.t) {elt; cancel; _} a = + let cancel (ctrl : Controller.t) { elt; cancel; _ } a = Pairing_heap.remove ctrl.actions elt ; Ivar.fill_if_empty cancel a end @@ -136,9 +139,9 @@ let%test_unit "tick triggers timeouts and fast-forwards to event time" = ~expect:false !fired ; let%map () = Controller.tick ctrl in [%test_result: Bool.t] ~message:"We ticked" ~expect:true !fired ; - [%test_result: Bool.t] - ~message:"Time fast-forwads to at least event time" ~expect:true - Int64.(diff (now ctrl) start >= Int64.of_int 5000) ) + [%test_result: Bool.t] ~message:"Time fast-forwads to at least event time" + ~expect:true + 
Int64.(diff (now ctrl) start >= Int64.of_int 5000)) let%test_unit "tick triggers timeouts and adjusts to system time" = let ctrl = Controller.create () in @@ -161,7 +164,7 @@ let%test_unit "tick triggers timeouts and adjusts to system time" = "Since 10ms of real time passed, we need to jump more than the 5ms \ of the event" ~expect:true - (Int64.( >= ) (diff (now ctrl) start) (Int64.of_int 5)) ) + (Int64.( >= ) (diff (now ctrl) start) (Int64.of_int 5))) let%test_unit "tick handles multiple timeouts if necessary" = let ctrl = Controller.create () in @@ -174,7 +177,7 @@ let%test_unit "tick handles multiple timeouts if necessary" = ~f:(fun _t -> count := !count + 1) : _ Timeout.t ) in - List.iter [2; 3; 5; 500] ~f:timeout ; + List.iter [ 2; 3; 5; 500 ] ~f:timeout ; Async.Thread_safe.block_on_async_exn (fun () -> let%bind () = Async.after (Time.Span.of_ms 7.) in [%test_result: Int.t] @@ -189,7 +192,7 @@ let%test_unit "tick handles multiple timeouts if necessary" = "Since 10ms of real time passed, we need to jump more than the 5ms \ of the event" ~expect:true - Int64.(diff (now ctrl) start >= Int64.of_int 7) ) + Int64.(diff (now ctrl) start >= Int64.of_int 7)) let%test_unit "cancelling a timeout means it won't fire" = let ctrl = Controller.create () in @@ -199,7 +202,7 @@ let%test_unit "cancelling a timeout means it won't fire" = (Span.of_ms (Int64.of_int x)) ~f:(fun _t -> message := !message ^ s) in - let tokens = List.map [(2, "a"); (3, "b"); (5, "c")] ~f:timeout in + let tokens = List.map [ (2, "a"); (3, "b"); (5, "c") ] ~f:timeout in (* Cancel "b" *) Timeout.cancel ctrl (List.nth_exn tokens 1) () ; Async.Thread_safe.block_on_async_exn (fun () -> @@ -207,4 +210,4 @@ let%test_unit "cancelling a timeout means it won't fire" = let%map () = Controller.tick ctrl in [%test_result: String.t] ~message:"We only triggered the events that we didn't cancel" - ~expect:"ac" !message ) + ~expect:"ac" !message) diff --git a/src/lib/timeout_lib/timeout_lib.ml 
b/src/lib/timeout_lib/timeout_lib.ml index e366ef2a560..06cddce1a27 100644 --- a/src/lib/timeout_lib/timeout_lib.ml +++ b/src/lib/timeout_lib/timeout_lib.ml @@ -39,7 +39,7 @@ module Timeout_intf (Time : Time_intf) = struct timeout_duration:Time.Span.t -> Time.Controller.t -> 'a Deferred.t - -> [`Ok of 'a | `Timeout] Deferred.t + -> [ `Ok of 'a | `Timeout ] Deferred.t val await_exn : timeout_duration:Time.Span.t @@ -51,30 +51,31 @@ end module Make (Time : Time_intf) : Timeout_intf(Time).S = struct type 'a t = - { deferred: 'a Deferred.t - ; cancel: 'a -> unit - ; start_time: Time.t - ; span: Time.Span.t - ; ctrl: Time.Controller.t } + { deferred : 'a Deferred.t + ; cancel : 'a -> unit + ; start_time : Time.t + ; span : Time.Span.t + ; ctrl : Time.Controller.t + } let create ctrl span ~f:action = let open Deferred.Let_syntax in let cancel_ivar = Ivar.create () in let timeout = after (Time.Span.to_time_ns_span span) >>| fun () -> None in let deferred = - Deferred.any [Ivar.read cancel_ivar; timeout] + Deferred.any [ Ivar.read cancel_ivar; timeout ] >>| function None -> action (Time.now ctrl) | Some x -> x in let cancel value = Ivar.fill_if_empty cancel_ivar (Some value) in - {ctrl; deferred; cancel; start_time= Time.now ctrl; span} + { ctrl; deferred; cancel; start_time = Time.now ctrl; span } - let to_deferred {deferred; _} = deferred + let to_deferred { deferred; _ } = deferred - let peek {deferred; _} = Deferred.peek deferred + let peek { deferred; _ } = Deferred.peek deferred - let cancel _ {cancel; _} value = cancel value + let cancel _ { cancel; _ } value = cancel value - let remaining_time {ctrl: _; start_time; span; _} = + let remaining_time { ctrl : _; start_time; span; _ } = let current_time = Time.now ctrl in let time_elapsed = Time.diff current_time start_time in Time.Span.(span - time_elapsed) @@ -86,12 +87,12 @@ module Make (Time : Time_intf) : Timeout_intf(Time).S = struct ( create time_controller timeout_duration ~f:(fun x -> if Ivar.is_full ivar then 
[%log' error (Logger.create ())] "Ivar.fill bug is here!" ; - Ivar.fill_if_empty ivar x ) - : unit t ) ) + Ivar.fill_if_empty ivar x) + : unit t )) in Deferred.( choose - [choice deferred (fun x -> `Ok x); choice timeout (Fn.const `Timeout)]) + [ choice deferred (fun x -> `Ok x); choice timeout (Fn.const `Timeout) ]) let await_exn ~timeout_duration time_controller deferred = match%map await ~timeout_duration time_controller deferred with @@ -105,8 +106,8 @@ module Core_time = Make (struct include ( Core.Time : module type of Core.Time - with module Span := Core.Time.Span - and type underlying = float ) + with module Span := Core.Time.Span + and type underlying = float ) module Controller = struct type t = unit diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml index d1fc7017b70..78c37c2be32 100644 --- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml +++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml @@ -30,26 +30,25 @@ let get_status ~frontier_broadcast_pipe ~transaction_pool cmd = ~error:(Error.of_string "Invalid signature") |> Result.map ~f:(fun x -> Transaction_hash.User_command_with_valid_signature.create - (Signed_command x) ) + (Signed_command x)) in let resource_pool = Transaction_pool.resource_pool transaction_pool in match Broadcast_pipe.Reader.peek frontier_broadcast_pipe with | None -> State.Unknown | Some transition_frontier -> - with_return (fun {return} -> + with_return (fun { return } -> let best_tip_path = Transition_frontier.best_tip_path transition_frontier in let in_breadcrumb breadcrumb = List.exists (Transition_frontier.Breadcrumb.commands breadcrumb) - ~f:(fun {data= cmd'; _} -> + ~f:(fun { data = cmd'; _ } -> match cmd' with | Snapp_command _ -> false | Signed_command cmd' -> - Signed_command.equal cmd (Signed_command.forget_check cmd') - ) + Signed_command.equal cmd (Signed_command.forget_check 
cmd')) in if List.exists ~f:in_breadcrumb best_tip_path then return State.Included ; @@ -59,7 +58,7 @@ let get_status ~frontier_broadcast_pipe ~transaction_pool cmd = then return State.Pending ; if Transaction_pool.Resource_pool.member resource_pool check_cmd then return State.Pending ; - State.Unknown ) + State.Unknown) let%test_module "transaction_status" = ( module struct @@ -90,7 +89,7 @@ let%test_module "transaction_status" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) let key_gen = let open Quickcheck.Generator in @@ -137,8 +136,9 @@ let%test_module "transaction_status" = ~metadata: [ ( "transactions" , Transaction_pool.Resource_pool.Diff.to_yojson transactions - ) ] ; - Deferred.unit ) ; + ) + ] ; + Deferred.unit) ; (* Need to wait for transaction_pool to see the transition_frontier *) let%map () = Async.Scheduler.yield_until_no_jobs_remain () in (transaction_pool, local_writer) @@ -154,14 +154,14 @@ let%test_module "transaction_status" = in let%bind () = Strict_pipe.Writer.write local_diffs_writer - ([Signed_command user_command], Fn.const ()) + ([ Signed_command user_command ], Fn.const ()) in let%map () = Async.Scheduler.yield_until_no_jobs_remain () in [%log info] "Checking status" ; [%test_eq: State.t] ~equal:State.equal State.Unknown ( Or_error.ok_exn @@ get_status ~frontier_broadcast_pipe ~transaction_pool - user_command ) ) ) + user_command ))) let%test_unit "A pending transaction is either in the transition frontier \ or transaction pool, but not in the best path of the \ @@ -178,7 +178,7 @@ let%test_module "transaction_status" = in let%bind () = Strict_pipe.Writer.write local_diffs_writer - ([Signed_command user_command], Fn.const ()) + ([ Signed_command user_command ], Fn.const ()) in let%map () = Async.Scheduler.yield_until_no_jobs_remain () in let 
status = @@ -187,7 +187,7 @@ let%test_module "transaction_status" = user_command in [%log info] "Computing status" ; - [%test_eq: State.t] ~equal:State.equal State.Pending status ) ) + [%test_eq: State.t] ~equal:State.equal State.Pending status)) let%test_unit "An unknown transaction does not appear in the transition \ frontier or transaction pool " = @@ -216,7 +216,7 @@ let%test_module "transaction_status" = let%bind () = Strict_pipe.Writer.write local_diffs_writer ( List.map pool_user_commands ~f:(fun x -> - User_command.Signed_command x ) + User_command.Signed_command x) , Fn.const () ) in let%map () = Async.Scheduler.yield_until_no_jobs_remain () in @@ -224,5 +224,5 @@ let%test_module "transaction_status" = [%test_eq: State.t] ~equal:State.equal State.Unknown ( Or_error.ok_exn @@ get_status ~frontier_broadcast_pipe ~transaction_pool - unknown_user_command ) ) ) + unknown_user_command ))) end ) diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.mli b/src/lib/transaction_inclusion_status/transaction_inclusion_status.mli index 87640b2640c..21fe8ddb4bd 100644 --- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.mli +++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.mli @@ -19,8 +19,8 @@ module State : sig end val get_status : - frontier_broadcast_pipe:Transition_frontier.t Option.t - Broadcast_pipe.Reader.t + frontier_broadcast_pipe: + Transition_frontier.t Option.t Broadcast_pipe.Reader.t -> transaction_pool:Network_pool.Transaction_pool.t -> Signed_command.t -> State.t Or_error.t diff --git a/src/lib/transaction_protocol_state/transaction_protocol_state.ml b/src/lib/transaction_protocol_state/transaction_protocol_state.ml index 37e5d9dfc03..59786c2b5b6 100644 --- a/src/lib/transaction_protocol_state/transaction_protocol_state.ml +++ b/src/lib/transaction_protocol_state/transaction_protocol_state.ml @@ -20,16 +20,16 @@ module Poly = struct [%%versioned module Stable = struct module V1 = struct - 
type 'a t = {transaction: 'a; block_data: Block_data.Stable.V1.t} + type 'a t = { transaction : 'a; block_data : Block_data.Stable.V1.t } [@@deriving sexp] - let to_latest a_latest {transaction; block_data} = - {transaction= a_latest transaction; block_data} + let to_latest a_latest { transaction; block_data } = + { transaction = a_latest transaction; block_data } - let of_latest a_latest {transaction; block_data} = + let of_latest a_latest { transaction; block_data } = let open Result.Let_syntax in let%map transaction = a_latest transaction in - {transaction; block_data} + { transaction; block_data } end end] end diff --git a/src/lib/transaction_protocol_state/transaction_protocol_state.mli b/src/lib/transaction_protocol_state/transaction_protocol_state.mli index 21a000ef522..6770a7a4a90 100644 --- a/src/lib/transaction_protocol_state/transaction_protocol_state.mli +++ b/src/lib/transaction_protocol_state/transaction_protocol_state.mli @@ -24,7 +24,7 @@ module Poly : sig [%%versioned: module Stable : sig module V1 : sig - type 'a t = {transaction: 'a; block_data: Block_data.Stable.V1.t} + type 'a t = { transaction : 'a; block_data : Block_data.Stable.V1.t } [@@deriving sexp] end end] @@ -37,8 +37,7 @@ module Stable : sig val to_latest : ('a -> 'b) -> 'a t -> 'b t - val of_latest : - ('a -> ('b, 'err) Result.t) -> 'a t -> ('b t, 'err) Result.t + val of_latest : ('a -> ('b, 'err) Result.t) -> 'a t -> ('b t, 'err) Result.t end end] diff --git a/src/lib/transaction_snark/transaction_snark.ml b/src/lib/transaction_snark/transaction_snark.ml index 7a797e4442c..e85a45a22e5 100644 --- a/src/lib/transaction_snark/transaction_snark.ml +++ b/src/lib/transaction_snark/transaction_snark.ml @@ -35,7 +35,8 @@ module Proof_type = struct [%%versioned module Stable = struct module V1 = struct - type t = [`Base | `Merge] [@@deriving compare, equal, hash, sexp, yojson] + type t = [ `Base | `Merge ] + [@@deriving compare, equal, hash, sexp, yojson] let to_latest = Fn.id end @@ -60,23 
+61,23 @@ module Pending_coinbase_stack_state = struct module Stable = struct module V1 = struct type 'pending_coinbase t = - {source: 'pending_coinbase; target: 'pending_coinbase} + { source : 'pending_coinbase; target : 'pending_coinbase } [@@deriving sexp, hash, compare, equal, fields, yojson, hlist] - let to_latest pending_coinbase {source; target} = - {source= pending_coinbase source; target= pending_coinbase target} + let to_latest pending_coinbase { source; target } = + { source = pending_coinbase source; target = pending_coinbase target } end end] let typ pending_coinbase = Tick.Typ.of_hlistable - [pending_coinbase; pending_coinbase] + [ pending_coinbase; pending_coinbase ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end type 'pending_coinbase poly = 'pending_coinbase Poly.t = - {source: 'pending_coinbase; target: 'pending_coinbase} + { source : 'pending_coinbase; target : 'pending_coinbase } [@@deriving sexp, hash, compare, equal, fields, yojson] (* State of the coinbase stack for the current transaction snark *) @@ -94,12 +95,12 @@ module Pending_coinbase_stack_state = struct let typ = Poly.typ Pending_coinbase.Stack.typ - let to_input ({source; target} : t) = + let to_input ({ source; target } : t) = Random_oracle.Input.append (Pending_coinbase.Stack.to_input source) (Pending_coinbase.Stack.to_input target) - let var_to_input ({source; target} : var) = + let var_to_input ({ source; target } : var) = Random_oracle.Input.append (Pending_coinbase.Stack.var_to_input source) (Pending_coinbase.Stack.var_to_input target) @@ -120,14 +121,15 @@ module Statement = struct , 'token_id , 'sok_digest ) t = - { source: 'ledger_hash - ; target: 'ledger_hash - ; supply_increase: 'amount - ; pending_coinbase_stack_state: 'pending_coinbase - ; fee_excess: 'fee_excess - ; next_available_token_before: 'token_id - ; next_available_token_after: 'token_id - ; sok_digest: 'sok_digest } + { source : 'ledger_hash + ; target : 
'ledger_hash + ; supply_increase : 'amount + ; pending_coinbase_stack_state : 'pending_coinbase + ; fee_excess : 'fee_excess + ; next_available_token_before : 'token_id + ; next_available_token_after : 'token_id + ; sok_digest : 'sok_digest + } [@@deriving compare, equal, hash, sexp, yojson, hlist] let to_latest ledger_hash amount pending_coinbase fee_excess' token_id @@ -139,21 +141,22 @@ module Statement = struct ; fee_excess ; next_available_token_before ; next_available_token_after - ; sok_digest } = - { source= ledger_hash source - ; target= ledger_hash target - ; supply_increase= amount supply_increase - ; pending_coinbase_stack_state= + ; sok_digest + } = + { source = ledger_hash source + ; target = ledger_hash target + ; supply_increase = amount supply_increase + ; pending_coinbase_stack_state = pending_coinbase pending_coinbase_stack_state - ; fee_excess= fee_excess' fee_excess - ; next_available_token_before= token_id next_available_token_before - ; next_available_token_after= token_id next_available_token_after - ; sok_digest= sok_digest' sok_digest } + ; fee_excess = fee_excess' fee_excess + ; next_available_token_before = token_id next_available_token_before + ; next_available_token_after = token_id next_available_token_after + ; sok_digest = sok_digest' sok_digest + } end end] - let typ ledger_hash amount pending_coinbase fee_excess token_id sok_digest - = + let typ ledger_hash amount pending_coinbase fee_excess token_id sok_digest = Tick.Typ.of_hlistable [ ledger_hash ; ledger_hash @@ -162,7 +165,8 @@ module Statement = struct ; fee_excess ; token_id ; token_id - ; sok_digest ] + ; sok_digest + ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist end @@ -181,14 +185,15 @@ module Statement = struct , 'token_id , 'sok_digest ) Poly.t = - { source: 'ledger_hash - ; target: 'ledger_hash - ; supply_increase: 'amount - ; pending_coinbase_stack_state: 'pending_coinbase - ; fee_excess: 'fee_excess - ; 
next_available_token_before: 'token_id - ; next_available_token_after: 'token_id - ; sok_digest: 'sok_digest } + { source : 'ledger_hash + ; target : 'ledger_hash + ; supply_increase : 'amount + ; pending_coinbase_stack_state : 'pending_coinbase + ; fee_excess : 'fee_excess + ; next_available_token_before : 'token_id + ; next_available_token_after : 'token_id + ; sok_digest : 'sok_digest + } [@@deriving compare, equal, hash, sexp, yojson] [%%versioned @@ -248,7 +253,8 @@ module Statement = struct ; fee_excess ; next_available_token_before ; next_available_token_after - ; sok_digest } = + ; sok_digest + } = let input = Array.reduce_exn ~f:Random_oracle.Input.append [| Sok_message.Digest.to_input sok_digest @@ -258,7 +264,8 @@ module Statement = struct ; Amount.to_input supply_increase ; Fee_excess.to_input fee_excess ; Token_id.to_input next_available_token_before - ; Token_id.to_input next_available_token_after |] + ; Token_id.to_input next_available_token_after + |] in if !top_hash_logging_enabled then Format.eprintf @@ -280,7 +287,8 @@ module Statement = struct ; fee_excess ; next_available_token_before ; next_available_token_after - ; sok_digest } = + ; sok_digest + } = let open Tick in let open Checked.Let_syntax in let%bind fee_excess = Fee_excess.to_input_checked fee_excess in @@ -300,7 +308,8 @@ module Statement = struct ; Amount.var_to_input supply_increase ; fee_excess ; next_available_token_before - ; next_available_token_after |] + ; next_available_token_after + |] in let%map () = as_prover @@ -317,11 +326,12 @@ module Statement = struct (Array.to_list input.bitstrings) in Format.eprintf - !"Generating checked top hash from:@.%{sexp: (Field.t, \ - bool) Random_oracle.Input.t}@." - { Random_oracle.Input.field_elements= + !"Generating checked top hash from:@.%{sexp: (Field.t, bool) \ + Random_oracle.Input.t}@." 
+ { Random_oracle.Input.field_elements = Array.of_list field_elements - ; bitstrings= Array.of_list bitstrings } + ; bitstrings = Array.of_list bitstrings + } else return ()) in input @@ -360,16 +370,18 @@ module Statement = struct Frozen_ledger_hash.t})" s1.target s2.source in - ( { source= s1.source - ; target= s2.target + ( { source = s1.source + ; target = s2.target ; fee_excess - ; next_available_token_before= s1.next_available_token_before - ; next_available_token_after= s2.next_available_token_after + ; next_available_token_before = s1.next_available_token_before + ; next_available_token_after = s2.next_available_token_after ; supply_increase - ; pending_coinbase_stack_state= - { source= s1.pending_coinbase_stack_state.source - ; target= s2.pending_coinbase_stack_state.target } - ; sok_digest= () } + ; pending_coinbase_stack_state = + { source = s1.pending_coinbase_stack_state.source + ; target = s2.pending_coinbase_stack_state.target + } + ; sok_digest = () + } : t ) include Hashable.Make_binable (Stable.Latest) @@ -394,9 +406,10 @@ module Statement = struct ; next_available_token_before ; next_available_token_after ; supply_increase - ; pending_coinbase_stack_state= - {source= pending_coinbase_before; target= pending_coinbase_after} - ; sok_digest= () } + ; pending_coinbase_stack_state = + { source = pending_coinbase_before; target = pending_coinbase_after } + ; sok_digest = () + } : t ) end @@ -406,7 +419,7 @@ module Proof = struct module V1 = struct type t = Pickles.Proof.Branching_2.Stable.V1.t [@@deriving - version {asserted}, yojson, bin_io, compare, equal, sexp, hash] + version { asserted }, yojson, bin_io, compare, equal, sexp, hash] let to_latest = Fn.id end @@ -417,7 +430,7 @@ end module Stable = struct module V1 = struct type t = - {statement: Statement.With_sok.Stable.V1.t; proof: Proof.Stable.V1.t} + { statement : Statement.With_sok.Stable.V1.t; proof : Proof.Stable.V1.t } [@@deriving compare, equal, fields, sexp, version, yojson, hash] let 
to_latest = Fn.id @@ -426,7 +439,7 @@ end] let proof t = t.proof -let statement t = {t.statement with sok_digest= ()} +let statement t = { t.statement with sok_digest = () } let sok_digest t = t.statement.sok_digest @@ -435,7 +448,7 @@ let to_yojson = Stable.Latest.to_yojson let create ~source ~target ~supply_increase ~pending_coinbase_stack_state ~fee_excess ~next_available_token_before ~next_available_token_after ~sok_digest ~proof = - { statement= + { statement = { source ; target ; next_available_token_before @@ -443,8 +456,10 @@ let create ~source ~target ~supply_increase ~pending_coinbase_stack_state ; supply_increase ; pending_coinbase_stack_state ; fee_excess - ; sok_digest } - ; proof } + ; sok_digest + } + ; proof + } open Tick open Let_syntax @@ -460,17 +475,18 @@ module Base = struct to the fee-payer if executing the user command will later fail. *) type 'bool t = - { predicate_failed: 'bool (* All *) - ; source_not_present: 'bool (* All *) - ; receiver_not_present: 'bool (* Delegate, Mint_tokens *) - ; amount_insufficient_to_create: 'bool (* Payment only *) - ; token_cannot_create: 'bool (* Payment only, token<>default *) - ; source_insufficient_balance: 'bool (* Payment only *) - ; source_minimum_balance_violation: 'bool (* Payment only *) - ; source_bad_timing: 'bool (* Payment only *) - ; receiver_exists: 'bool (* Create_account only *) - ; not_token_owner: 'bool (* Create_account, Mint_tokens *) - ; token_auth: 'bool (* Create_account *) } + { predicate_failed : 'bool (* All *) + ; source_not_present : 'bool (* All *) + ; receiver_not_present : 'bool (* Delegate, Mint_tokens *) + ; amount_insufficient_to_create : 'bool (* Payment only *) + ; token_cannot_create : 'bool (* Payment only, token<>default *) + ; source_insufficient_balance : 'bool (* Payment only *) + ; source_minimum_balance_violation : 'bool (* Payment only *) + ; source_bad_timing : 'bool (* Payment only *) + ; receiver_exists : 'bool (* Create_account only *) + ; not_token_owner : 
'bool (* Create_account, Mint_tokens *) + ; token_auth : 'bool (* Create_account *) + } let num_fields = 11 @@ -485,7 +501,8 @@ module Base = struct ; source_bad_timing ; receiver_exists ; not_token_owner - ; token_auth } = + ; token_auth + } = [ predicate_failed ; source_not_present ; receiver_not_present @@ -496,7 +513,8 @@ module Base = struct ; source_bad_timing ; receiver_exists ; not_token_owner - ; token_auth ] + ; token_auth + ] let of_list = function | [ predicate_failed @@ -509,7 +527,8 @@ module Base = struct ; source_bad_timing ; receiver_exists ; not_token_owner - ; token_auth ] -> + ; token_auth + ] -> { predicate_failed ; source_not_present ; receiver_not_present @@ -520,7 +539,8 @@ module Base = struct ; source_bad_timing ; receiver_exists ; not_token_owner - ; token_auth } + ; token_auth + } | _ -> failwith "Transaction_snark.Base.User_command_failure.to_list: bad length" @@ -542,7 +562,7 @@ module Base = struct ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~txn_global_slot ~creating_new_token ~(fee_payer_account : Account.t) ~(receiver_account : Account.t) ~(source_account : Account.t) - ({payload; signature= _; signer= _} : Transaction_union.t) = + ({ payload; signature = _; signer = _ } : Transaction_union.t) = match payload.body.tag with | Fee_transfer | Coinbase -> (* Not user commands, return no failure. *) @@ -564,10 +584,11 @@ module Base = struct (* This should shadow the logic in [Sparse_ledger]. 
*) let fee_payer_account = { fee_payer_account with - balance= + balance = Option.value_exn ?here:None ?error:None ?message:None @@ Balance.sub_amount fee_payer_account.balance - (Amount.of_fee payload.common.fee) } + (Amount.of_fee payload.common.fee) + } in let predicate_failed, predicate_result = if @@ -627,14 +648,15 @@ module Base = struct { predicate_failed ; source_not_present ; receiver_not_present - ; amount_insufficient_to_create= false - ; token_cannot_create= false - ; source_insufficient_balance= false - ; source_minimum_balance_violation= false - ; source_bad_timing= false - ; receiver_exists= false - ; not_token_owner= false - ; token_auth= false } + ; amount_insufficient_to_create = false + ; token_cannot_create = false + ; source_insufficient_balance = false + ; source_minimum_balance_violation = false + ; source_bad_timing = false + ; receiver_exists = false + ; not_token_owner = false + ; token_auth = false + } | Payment -> let receiver_account = if Account_id.equal receiver fee_payer then fee_payer_account @@ -710,15 +732,16 @@ module Base = struct in { predicate_failed ; source_not_present - ; receiver_not_present= false + ; receiver_not_present = false ; amount_insufficient_to_create ; token_cannot_create ; source_insufficient_balance ; source_minimum_balance_violation ; source_bad_timing - ; receiver_exists= false - ; not_token_owner= false - ; token_auth= false } + ; receiver_exists = false + ; not_token_owner = false + ; token_auth = false + } | Create_account -> let receiver_account = if Account_id.equal receiver fee_payer then fee_payer_account @@ -732,16 +755,17 @@ module Base = struct in let receiver_account = { receiver_account with - public_key= Account_id.public_key receiver - ; token_id= Account_id.token_id receiver - ; token_permissions= + public_key = Account_id.public_key receiver + ; token_id = Account_id.token_id receiver + ; token_permissions = ( if receiver_exists then receiver_account.token_permissions else if 
creating_new_token then Token_permissions.Token_owned - {disable_new_accounts= payload.body.token_locked} + { disable_new_accounts = payload.body.token_locked } else Token_permissions.Not_owned - {account_disabled= payload.body.token_locked} ) } + { account_disabled = payload.body.token_locked } ) + } in let source_account = if Account_id.equal source fee_payer then fee_payer_account @@ -759,13 +783,13 @@ module Base = struct (false, false) else match source_account.token_permissions with - | Token_owned {disable_new_accounts} -> + | Token_owned { disable_new_accounts } -> ( not ( Bool.equal payload.body.token_locked disable_new_accounts || predicate_result ) , false ) - | Not_owned {account_disabled} -> + | Not_owned { account_disabled } -> (* NOTE: This [token_auth] value doesn't matter, since we know that there will be a [not_token_owner] failure anyway. We choose this value, since it aliases to the @@ -773,23 +797,23 @@ module Base = struct and so simplifies the snark code. *) ( not - ( Bool.equal payload.body.token_locked - account_disabled + ( Bool.equal payload.body.token_locked account_disabled || predicate_result ) , true ) in let ret = - { predicate_failed= false + { predicate_failed = false ; source_not_present - ; receiver_not_present= false - ; amount_insufficient_to_create= false - ; token_cannot_create= false - ; source_insufficient_balance= false - ; source_minimum_balance_violation= false - ; source_bad_timing= false + ; receiver_not_present = false + ; amount_insufficient_to_create = false + ; token_cannot_create = false + ; source_insufficient_balance = false + ; source_minimum_balance_violation = false + ; source_bad_timing = false ; receiver_exists ; not_token_owner - ; token_auth } + ; token_auth + } in (* Note: This logic is dependent upon all failures above, so we have to calculate it separately here. *) @@ -807,10 +831,10 @@ module Base = struct the source (=receiver) account will not be present. 
*) { ret with - source_not_present= true - ; not_token_owner= + source_not_present = true + ; not_token_owner = not Token_id.(equal default (Account_id.token_id receiver)) - ; token_auth= + ; token_auth = not ((not payload.body.token_locked) || predicate_result) } else ret @@ -841,14 +865,15 @@ module Base = struct { predicate_failed ; source_not_present ; receiver_not_present - ; amount_insufficient_to_create= false - ; token_cannot_create= false - ; source_insufficient_balance= false - ; source_minimum_balance_violation= false - ; source_bad_timing= false - ; receiver_exists= false + ; amount_insufficient_to_create = false + ; token_cannot_create = false + ; source_insufficient_balance = false + ; source_minimum_balance_violation = false + ; source_bad_timing = false + ; receiver_exists = false ; not_token_owner - ; token_auth= false } ) + ; token_auth = false + } ) let%snarkydef compute_as_prover ~constraint_constants ~txn_global_slot ~creating_new_token ~next_available_token (txn : Transaction_union.var) @@ -868,9 +893,7 @@ module Base = struct let fee_payer = Account_id.create txn.payload.common.fee_payer_pk fee_token in - let source = - Account_id.create txn.payload.body.source_pk token - in + let source = Account_id.create txn.payload.body.source_pk token in let receiver = Account_id.create txn.payload.body.receiver_pk token in @@ -941,9 +964,7 @@ module Base = struct let%bind receiver_account, _path = read (Typ.Internal.ref ()) receiver_account in - let%bind creating_new_token = - read Boolean.typ creating_new_token - in + let%bind creating_new_token = read Boolean.typ creating_new_token in let%map txn_global_slot = read Global_slot.typ txn_global_slot in compute_unchecked ~constraint_constants ~txn_global_slot ~creating_new_token ~fee_payer_account ~source_account @@ -959,11 +980,11 @@ module Base = struct let g = exists Inner_curve.typ ~compute:(fun _ -> Inner_curve.one) in ignore ( Pickles.Step_main_inputs.Ops.scale_fast g - (`Plus_two_to_len [|b; b|]) + 
(`Plus_two_to_len [| b; b |]) : Pickles.Step_main_inputs.Inner_curve.t ) ; ignore ( Pickles.Pairing_main.Scalar_challenge.endo g - (Scalar_challenge [b]) + (Scalar_challenge [ b ]) : Field.t * Field.t )) let%snarkydef check_signature shifted ~payload ~is_user_command ~signer @@ -972,7 +993,7 @@ module Base = struct let%bind verifies = Schnorr.Checked.verifies shifted signature signer input in - Boolean.Assert.any [Boolean.not is_user_command; verifies] + Boolean.Assert.any [ Boolean.not is_user_command; verifies ] let check_timing ~balance_check ~timed_balance_check ~account ~txn_amount ~txn_global_slot = @@ -984,7 +1005,8 @@ module Base = struct ; cliff_time ; cliff_amount ; vesting_period - ; vesting_increment } = + ; vesting_increment + } = account.timing in let int_of_field field = @@ -1007,22 +1029,21 @@ module Base = struct let%bind `Underflow underflow, proposed_balance_int = make_checked (fun () -> Snarky_integer.Integer.subtract_unpacking_or_zero ~m balance_int - txn_amount_int ) + txn_amount_int) in (* underflow indicates insufficient balance *) let%bind () = balance_check (Boolean.not underflow) in let%bind sufficient_timed_balance = make_checked (fun () -> - Snarky_integer.Integer.(gte ~m proposed_balance_int curr_min_balance) - ) + Snarky_integer.Integer.(gte ~m proposed_balance_int curr_min_balance)) in let%bind () = - let%bind ok = Boolean.(any [not is_timed; sufficient_timed_balance]) in + let%bind ok = Boolean.(any [ not is_timed; sufficient_timed_balance ]) in timed_balance_check ok in let%bind is_timed_balance_zero = make_checked (fun () -> - Snarky_integer.Integer.equal ~m curr_min_balance zero_int ) + Snarky_integer.Integer.equal ~m curr_min_balance zero_int) in (* if current min balance is zero, then timing becomes untimed *) let%bind is_untimed = Boolean.((not is_timed) ||| is_timed_balance_zero) in @@ -1045,9 +1066,9 @@ module Base = struct type _ t += | State_body : Mina_state.Protocol_state.Body.Value.t t - | Snapp_account : [`One | `Two] 
-> Snapp_account.t t + | Snapp_account : [ `One | `Two ] -> Snapp_account.t t | Fee_payer_signature : Signature.t t - | Account_signature : [`One | `Two] -> Signature.t t + | Account_signature : [ `One | `Two ] -> Signature.t t | Zero_complement : Snapp_command.Payload.Zero_proved.t t | One_complement : Snapp_statement.Complement.One_proved.t t | Two_complement : Snapp_statement.Complement.Two_proved.t t @@ -1055,19 +1076,19 @@ module Base = struct let handler ~(state_body : Mina_state.Protocol_state.Body.Value.t) ~(snapp_account1 : Snapp_account.t option) - ~(snapp_account2 : Snapp_account.t option) (c : Snapp_command.t) - handler : request -> response = - fun (With {request; respond} as r) -> - let Vector.[snapp_account1; snapp_account2] = + ~(snapp_account2 : Snapp_account.t option) (c : Snapp_command.t) handler + : request -> response = + fun (With { request; respond } as r) -> + let Vector.[ snapp_account1; snapp_account2 ] = Vector.map ~f:(Option.value ~default:Snapp_account.default) - [snapp_account1; snapp_account2] + [ snapp_account1; snapp_account2 ] in let sig1, sig2 = let control : Control.t -> Signature.t = function | Signature x -> x - | Both {signature; _} -> + | Both { signature; _ } -> signature | Proof _ | None_given -> Signature.dummy @@ -1110,23 +1131,23 @@ module Base = struct | Account_signature `Two -> respond (Provide sig2) | Zero_complement -> ( - match payload with - | Zero_proved x -> - respond (Provide x) - | _ -> - unhandled ) + match payload with + | Zero_proved x -> + respond (Provide x) + | _ -> + unhandled ) | One_complement -> ( - match payload with - | One_proved x -> - respond (Provide (Snapp_statement.Complement.One_proved.create x)) - | _ -> - unhandled ) + match payload with + | One_proved x -> + respond (Provide (Snapp_statement.Complement.One_proved.create x)) + | _ -> + unhandled ) | Two_complement -> ( - match payload with - | Two_proved x -> - respond (Provide (Snapp_statement.Complement.Two_proved.create x)) - | _ -> - 
unhandled ) + match payload with + | Two_proved x -> + respond (Provide (Snapp_statement.Complement.Two_proved.create x)) + | _ -> + unhandled ) | _ -> handler r @@ -1144,9 +1165,7 @@ module Base = struct *) let is_default x = !Token_id.(Checked.equal (var_of_t default) x) in let token_is_default = is_default token_id in - let fee_token_is_default = - is_default other_fee_payer_opt.data.token_id - in + let fee_token_is_default = is_default other_fee_payer_opt.data.token_id in let open Boolean in let excess_is_zero = !(Amount.(Checked.equal (var_of_t zero)) excess.magnitude) @@ -1155,9 +1174,13 @@ module Base = struct [ all [ not other_fee_payer_opt.is_some ; token_is_default - ; any [Sgn.Checked.is_neg excess.sgn; excess_is_zero] ] + ; any [ Sgn.Checked.is_neg excess.sgn; excess_is_zero ] + ] ; all - [other_fee_payer_opt.is_some; fee_token_is_default; excess_is_zero] + [ other_fee_payer_opt.is_some + ; fee_token_is_default + ; excess_is_zero + ] ] let snapp1_tag = side_loaded 1 @@ -1171,22 +1194,23 @@ module Base = struct exists Snapp_account.typ ~request:(fun () -> Snapp_account which) in with_label __LOC__ (fun () -> - Field.Assert.equal (fst a.snapp) (Snapp_account.Checked.digest s) ) ; - {a with snapp= s} + Field.Assert.equal (fst a.snapp) (Snapp_account.Checked.digest s)) ; + { a with snapp = s } let apply_body ~(constraint_constants : Genesis_constants.Constraint_constants.t) - ~(is_new : [`No | `Maybe of Boolean.var]) ?tag ~txn_global_slot + ~(is_new : [ `No | `Maybe of Boolean.var ]) ?tag ~txn_global_slot ~add_check ~check_auth - ({ pk= _ - ; update= {app_state; delegate; verification_key; permissions} - ; delta } : - Snapp_command.Party.Body.Checked.t) (a : Account.Checked.Unhashed.t) - : Account.var * _ = + ({ pk = _ + ; update = { app_state; delegate; verification_key; permissions } + ; delta + } : + Snapp_command.Party.Body.Checked.t) (a : Account.Checked.Unhashed.t) : + Account.var * _ = let open Impl in let r = ref [] in let update_authorized (type a) 
perm ~is_keep - ~(updated : [`Ok of a | `Flagged of a * Boolean.var]) = + ~(updated : [ `Ok of a | `Flagged of a * Boolean.var ]) = let speculative_success, `proof_must_verify x = check_auth perm in r := lazy Boolean.((not is_keep) &&& x) :: !r ; match updated with @@ -1207,11 +1231,11 @@ module Base = struct (let open Tick in let balance_check ok = [%with_label "Check snapp balance"] - (Boolean.Assert.any [ok; is_receiver]) + (Boolean.Assert.any [ ok; is_receiver ]) in let timed_balance_check ok = [%with_label "Check snapp timed balance"] - (Boolean.Assert.any [ok; is_receiver]) + (Boolean.Assert.any [ ok; is_receiver ]) in check_timing ~balance_check ~timed_balance_check ~account:a ~txn_amount:delta.magnitude ~txn_global_slot)) @@ -1247,7 +1271,7 @@ module Base = struct ~else_:balance) in let failed = Boolean.(failed1 ||| (is_new &&& failed2)) in - `Flagged (res, failed)) ) + `Flagged (res, failed))) in let snapp = let app_state = @@ -1260,34 +1284,35 @@ module Base = struct ~updated: (`Ok (Vector.map2 app_state a.snapp.app_state - ~f:(Set_or_keep.Checked.set_or_keep ~if_:Field.if_))) ) + ~f:(Set_or_keep.Checked.set_or_keep ~if_:Field.if_)))) in Option.iter tag ~f:(fun t -> - Pickles.Side_loaded.in_circuit t a.snapp.verification_key.data ) ; + Pickles.Side_loaded.in_circuit t a.snapp.verification_key.data) ; let verification_key = update_authorized a.permissions.set_verification_key ~is_keep:(Set_or_keep.Checked.is_keep verification_key) ~updated: (`Ok - (Set_or_keep.Checked.set_or_keep ~if_:Field.if_ - verification_key + (Set_or_keep.Checked.set_or_keep ~if_:Field.if_ verification_key (Lazy.force a.snapp.verification_key.hash))) in - let snapp' = {Snapp_account.verification_key; app_state} in + let snapp' = { Snapp_account.verification_key; app_state } in let r = As_prover.Ref.create As_prover.( fun () -> Some - ( { verification_key= + ( { verification_key = (* Huge hack. 
This relies on the fact that the "data" is not - used for computing the hash of the snapp account. We can't - provide the verification key since it's not available here. *) + used for computing the hash of the snapp account. We can't + provide the verification key since it's not available here. *) Some - { With_hash.data= Side_loaded_verification_key.dummy - ; hash= read_var snapp'.verification_key } - ; app_state= - read (Snapp_state.typ Field.typ) snapp'.app_state } + { With_hash.data = Side_loaded_verification_key.dummy + ; hash = read_var snapp'.verification_key + } + ; app_state = + read (Snapp_state.typ Field.typ) snapp'.app_state + } : Snapp_account.t )) in (Snapp_account.Checked.digest' snapp', r) @@ -1299,7 +1324,7 @@ module Base = struct (`Ok (Set_or_keep.Checked.set_or_keep ~if_:(fun b ~then_ ~else_ -> - !(Public_key.Compressed.Checked.if_ b ~then_ ~else_) ) + !(Public_key.Compressed.Checked.if_ b ~then_ ~else_)) delegate a.delegate)) in let permissions = @@ -1310,7 +1335,7 @@ module Base = struct (Set_or_keep.Checked.set_or_keep ~if_:Permissions.Checked.if_ permissions a.permissions)) in - ( {a with balance; snapp; delegate; permissions; timing} + ( { a with balance; snapp; delegate; permissions; timing } , `proof_must_verify proof_must_verify ) let assert_account_present public_key (acct : Account.var) ~is_new = @@ -1326,7 +1351,7 @@ module Base = struct in let%bind there_ok = (not is_new) &&& account_there in let%bind empty_ok = is_new &&& is_empty in - with_label __LOC__ (Assert.any [there_ok; empty_ok]) + with_label __LOC__ (Assert.any [ there_ok; empty_ok ]) | `No -> Assert.is_true account_there @@ -1358,7 +1383,7 @@ module Base = struct ~filter:(fun acct -> Account_id.Checked.( equal fee_payer_id (create acct.public_key acct.token_id)) - >>= Boolean.Assert.is_true ) + >>= Boolean.Assert.is_true) ~f:(fun () account -> Set_once.set_exn actual_fee_payer_nonce_and_rch [%here] (account.nonce, account.receipt_chain_hash) ; @@ -1366,11 +1391,11 @@ module 
Base = struct let%bind authorized = make_checked (fun () -> Permissions.Auth_required.Checked.eval_no_proof - ~signature_verifies account.permissions.send ) + ~signature_verifies account.permissions.send) in (* It's ok for this signature to fail if there is no separate fee payer. - Their control will be checked independently. *) - Boolean.(Assert.any [authorized; not fee_payer_is_other]) + Their control will be checked independently. *) + Boolean.(Assert.any [ authorized; not fee_payer_is_other ]) in let%bind () = [%with_label "Check fee nonce"] @@ -1378,7 +1403,7 @@ module Base = struct Account.Nonce.Checked.equal fee_payer_nonce account.nonce in (* If there is not a separate fee payer, its nonce is checked elsewhere *) - Boolean.(Assert.any [nonce_matches; not fee_payer_is_other])) + Boolean.(Assert.any [ nonce_matches; not fee_payer_is_other ])) in let%bind next_nonce = Account.Nonce.Checked.succ account.nonce in let%bind receipt_chain_hash = @@ -1405,16 +1430,17 @@ module Base = struct (Balance.Checked.sub_amount account.balance txn_amount) in { Account.Poly.balance - ; public_key= account.public_key - ; token_id= account.token_id - ; token_permissions= account.token_permissions - ; nonce= next_nonce + ; public_key = account.public_key + ; token_id = account.token_id + ; token_permissions = account.token_permissions + ; nonce = next_nonce ; receipt_chain_hash - ; delegate= account.delegate - ; voting_for= account.voting_for + ; delegate = account.delegate + ; voting_for = account.voting_for ; timing - ; permissions= account.permissions - ; snapp= account.snapp } ) + ; permissions = account.permissions + ; snapp = account.snapp + }) in (root, Set_once.get_exn actual_fee_payer_nonce_and_rch [%here]) @@ -1423,7 +1449,7 @@ module Base = struct | `Yes -> is_fee_payer | `Maybe should_step -> - Impl.Boolean.(any [is_fee_payer; not should_step]) + Impl.Boolean.(any [ is_fee_payer; not should_step ]) let update_nonce_and_rch ~payload_digest ~is_fee_payer ~should_step 
~(account : Account.var) = @@ -1437,15 +1463,17 @@ module Base = struct in let should_update = Boolean.not shouldn't_update in { account with - nonce= !(Account.Nonce.Checked.succ_if account.nonce should_update) - ; receipt_chain_hash= + nonce = !(Account.Nonce.Checked.succ_if account.nonce should_update) + ; receipt_chain_hash = !(Receipt.Chain_hash.Checked.if_ shouldn't_update - ~then_:account.receipt_chain_hash ~else_:updated) } + ~then_:account.receipt_chain_hash ~else_:updated) + } module Check_predicate = struct let snapp_self p (a : Account.Checked.Unhashed.t) = [ Snapp_predicate.Account.Checked.check_nonsnapp p a - ; Snapp_predicate.Account.Checked.check_snapp p a.snapp ] + ; Snapp_predicate.Account.Checked.check_snapp p a.snapp + ] let snapp_other (o : Snapp_predicate.Other.Checked.t) (a : Account.Checked.Unhashed.t) = @@ -1457,11 +1485,12 @@ module Base = struct ; Snapp_basic.Account_state.Checked.check o.account_transition.prev ~is_empty:Boolean.false_ ; Snapp_basic.Account_state.Checked.check o.account_transition.next - ~is_empty:Boolean.false_ ] + ~is_empty:Boolean.false_ + ] let signed_self nonce (a : Account.Checked.Unhashed.t) = let open Impl in - [run_checked (Account.Nonce.Checked.equal nonce a.nonce)] + [ run_checked (Account.Nonce.Checked.equal nonce a.nonce) ] end let modify @@ -1483,12 +1512,13 @@ module Base = struct second_delta = delta + (if is_fee_payer then fee else 0) *) { body with - delta= + delta = !(Amount.Signed.Checked.add body.delta (Amount.Signed.Checked.of_unsigned !(Amount.Checked.if_ is_fee_payer ~then_:(Amount.Checked.of_fee fee) - ~else_:(Amount.var_of_t Amount.zero)))) } + ~else_:(Amount.var_of_t Amount.zero)))) + } in let root = run_checked @@ -1507,7 +1537,7 @@ module Base = struct apply_body body account ~constraint_constants ~is_new ~tag ~txn_global_slot ~add_check ~check_auth:(fun t -> with_label __LOC__ (fun () -> - check_auth t ~signature_verifies ) ) + check_auth t ~signature_verifies)) in Set_once.set_exn 
proof_must_verify [%here] must_verify ; let account = @@ -1519,7 +1549,7 @@ module Base = struct ~then_:fee_payer_receipt_chain_hash ~else_:account.receipt_chain_hash in - {account with nonce; receipt_chain_hash}) + { account with nonce; receipt_chain_hash }) in List.iter ~f:(add_check ?label:(Some __LOC__)) @@ -1528,15 +1558,15 @@ module Base = struct ~f:(add_check ?label:(Some __LOC__)) (other_predicate account) ; update_nonce_and_rch ~payload_digest ~is_fee_payer - ~should_step:`Yes ~account:account' ) )) + ~should_step:`Yes ~account:account'))) in (root, Set_once.get_exn proof_must_verify [%here]) let compute_fee_excess ~fee ~fee_payer_id = (* Use the default token for the fee excess if it is zero. - This matches the behaviour of [Fee_excess.rebalance], which allows - [verify_complete_merge] to verify a proof without knowledge of the - particular fee tokens used. + This matches the behaviour of [Fee_excess.rebalance], which allows + [verify_complete_merge] to verify a proof without knowledge of the + particular fee tokens used. *) let open Impl in let ( ! 
) = run_checked in @@ -1548,9 +1578,10 @@ module Base = struct ~else_:fee_token) in { Fee_excess.fee_token_l - ; fee_excess_l= Fee.Signed.Checked.of_unsigned fee - ; fee_token_r= Token_id.(var_of_t default) - ; fee_excess_r= Fee.Signed.(Checked.constant zero) } + ; fee_excess_l = Fee.Signed.Checked.of_unsigned fee + ; fee_token_r = Token_id.(var_of_t default) + ; fee_excess_r = Fee.Signed.(Checked.constant zero) + } let determine_fee_payer ~token_id ~(other_fee_payer_opt : @@ -1587,7 +1618,7 @@ module Base = struct let finished = ref false in ( (fun ?label:_ x -> if finished.contents then failwith "finished" - else r := x :: r.contents ) + else r := x :: r.contents) , fun () -> finished := true ; Impl.Boolean.all r.contents ) @@ -1623,15 +1654,15 @@ module Base = struct in s2.body1.hash := s1.body2.hash ; s2.body2.hash := s1.body1.hash ; - {s2 with body1= s1.body2; body2= s1.body1} + { s2 with body1 = s1.body2; body2 = s1.body1 } in let excess = !(Amount.Signed.Checked.add s1.body1.data.delta s1.body2.data.delta) in - let ({token_id; other_fee_payer_opt} as comp + let ({ token_id; other_fee_payer_opt } as comp : _ Snapp_statement.Complement.Two_proved.Poly.t) = exists Snapp_statement.Complement.Two_proved.typ ~request:(fun () -> - Two_complement ) + Two_complement) in (* Check fee *) check_fee ~excess ~token_id ~other_fee_payer_opt ; @@ -1651,8 +1682,8 @@ module Base = struct in let payload : Snapp_command.Payload.Digested.Checked.t = Two_proved - (Snapp_statement.Complement.Two_proved.Checked.complete comp - ~one:s1 ~two:s2) + (Snapp_statement.Complement.Two_proved.Checked.complete comp ~one:s1 + ~two:s2) in let payload_digest = Snapp_command.Payload.Digested.Checked.digest payload @@ -1717,17 +1748,19 @@ module Base = struct ; Fee_excess.assert_equal_checked s.fee_excess fee_excess (* TODO: These should maybe be able to create tokens *) ; Token_id.Checked.Assert.equal s.next_available_token_after - s.next_available_token_before ]) ; + 
s.next_available_token_before + ]) ; (proof1_must_verify (), proof2_must_verify ()) let _rule ~constraint_constants : _ Pickles.Inductive_rule.t = - { identifier= "snapp-two-proved" - ; prevs= [snapp1_tag; snapp2_tag] - ; main= - (fun [t1; t2] x -> + { identifier = "snapp-two-proved" + ; prevs = [ snapp1_tag; snapp2_tag ] + ; main = + (fun [ t1; t2 ] x -> let s1, s2 = main t1 t2 ~constraint_constants x in - [s1; s2] ) - ; main_value= (fun _ _ -> [true; true]) } + [ s1; s2 ]) + ; main_value = (fun _ _ -> [ true; true ]) + } end module One_proved = struct @@ -1754,10 +1787,11 @@ module Base = struct ; other_fee_payer_opt ; second_starts_empty ; second_ends_empty - ; account2_nonce } as comp + ; account2_nonce + } as comp : _ Snapp_statement.Complement.One_proved.Poly.t) = exists Snapp_statement.Complement.One_proved.typ ~request:(fun () -> - One_complement ) + One_complement) in (* Check fee *) check_fee ~excess ~token_id ~other_fee_payer_opt ; @@ -1785,8 +1819,8 @@ module Base = struct in let (module S) = !(Tick.Inner_curve.Checked.Shifted.create ()) in let txn_global_slot = curr_state.global_slot_since_genesis in - let ( root_after_fee_payer - , (fee_payer_nonce, fee_payer_receipt_chain_hash) ) = + let root_after_fee_payer, (fee_payer_nonce, fee_payer_receipt_chain_hash) + = !(pay_fee ~constraint_constants ~shifted:(module S) ~root:s.source ~fee ~fee_payer_is_other ~fee_payer_id @@ -1827,7 +1861,7 @@ module Base = struct ~signature_verifies in ( Boolean.(res ||| second_starts_empty) - , `proof_must_verify Boolean.true_ ) ) + , `proof_must_verify Boolean.true_ )) ~is_new:(`Maybe second_starts_empty) ~is_fee_payer:account2_is_fee_payer ~which:`Two ~tag:snapp2_tag ~body:s1.body2.data @@ -1860,17 +1894,19 @@ module Base = struct ; Fee_excess.assert_equal_checked s.fee_excess fee_excess (* TODO: These should maybe be able to create tokens *) ; Token_id.Checked.Assert.equal s.next_available_token_after - s.next_available_token_before ]) ; + s.next_available_token_before 
+ ]) ; proof1_must_verify () let _rule ~constraint_constants : _ Pickles.Inductive_rule.t = - { identifier= "snapp-one-proved" - ; prevs= [snapp1_tag] - ; main= - (fun [t1] x -> + { identifier = "snapp-one-proved" + ; prevs = [ snapp1_tag ] + ; main = + (fun [ t1 ] x -> let s1 = main t1 ~constraint_constants x in - [s1] ) - ; main_value= (fun _ _ -> [true]) } + [ s1 ]) + ; main_value = (fun _ _ -> [ true ]) + } end module Zero_proved = struct @@ -1881,14 +1917,15 @@ module Base = struct let ( ! ) = run_checked in let payload = exists Snapp_command.Payload.Zero_proved.typ ~request:(fun () -> - Zero_complement ) + Zero_complement) in let ({ token_id ; other_fee_payer_opt ; one ; two ; second_starts_empty - ; second_ends_empty } + ; second_ends_empty + } : Snapp_command.Payload.Zero_proved.Checked.t) = payload in @@ -1926,8 +1963,8 @@ module Base = struct in let (module S) = !(Tick.Inner_curve.Checked.Shifted.create ()) in let txn_global_slot = curr_state.global_slot_since_genesis in - let ( root_after_fee_payer - , (fee_payer_nonce, fee_payer_receipt_chain_hash) ) = + let root_after_fee_payer, (fee_payer_nonce, fee_payer_receipt_chain_hash) + = !(pay_fee ~constraint_constants ~shifted:(module S) ~root:s.source ~fee ~fee_payer_is_other ~fee_payer_id @@ -1947,9 +1984,10 @@ module Base = struct ~self_predicate:(fun a -> Check_predicate.signed_self one.predicate { a with - nonce= + nonce = !(Account.Nonce.Checked.if_ account1_is_fee_payer - ~then_:fee_payer_nonce ~else_:a.nonce) } ) + ~then_:fee_payer_nonce ~else_:a.nonce) + }) ~other_predicate:(fun _ -> []) in let add_check2, checks_succeeded2 = create_checker () in @@ -1965,16 +2003,17 @@ module Base = struct ~signature_verifies in ( Boolean.(res ||| second_starts_empty) - , `proof_must_verify Boolean.true_ ) ) + , `proof_must_verify Boolean.true_ )) ~is_new:(`Maybe second_starts_empty) ~is_fee_payer:account2_is_fee_payer ~which:`Two ~tag:snapp2_tag ~body:two.body ~self_predicate:(fun a -> Check_predicate.signed_self 
two.predicate { a with - nonce= + nonce = !(Account.Nonce.Checked.if_ account2_is_fee_payer - ~then_:fee_payer_nonce ~else_:a.nonce) } ) + ~then_:fee_payer_nonce ~else_:a.nonce) + }) ~other_predicate:(fun _ -> []) in (* No deleting accounts for now. *) @@ -2004,13 +2043,14 @@ module Base = struct s.next_available_token_before) let _rule ~constraint_constants : _ Pickles.Inductive_rule.t = - { identifier= "snapp-zero-proved" - ; prevs= [] - ; main= + { identifier = "snapp-zero-proved" + ; prevs = [] + ; main = (fun [] x -> let () = main ~constraint_constants x in - [] ) - ; main_value= (fun _ _ -> []) } + []) + ; main_value = (fun _ _ -> []) + } end end @@ -2026,7 +2066,7 @@ module Base = struct (shifted : (module Inner_curve.Checked.Shifted.S with type t = shifted)) root pending_coinbase_stack_init pending_coinbase_stack_before pending_coinbase_after next_available_token state_body - ({signer; signature; payload} as txn : Transaction_union.var) = + ({ signer; signature; payload } as txn : Transaction_union.var) = let tag = payload.body.tag in let is_user_command = Transaction_union.Tag.Unpacked.is_user_command tag in let%bind () = @@ -2070,11 +2110,13 @@ module Base = struct [ [%with_label "Token_locked value is compatible with the transaction kind"] (Boolean.Assert.any - [Boolean.not payload.body.token_locked; is_create_account]) + [ Boolean.not payload.body.token_locked; is_create_account ]) ; [%with_label "Token_locked cannot be used with the default token"] (Boolean.Assert.any [ Boolean.not payload.body.token_locked - ; Boolean.not token_default ]) ] + ; Boolean.not token_default + ]) + ] in let%bind () = [%with_label "Validate tokens"] @@ -2088,14 +2130,15 @@ module Base = struct ; is_payment ; is_mint_tokens ; is_stake_delegation - ; is_fee_transfer ]) + ; is_fee_transfer + ]) ; (* TODO: Remove this check and update the transaction snark once we - have an exchange rate mechanism. See issue #4447. - *) + have an exchange rate mechanism. See issue #4447. 
+ *) [%with_label "Fees in tokens disabled"] (Boolean.Assert.is_true fee_token_default) ; [%with_label "Token is valid or command allows invalid token"] - Boolean.(Assert.any [not token_invalid; is_create_account]) + Boolean.(Assert.any [ not token_invalid; is_create_account ]) ; [%with_label "Token is default or command allows non-default token"] (Boolean.Assert.any @@ -2104,7 +2147,8 @@ module Base = struct ; is_create_account ; is_mint_tokens (* TODO: Enable this when fees in tokens are enabled. *) - (*; is_fee_transfer*) ]) + (*; is_fee_transfer*) + ]) ; [%with_label "Token is non-default or command allows default token"] Boolean.( @@ -2114,7 +2158,9 @@ module Base = struct ; is_stake_delegation ; is_create_account ; is_fee_transfer - ; is_coinbase ]) ]) + ; is_coinbase + ]) + ]) in let current_global_slot = Mina_state.Protocol_state.Body.consensus_state state_body @@ -2155,10 +2201,10 @@ module Base = struct in let%bind () = [%with_label "Check slot validity"] - ( Global_slot.Checked.( - current_global_slot <= payload.common.valid_until) + ( Global_slot.Checked.(current_global_slot <= payload.common.valid_until) >>= Boolean.Assert.is_true ) in + (* Check coinbase stack. Protocol state body is pushed into the Pending coinbase stack once per block. For example, consider any two transactions in a block. 
Their pending coinbase stacks would be: @@ -2180,13 +2226,13 @@ module Base = struct (* These are all the possible cases: - Init_stack Source Target - -------------------------------------------------------------- - i i i + state - i i i + state + coinbase - i i + state i + state - i i + state i + state + coinbase - i + coinbase i + state + coinbase i + state + coinbase + Init_stack Source Target + -------------------------------------------------------------- + i i i + state + i i i + state + coinbase + i i + state i + state + i i + state i + state + coinbase + i + coinbase i + state + coinbase i + state + coinbase *) let%bind () = [%with_label "Compute coinbase stack"] @@ -2225,7 +2271,8 @@ module Base = struct in Boolean.(equal_source ||| equal_source_with_state) in - Boolean.Assert.all [correct_coinbase_target_stack; valid_init_state])) + Boolean.Assert.all + [ correct_coinbase_target_stack; valid_init_state ])) in (* Interrogate failure cases. This value is created without constraints; the failures should be checked against potential failures to ensure @@ -2233,7 +2280,7 @@ module Base = struct *) let%bind () = [%with_label "A failing user command is a user command"] - Boolean.(Assert.any [is_user_command; not user_command_fails]) + Boolean.(Assert.any [ is_user_command; not user_command_fails ]) in let predicate_deferred = (* Predicate check is to be performed later if this is true. 
*) @@ -2288,12 +2335,12 @@ module Base = struct ~is_writeable:can_create_fee_payer_account fee_payer ~f:(fun ~is_empty_and_writeable account -> (* this account is: - - the fee-payer for payments - - the fee-payer for stake delegation - - the fee-payer for account creation - - the fee-payer for token minting - - the fee-receiver for a coinbase - - the second receiver for a fee transfer + - the fee-payer for payments + - the fee-payer for stake delegation + - the fee-payer for account creation + - the fee-payer for token minting + - the fee-receiver for a coinbase + - the second receiver for a fee transfer *) let%bind next_nonce = Account.Nonce.Checked.succ_if account.nonce is_user_command @@ -2304,7 +2351,7 @@ module Base = struct Account.Nonce.Checked.equal nonce account.nonce in Boolean.Assert.any - [Boolean.not is_user_command; nonce_matches]) + [ Boolean.not is_user_command; nonce_matches ]) in let%bind receipt_chain_hash = let current = account.receipt_chain_hash in @@ -2391,14 +2438,15 @@ module Base = struct { Account.Poly.balance ; public_key ; token_id - ; token_permissions= account.token_permissions - ; nonce= next_nonce + ; token_permissions = account.token_permissions + ; nonce = next_nonce ; receipt_chain_hash ; delegate - ; voting_for= account.voting_for + ; voting_for = account.voting_for ; timing - ; permissions= account.permissions - ; snapp= account.snapp } )) + ; permissions = account.permissions + ; snapp = account.snapp + })) in let%bind receiver_increase = (* - payments: payload.body.amount @@ -2411,7 +2459,7 @@ module Base = struct [%with_label "Compute receiver increase"] (let%bind base_amount = let%bind zero_transfer = - Boolean.any [is_stake_delegation; is_create_account] + Boolean.any [ is_stake_delegation; is_create_account ] in Amount.Checked.if_ zero_transfer ~then_:(Amount.var_of_t Amount.zero) @@ -2432,12 +2480,12 @@ module Base = struct ~depth:constraint_constants.ledger_depth root_after_fee_payer_update receiver ~f:(fun 
~is_empty_and_writeable account -> (* this account is: - - the receiver for payments - - the delegated-to account for stake delegation - - the created account for an account creation - - the receiver for minted tokens - - the receiver for a coinbase - - the first receiver for a fee transfer + - the receiver for payments + - the delegated-to account for stake delegation + - the created account for an account creation + - the receiver for minted tokens + - the receiver for a coinbase + - the first receiver for a fee transfer *) let%bind is_empty_failure = let%bind must_not_be_empty = @@ -2453,8 +2501,7 @@ module Base = struct let%bind () = [%with_label "Receiver creation failure matches predicted"] (let%bind is_nonempty_creating = - Boolean.( - (not is_empty_and_writeable) &&& is_create_account) + Boolean.((not is_empty_and_writeable) &&& is_create_account) in Boolean.Assert.( = ) is_nonempty_creating user_command_failure.receiver_exists) @@ -2472,8 +2519,7 @@ module Base = struct [%with_label "Check whether creation fails due to a non-default token"] (let%bind token_should_not_create = - Boolean.( - should_pay_to_create &&& Boolean.not token_default) + Boolean.(should_pay_to_create &&& Boolean.not token_default) in let%bind token_cannot_create = Boolean.(token_should_not_create &&& is_user_command) @@ -2521,6 +2567,7 @@ module Base = struct ~then_:Amount.(var_of_t zero) ~else_:amount_for_new_account in + (* NOTE: Instead of capturing this as part of the user command failures, we capture it inline here and bubble it out to a reference. 
This behavior is still in line with the @@ -2541,7 +2588,7 @@ module Base = struct in let%bind () = [%with_label "Overflow error only occurs in user commands"] - Boolean.(Assert.any [is_user_command; not overflow]) + Boolean.(Assert.any [ is_user_command; not overflow ]) in receiver_overflow := overflow ; Balance.Checked.if_ overflow ~then_:account.balance @@ -2580,14 +2627,16 @@ module Base = struct { Account.Poly.balance ; public_key ; token_id - ; token_permissions= {Token_permissions.token_owner; token_locked} - ; nonce= account.nonce - ; receipt_chain_hash= account.receipt_chain_hash + ; token_permissions = + { Token_permissions.token_owner; token_locked } + ; nonce = account.nonce + ; receipt_chain_hash = account.receipt_chain_hash ; delegate - ; voting_for= account.voting_for - ; timing= account.timing - ; permissions= account.permissions - ; snapp= account.snapp } )) + ; voting_for = account.voting_for + ; timing = account.timing + ; permissions = account.permissions + ; snapp = account.snapp + })) in let%bind user_command_fails = Boolean.(!receiver_overflow ||| user_command_fails) @@ -2602,12 +2651,12 @@ module Base = struct user_command_failure.source_not_present root_after_receiver_update source ~f:(fun ~is_empty_and_writeable account -> (* this account is: - - the source for payments - - the delegator for stake delegation - - the token owner for account creation - - the token owner for token minting - - the fee-receiver for a coinbase - - the second receiver for a fee transfer + - the source for payments + - the delegator for stake delegation + - the token owner for account creation + - the token owner for token minting + - the fee-receiver for a coinbase + - the second receiver for a fee transfer *) let%bind () = [%with_label "Check source presence failure matches predicted"] @@ -2628,10 +2677,10 @@ module Base = struct (Boolean.not fee_payer_is_source :> Field.Var.t) in (* Equivalent to: - if fee_payer_is_source then - num_failures = 0 - else - 
num_failures = num_failures + if fee_payer_is_source then + num_failures = 0 + else + num_failures = num_failures *) assert_r1cs not_fee_payer_is_source num_failures num_failures) in @@ -2687,7 +2736,8 @@ module Base = struct any [ account.token_permissions.token_owner ; token_default - ; not command_needs_token_owner ]) + ; not command_needs_token_owner + ]) in Boolean.( Assert.( = ) (not token_owner_ok) @@ -2708,7 +2758,8 @@ module Base = struct ; not token_default ; is_create_account ; not creating_new_token - ; not predicate_result ]) + ; not predicate_result + ]) in Boolean.Assert.( = ) token_auth_failed user_command_failure.token_auth) @@ -2723,16 +2774,17 @@ module Base = struct in [final_root] below, so it shouldn't matter. *) { Account.Poly.balance - ; public_key= account.public_key - ; token_id= account.token_id - ; token_permissions= account.token_permissions - ; nonce= account.nonce - ; receipt_chain_hash= account.receipt_chain_hash + ; public_key = account.public_key + ; token_id = account.token_id + ; token_permissions = account.token_permissions + ; nonce = account.nonce + ; receipt_chain_hash = account.receipt_chain_hash ; delegate - ; voting_for= account.voting_for + ; voting_for = account.voting_for ; timing - ; permissions= account.permissions - ; snapp= account.snapp } )) + ; permissions = account.permissions + ; snapp = account.snapp + })) in let%bind fee_excess = (* - payments: payload.common.fee @@ -2761,7 +2813,7 @@ module Base = struct [%with_label "Fee excess does not overflow"] Boolean.( Assert.any - [not is_fee_transfer; not fee_transfer_excess_overflowed]) + [ not is_fee_transfer; not fee_transfer_excess_overflowed ]) in Signed.Checked.if_ is_fee_transfer ~then_:fee_transfer_excess ~else_:user_command_excess) @@ -2780,26 +2832,26 @@ module Base = struct (final_root, fee_excess, supply_increase, next_available_token_after) (* Someday: - write the following soundness tests: - - apply a transaction where the signature is incorrect - - 
apply a transaction where the sender does not have enough money in their account - - apply a transaction and stuff in the wrong target hash - *) + write the following soundness tests: + - apply a transaction where the signature is incorrect + - apply a transaction where the sender does not have enough money in their account + - apply a transaction and stuff in the wrong target hash + *) (* spec for [main statement]: - constraints pass iff there exists - t : Tagged_transaction.t - such that - - applying [t] to ledger with merkle hash [l1] results in ledger with merkle hash [l2]. - - applying [t] to [pc.source] with results in pending coinbase stack [pc.target] - - t has fee excess equal to [fee_excess] - - t has supply increase equal to [supply_increase] - where statement includes - l1 : Frozen_ledger_hash.t, - l2 : Frozen_ledger_hash.t, - fee_excess : Amount.Signed.t, - supply_increase : Amount.t - pc: Pending_coinbase_stack_state.t + constraints pass iff there exists + t : Tagged_transaction.t + such that + - applying [t] to ledger with merkle hash [l1] results in ledger with merkle hash [l2]. 
+ - applying [t] to [pc.source] with results in pending coinbase stack [pc.target] + - t has fee excess equal to [fee_excess] + - t has supply increase equal to [supply_increase] + where statement includes + l1 : Frozen_ledger_hash.t, + l2 : Frozen_ledger_hash.t, + fee_excess : Amount.Signed.t, + supply_increase : Amount.t + pc: Pending_coinbase_stack_state.t *) let%snarkydef main ~constraint_constants (statement : Statement.With_sok.Checked.t) = @@ -2818,10 +2870,8 @@ module Base = struct ~request:(As_prover.return State_body) in let pc = statement.pending_coinbase_stack_state in - let%bind ( root_after - , fee_excess - , supply_increase - , next_available_token_after ) = + let%bind root_after, fee_excess, supply_increase, next_available_token_after + = apply_tagged_transaction ~constraint_constants (module Shifted) statement.source pending_coinbase_init pc.source pc.target @@ -2842,9 +2892,10 @@ module Base = struct ~else_:t.payload.common.fee_token in { Fee_excess.fee_token_l - ; fee_excess_l= Signed_poly.map ~f:Amount.Checked.to_fee fee_excess - ; fee_token_r= Token_id.(var_of_t default) - ; fee_excess_r= Fee.Signed.(Checked.constant zero) } + ; fee_excess_l = Signed_poly.map ~f:Amount.Checked.to_fee fee_excess + ; fee_token_r = Token_id.(var_of_t default) + ; fee_excess_r = Fee.Signed.(Checked.constant zero) + } in Checked.all_unit [ Frozen_ledger_hash.assert_equal root_after statement.target @@ -2852,22 +2903,24 @@ module Base = struct statement.supply_increase ; Fee_excess.assert_equal_checked fee_excess statement.fee_excess ; Token_id.Checked.Assert.equal next_available_token_after - statement.next_available_token_after ] + statement.next_available_token_after + ] let rule ~constraint_constants : _ Pickles.Inductive_rule.t = - { identifier= "transaction" - ; prevs= [] - ; main= + { identifier = "transaction" + ; prevs = [] + ; main = (fun [] x -> Run.run_checked (main ~constraint_constants x) ; - [] ) - ; main_value= (fun [] _ -> []) } + []) + ; main_value 
= (fun [] _ -> []) + } let transaction_union_handler handler (transaction : Transaction_union.t) (state_body : Mina_state.Protocol_state.Body.Value.t) (init_stack : Pending_coinbase.Stack.t) : Snarky_backendless.Request.request -> _ = - fun (With {request; respond} as r) -> + fun (With { request; respond } as r) -> match request with | Transaction -> respond (Provide transaction) @@ -2881,11 +2934,12 @@ end module Transition_data = struct type t = - { proof: Proof_type.t - ; supply_increase: Amount.t - ; fee_excess: Fee_excess.t - ; sok_digest: Sok_message.Digest.t - ; pending_coinbase_stack_state: Pending_coinbase_stack_state.t } + { proof : Proof_type.t + ; supply_increase : Amount.t + ; fee_excess : Fee_excess.t + ; sok_digest : Sok_message.Digest.t + ; pending_coinbase_stack_state : Pending_coinbase_stack_state.t + } [@@deriving fields] end @@ -2900,12 +2954,11 @@ module Merge = struct verify_transition tock_vk _ s2 s3 pending_coinbase_stack23.source, pending_coinbase_stack23.target is true *) let%snarkydef main - ([s1; s2] : + ([ s1; s2 ] : (Statement.With_sok.var * (Statement.With_sok.var * _)) Pickles_types.Hlist.HlistId.t) (s : Statement.With_sok.Checked.t) = let%bind fee_excess = - Fee_excess.combine_checked s1.Statement.fee_excess - s2.Statement.fee_excess + Fee_excess.combine_checked s1.Statement.fee_excess s2.Statement.fee_excess in let%bind () = with_label __LOC__ @@ -2934,7 +2987,8 @@ module Merge = struct ; Token_id.Checked.Assert.equal s1.next_available_token_after s2.next_available_token_before ; Token_id.Checked.Assert.equal s2.next_available_token_after - s.next_available_token_after ] + s.next_available_token_after + ] let rule ~proof_level self : _ Pickles.Inductive_rule.t = let prev_should_verify = @@ -2945,13 +2999,14 @@ module Merge = struct false in let b = Boolean.var_of_value prev_should_verify in - { identifier= "merge" - ; prevs= [self; self] - ; main= + { identifier = "merge" + ; prevs = [ self; self ] + ; main = (fun ps x -> 
Run.run_checked (main ps x) ; - [b; b] ) - ; main_value= (fun _ _ -> [prev_should_verify; prev_should_verify]) } + [ b; b ]) + ; main_value = (fun _ _ -> [ prev_should_verify; prev_should_verify ]) + } end open Pickles_types @@ -2983,7 +3038,7 @@ let system ~proof_level ~constraint_constants = (Genesis_constants.Constraint_constants.to_snark_keys_header constraint_constants) ~choices:(fun ~self -> - [Base.rule ~constraint_constants; Merge.rule ~proof_level self] ) ) + [ Base.rule ~constraint_constants; Merge.rule ~proof_level self ])) module Verification = struct module type S = sig @@ -3048,8 +3103,8 @@ module type S = sig t -> t -> sok_digest:Sok_message.Digest.t -> t Async.Deferred.Or_error.t end -let check_transaction_union ?(preeval = false) ~constraint_constants - sok_message source target init_stack pending_coinbase_stack_state +let check_transaction_union ?(preeval = false) ~constraint_constants sok_message + source target init_stack pending_coinbase_stack_state next_available_token_before next_available_token_after transaction state_body handler = if preeval then failwith "preeval currently disabled" ; @@ -3060,12 +3115,13 @@ let check_transaction_union ?(preeval = false) ~constraint_constants let statement : Statement.With_sok.t = { source ; target - ; supply_increase= Transaction_union.supply_increase transaction + ; supply_increase = Transaction_union.supply_increase transaction ; pending_coinbase_stack_state - ; fee_excess= Transaction_union.fee_excess transaction + ; fee_excess = Transaction_union.fee_excess transaction ; next_available_token_before ; next_available_token_after - ; sok_digest } + ; sok_digest + } in let open Tick in ignore @@ -3087,17 +3143,18 @@ let command_to_proofs (p : Snapp_command.t) : match c with | Proof p -> p - | Both {proof; _} -> + | Both { proof; _ } -> proof | _ -> failwith "proof_exn" in let f (ps : (Snapp_command.Party.Authorized.Proved.t, _) At_most.t) = At_most.map ps ~f:(fun p -> - ( { 
Snapp_statement.Poly.predicate= p.data.predicate - ; body1= p.data.body - ; body2= Snapp_command.Party.Body.dummy } - , proof_exn p.authorization ) ) + ( { Snapp_statement.Poly.predicate = p.data.predicate + ; body1 = p.data.body + ; body2 = Snapp_command.Party.Body.dummy + } + , proof_exn p.authorization )) in match p with | Signed_empty _ -> @@ -3105,11 +3162,11 @@ let command_to_proofs (p : Snapp_command.t) : | Signed_signed _ -> [] | Proved_empty p -> - f [p.one] + f [ p.one ] | Proved_signed p -> - f [p.one] + f [ p.one ] | Proved_proved p -> - f [p.one; p.two] + f [ p.one; p.two ] let command_to_statements c = At_most.map (command_to_proofs c) ~f:fst @@ -3127,12 +3184,13 @@ let check_snapp_command ?(preeval = false) ~constraint_constants ~sok_message let statement : Statement.With_sok.t = { source ; target - ; supply_increase= Currency.Amount.zero + ; supply_increase = Currency.Amount.zero ; pending_coinbase_stack_state - ; fee_excess= Or_error.ok_exn (Snapp_command.fee_excess t) + ; fee_excess = Or_error.ok_exn (Snapp_command.fee_excess t) ; next_available_token_before ; next_available_token_after - ; sok_digest } + ; sok_digest + } in let open Tick in let comp = @@ -3143,8 +3201,8 @@ let check_snapp_command ?(preeval = false) ~constraint_constants ~sok_message match command_to_statements t with | [] -> Impl.make_checked (fun () -> - Base.Snapp_command.Zero_proved.main ~constraint_constants s ) - | [s1] -> + Base.Snapp_command.Zero_proved.main ~constraint_constants s) + | [ s1 ] -> let%bind s1 = exists Snapp_statement.typ ~compute:(As_prover.return s1) in @@ -3152,15 +3210,15 @@ let check_snapp_command ?(preeval = false) ~constraint_constants ~sok_message let (_ : Boolean.var) = Base.Snapp_command.One_proved.main ~constraint_constants s1 s in - () ) - | [s1; s2] -> + ()) + | [ s1; s2 ] -> let%bind s1 = exists Snapp_statement.typ ~compute:(As_prover.return s1) and s2 = exists Snapp_statement.typ ~compute:(As_prover.return s2) in Impl.make_checked (fun () -> 
let (_ : Boolean.var * Boolean.var) = Base.Snapp_command.Two_proved.main ~constraint_constants s1 s2 s in - () ) + ()) in ignore ( Or_error.ok_exn @@ -3178,9 +3236,7 @@ let check_transaction ?preeval ~constraint_constants ~sok_message ~source let transaction = Transaction_protocol_state.transaction transaction_in_block in - let state_body = - Transaction_protocol_state.block_data transaction_in_block - in + let state_body = Transaction_protocol_state.block_data transaction_in_block in match to_preunion (transaction :> Transaction.t) with | `Snapp_command c -> check_snapp_command ?preeval ~constraint_constants ~sok_message ~source @@ -3201,7 +3257,7 @@ let check_user_command ~constraint_constants ~sok_message ~source ~target check_transaction ~constraint_constants ~sok_message ~source ~target ~init_stack ~pending_coinbase_stack_state ~next_available_token_before ~next_available_token_after ~snapp_account1:None ~snapp_account2:None - {t_in_block with transaction= Command (Signed_command user_command)} + { t_in_block with transaction = Command (Signed_command user_command) } handler let generate_transaction_union_witness ?(preeval = false) ~constraint_constants @@ -3212,9 +3268,7 @@ let generate_transaction_union_witness ?(preeval = false) ~constraint_constants let transaction = Transaction_protocol_state.transaction transaction_in_block in - let state_body = - Transaction_protocol_state.block_data transaction_in_block - in + let state_body = Transaction_protocol_state.block_data transaction_in_block in let sok_digest = Sok_message.digest sok_message in let handler = Base.transaction_union_handler handler transaction state_body init_stack @@ -3222,16 +3276,17 @@ let generate_transaction_union_witness ?(preeval = false) ~constraint_constants let statement : Statement.With_sok.t = { source ; target - ; supply_increase= Transaction_union.supply_increase transaction + ; supply_increase = Transaction_union.supply_increase transaction ; pending_coinbase_stack_state - ; 
fee_excess= Transaction_union.fee_excess transaction + ; fee_excess = Transaction_union.fee_excess transaction ; next_available_token_before ; next_available_token_after - ; sok_digest } + ; sok_digest + } in let open Tick in let main x = handle (Base.main ~constraint_constants x) handler in - generate_auxiliary_input [Statement.With_sok.typ] () main statement + generate_auxiliary_input [ Statement.With_sok.typ ] () main statement let generate_snapp_command_witness ?(preeval = false) ~constraint_constants ~sok_message ~source ~target ~init_stack:_ ~pending_coinbase_stack_state @@ -3241,9 +3296,7 @@ let generate_snapp_command_witness ?(preeval = false) ~constraint_constants let transaction : Snapp_command.t = Transaction_protocol_state.transaction transaction_in_block in - let state_body = - Transaction_protocol_state.block_data transaction_in_block - in + let state_body = Transaction_protocol_state.block_data transaction_in_block in let sok_digest = Sok_message.digest sok_message in let handler = Base.Snapp_command.handler ~state_body ~snapp_account1 ~snapp_account2 @@ -3252,12 +3305,13 @@ let generate_snapp_command_witness ?(preeval = false) ~constraint_constants let statement : Statement.With_sok.t = { source ; target - ; supply_increase= Currency.Amount.zero + ; supply_increase = Currency.Amount.zero ; pending_coinbase_stack_state - ; fee_excess= Or_error.ok_exn (Snapp_command.fee_excess transaction) + ; fee_excess = Or_error.ok_exn (Snapp_command.fee_excess transaction) ; next_available_token_before ; next_available_token_after - ; sok_digest } + ; sok_digest + } in let open Tick in match command_to_statements transaction with @@ -3265,22 +3319,22 @@ let generate_snapp_command_witness ?(preeval = false) ~constraint_constants let main x = handle (make_checked (fun () -> - Base.Snapp_command.Zero_proved.main ~constraint_constants x )) + Base.Snapp_command.Zero_proved.main ~constraint_constants x)) handler in - generate_auxiliary_input [Statement.With_sok.typ] () 
main statement - | [s1] -> + generate_auxiliary_input [ Statement.With_sok.typ ] () main statement + | [ s1 ] -> let main x = handle (let%bind s1 = exists Snapp_statement.typ ~compute:(As_prover.return s1) in make_checked (fun () -> - Base.Snapp_command.One_proved.main s1 ~constraint_constants x )) + Base.Snapp_command.One_proved.main s1 ~constraint_constants x)) handler in - generate_auxiliary_input [Statement.With_sok.typ] () main statement - | [s1; s2] -> + generate_auxiliary_input [ Statement.With_sok.typ ] () main statement + | [ s1; s2 ] -> let main x = handle (let%bind s1 = @@ -3290,11 +3344,10 @@ let generate_snapp_command_witness ?(preeval = false) ~constraint_constants exists Snapp_statement.typ ~compute:(As_prover.return s2) in make_checked (fun () -> - Base.Snapp_command.Two_proved.main s1 s2 ~constraint_constants x - )) + Base.Snapp_command.Two_proved.main s1 s2 ~constraint_constants x)) handler in - generate_auxiliary_input [Statement.With_sok.typ] () main statement + generate_auxiliary_input [ Statement.With_sok.typ ] () main statement let generate_transaction_witness ?preeval ~constraint_constants ~sok_message ~source ~target ~init_stack ~pending_coinbase_stack_state @@ -3308,32 +3361,33 @@ let generate_transaction_witness ?preeval ~constraint_constants ~sok_message :> Transaction.t ) with | `Snapp_command c -> - generate_snapp_command_witness ?preeval ~constraint_constants - ~sok_message ~source ~target ~init_stack ~pending_coinbase_stack_state - ~next_available_token_before ~next_available_token_after - ~snapp_account1 ~snapp_account2 - {transaction_in_block with transaction= c} + generate_snapp_command_witness ?preeval ~constraint_constants ~sok_message + ~source ~target ~init_stack ~pending_coinbase_stack_state + ~next_available_token_before ~next_available_token_after ~snapp_account1 + ~snapp_account2 + { transaction_in_block with transaction = c } handler | `Transaction t -> generate_transaction_union_witness ?preeval ~constraint_constants 
sok_message source target { transaction_in_block with - transaction= Transaction_union.of_transaction t } + transaction = Transaction_union.of_transaction t + } init_stack next_available_token_before next_available_token_after pending_coinbase_stack_state handler let verify (ts : (t * _) list) ~key = if - List.for_all ts ~f:(fun ({statement; _}, message) -> + List.for_all ts ~f:(fun ({ statement; _ }, message) -> Sok_message.Digest.equal (Sok_message.digest message) - statement.sok_digest ) + statement.sok_digest) then Pickles.verify (module Nat.N2) (module Statement.With_sok) key - (List.map ts ~f:(fun ({statement; proof}, _) -> (statement, proof))) + (List.map ts ~f:(fun ({ statement; proof }, _) -> (statement, proof))) else Async.return false let constraint_system_digests ~constraint_constants () = @@ -3341,16 +3395,17 @@ let constraint_system_digests ~constraint_constants () = [ ( "transaction-merge" , digest Merge.( - Tick.constraint_system ~exposing:[Statement.With_sok.typ] (fun x -> + Tick.constraint_system ~exposing:[ Statement.With_sok.typ ] (fun x -> let open Tick in let%bind x1 = exists Statement.With_sok.typ in let%bind x2 = exists Statement.With_sok.typ in - main [x1; x2] x )) ) + main [ x1; x2 ] x)) ) ; ( "transaction-base" , digest Base.( - Tick.constraint_system ~exposing:[Statement.With_sok.typ] - (main ~constraint_constants)) ) ] + Tick.constraint_system ~exposing:[ Statement.With_sok.typ ] + (main ~constraint_constants)) ) + ] module Make (Inputs : sig val constraint_constants : Genesis_constants.Constraint_constants.t @@ -3360,7 +3415,7 @@ end) = struct open Inputs - let tag, cache_handle, p, Pickles.Provers.[base; merge] = + let tag, cache_handle, p, Pickles.Provers.[ base; merge ] = system ~proof_level ~constraint_constants module Proof = (val p) @@ -3369,17 +3424,16 @@ struct let verification_key = Proof.verification_key - let verify_against_digest {statement; proof} = - Proof.verify [(statement, proof)] + let verify_against_digest { 
statement; proof } = + Proof.verify [ (statement, proof) ] let verify ts = if List.for_all ts ~f:(fun (p, m) -> - Sok_message.Digest.equal (Sok_message.digest m) - p.statement.sok_digest ) + Sok_message.Digest.equal (Sok_message.digest m) p.statement.sok_digest) then Proof.verify - (List.map ts ~f:(fun ({statement; proof}, _) -> (statement, proof))) + (List.map ts ~f:(fun ({ statement; proof }, _) -> (statement, proof))) else Async.return false let of_transaction_union sok_digest source target ~init_stack @@ -3391,9 +3445,10 @@ struct ; sok_digest ; next_available_token_before ; next_available_token_after - ; fee_excess= Transaction_union.fee_excess transaction - ; supply_increase= Transaction_union.supply_increase transaction - ; pending_coinbase_stack_state } + ; fee_excess = Transaction_union.fee_excess transaction + ; supply_increase = Transaction_union.supply_increase transaction + ; pending_coinbase_stack_state + } in let%map.Async.Deferred proof = base [] @@ -3402,7 +3457,7 @@ struct init_stack) s in - {statement= s; proof} + { statement = s; proof } let of_snapp_command ~sok_digest ~source ~target ~init_stack:_ ~pending_coinbase_stack_state ~next_available_token_before @@ -3415,19 +3470,20 @@ struct let statement : Statement.With_sok.t = { source ; target - ; supply_increase= Currency.Amount.zero + ; supply_increase = Currency.Amount.zero ; pending_coinbase_stack_state - ; fee_excess= Or_error.ok_exn (Snapp_command.fee_excess t) + ; fee_excess = Or_error.ok_exn (Snapp_command.fee_excess t) ; next_available_token_before ; next_available_token_after - ; sok_digest } + ; sok_digest + } in let proof = match command_to_proofs t with - | [] | [_] | [_; _] -> + | [] | [ _ ] | [ _; _ ] -> failwith "unimplemented" in - {statement; proof} + { statement; proof } let of_transaction ~sok_digest ~source ~target ~init_stack ~pending_coinbase_stack_state ~next_available_token_before @@ -3461,7 +3517,7 @@ struct ~pending_coinbase_stack_state ~next_available_token_before 
~next_available_token_after ~snapp_account1:None ~snapp_account2:None { user_command_in_block with - transaction= + transaction = Command (Signed_command (Transaction_protocol_state.transaction user_command_in_block)) @@ -3475,12 +3531,13 @@ struct ~pending_coinbase_stack_state ~next_available_token_before ~next_available_token_after ~snapp_account1:None ~snapp_account2:None { transfer_in_block with - transaction= + transaction = Fee_transfer - (Transaction_protocol_state.transaction transfer_in_block) } + (Transaction_protocol_state.transaction transfer_in_block) + } handler - let merge ({statement= t12; _} as x12) ({statement= t23; _} as x23) + let merge ({ statement = t12; _ } as x12) ({ statement = t23; _ } as x23) ~sok_digest = if not (Frozen_ledger_hash.( = ) t12.target t23.source) then failwithf @@ -3509,21 +3566,23 @@ struct |> Async.return in let s : Statement.With_sok.t = - { Statement.source= t12.source - ; target= t23.target + { Statement.source = t12.source + ; target = t23.target ; supply_increase ; fee_excess - ; next_available_token_before= t12.next_available_token_before - ; next_available_token_after= t23.next_available_token_after - ; pending_coinbase_stack_state= - { source= t12.pending_coinbase_stack_state.source - ; target= t23.pending_coinbase_stack_state.target } - ; sok_digest } + ; next_available_token_before = t12.next_available_token_before + ; next_available_token_after = t23.next_available_token_after + ; pending_coinbase_stack_state = + { source = t12.pending_coinbase_stack_state.source + ; target = t23.pending_coinbase_stack_state.target + } + ; sok_digest + } in let%map.Async.Deferred proof = - merge [(x12.statement, x12.proof); (x23.statement, x23.proof)] s + merge [ (x12.statement, x12.proof); (x23.statement, x23.proof) ] s in - Ok {statement= s; proof} + Ok { statement = s; proof } let constraint_system_digests = lazy (constraint_system_digests ~constraint_constants ()) @@ -3571,7 +3630,7 @@ let%test_module "transaction_snark" = 
let merkle_root t = Frozen_ledger_hash.of_ledger_hash @@ merkle_root t end - type wallet = {private_key: Private_key.t; account: Account.t} + type wallet = { private_key : Private_key.t; account : Account.t } let ledger_depth = constraint_constants.ledger_depth @@ -3583,9 +3642,10 @@ let%test_module "transaction_snark" = in let account_id = Account_id.create public_key Token_id.default in { private_key - ; account= + ; account = Account.create account_id - (Balance.of_int ((50 + Random.int 100) * 1_000_000_000)) } + (Balance.of_int ((50 + Random.int 100) * 1_000_000_000)) + } in Array.init n ~f:(fun _ -> random_wallet ()) @@ -3599,8 +3659,9 @@ let%test_module "transaction_snark" = (Payment { source_pk ; receiver_pk - ; token_id= token - ; amount= Amount.of_int amt }) + ; token_id = token + ; amount = Amount.of_int amt + }) in let signature = Signed_command.sign_payload fee_payer.private_key payload @@ -3608,8 +3669,9 @@ let%test_module "transaction_snark" = Signed_command.check Signed_command.Poly.Stable.Latest. 
{ payload - ; signer= Public_key.of_private_key_exn fee_payer.private_key - ; signature } + ; signer = Public_key.of_private_key_exn fee_payer.private_key + ; signature + } |> Option.value_exn let user_command_with_wallet wallets ~sender:i ~receiver:j amt fee @@ -3669,13 +3731,14 @@ let%test_module "transaction_snark" = ~txn_global_slot:current_global_slot user_command in let user_command_in_block = - { Transaction_protocol_state.Poly.transaction= user_command - ; block_data= state_body } + { Transaction_protocol_state.Poly.transaction = user_command + ; block_data = state_body + } in Async.Thread_safe.block_on_async_exn (fun () -> of_user_command ~sok_digest ~source ~target ~init_stack ~pending_coinbase_stack_state ~next_available_token_before - ~next_available_token_after user_command_in_block handler ) + ~next_available_token_after user_command_in_block handler) (* ~proposer: @@ -3725,14 +3788,14 @@ let%test_module "transaction_snark" = pending_coinbase_init in let txn_in_block = - {Transaction_protocol_state.Poly.transaction; block_data= state_body} + { Transaction_protocol_state.Poly.transaction; block_data = state_body } in Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> Ledger.create_new_account_exn ledger producer_id (Account.create receiver_id Balance.zero) ; let sparse_ledger = Sparse_ledger.of_ledger_subset_exn ledger - [producer_id; receiver_id; other_id] + [ producer_id; receiver_id; other_id ] in let sparse_ledger_after = Sparse_ledger.apply_transaction_exn ~constraint_constants @@ -3754,16 +3817,16 @@ let%test_module "transaction_snark" = (Sparse_ledger.next_available_token sparse_ledger_after) ~init_stack:pending_coinbase_init ~pending_coinbase_stack_state: - {source= source_stack; target= pending_coinbase_stack_target} - ~snapp_account1:None ~snapp_account2:None ) + { source = source_stack; target = pending_coinbase_stack_target } + ~snapp_account1:None ~snapp_account2:None) let%test_unit "coinbase with new state body hash" = 
Test_util.with_randomness 123456789 (fun () -> - coinbase_test state_body ~carryforward:false ) + coinbase_test state_body ~carryforward:false) let%test_unit "coinbase with carry-forward state body hash" = Test_util.with_randomness 123456789 (fun () -> - coinbase_test state_body ~carryforward:true ) + coinbase_test state_body ~carryforward:true) let%test_unit "new_account" = Test_util.with_randomness 123456789 (fun () -> @@ -3771,10 +3834,10 @@ let%test_module "transaction_snark" = Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> Array.iter (Array.sub wallets ~pos:1 ~len:(Array.length wallets - 1)) - ~f:(fun {account; private_key= _} -> + ~f:(fun { account; private_key = _ } -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; let t1 = user_command_with_wallet wallets ~sender:1 ~receiver:0 8_000_000_000 @@ -3814,16 +3877,17 @@ let%test_module "transaction_snark" = state_body_hash pending_coinbase_stack in let pending_coinbase_stack_state = - { Pending_coinbase_stack_state.source= pending_coinbase_stack - ; target= pending_coinbase_stack_target } + { Pending_coinbase_stack_state.source = pending_coinbase_stack + ; target = pending_coinbase_stack_target + } in check_user_command ~constraint_constants ~sok_message ~source:(Ledger.merkle_root ledger) ~target ~init_stack:pending_coinbase_stack ~pending_coinbase_stack_state ~next_available_token_before ~next_available_token_after - {transaction= t1; block_data= state_body} - (unstage @@ Sparse_ledger.handler sparse_ledger) ) ) + { transaction = t1; block_data = state_body } + (unstage @@ Sparse_ledger.handler sparse_ledger))) let signed_signed ~wallets i j = let full_amount = 8_000_000_000 in @@ -3840,29 +3904,35 @@ let%test_module "transaction_snark" = Vector.init Snapp_state.Max_state_size.n ~f:Field.of_int in let data1 : Party.Predicated.Signed.t = - { predicate= acct1.account.nonce - ; body= - { pk= acct1.account.public_key - ; update= - { app_state= + { predicate = 
acct1.account.nonce + ; body = + { pk = acct1.account.public_key + ; update = + { app_state = Vector.map new_state ~f:(fun x -> Set_or_keep.Set x) - ; delegate= Keep - ; verification_key= Keep - ; permissions= Keep } - ; delta= - Amount.Signed.( - negate (of_unsigned (Amount.of_int full_amount))) } } + ; delegate = Keep + ; verification_key = Keep + ; permissions = Keep + } + ; delta = + Amount.Signed.(negate (of_unsigned (Amount.of_int full_amount))) + } + } in let data2 : Party.Predicated.Signed.t = - { predicate= acct2.account.nonce - ; body= - { pk= acct2.account.public_key - ; update= - { app_state= Vector.map new_state ~f:(fun _ -> Set_or_keep.Keep) - ; delegate= Keep - ; verification_key= Keep - ; permissions= Keep } - ; delta= Amount.Signed.of_unsigned receiver_amount } } + { predicate = acct2.account.nonce + ; body = + { pk = acct2.account.public_key + ; update = + { app_state = + Vector.map new_state ~f:(fun _ -> Set_or_keep.Keep) + ; delegate = Keep + ; verification_key = Keep + ; permissions = Keep + } + ; delta = Amount.Signed.of_unsigned receiver_amount + } + } in Snapp_command.signed_signed ~token_id:Token_id.default (acct1.private_key, data1) (acct2.private_key, data2) @@ -3873,10 +3943,10 @@ let%test_module "transaction_snark" = Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> Array.iter (Array.sub wallets ~pos:1 ~len:(Array.length wallets - 1)) - ~f:(fun {account; private_key= _} -> + ~f:(fun { account; private_key = _ } -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; let t1 = let i, j = (1, 2) in signed_signed ~wallets i j @@ -3890,17 +3960,17 @@ let%test_module "transaction_snark" = ~txn_state_view t1 in let hash_post = Ledger.merkle_root ledger in - [%test_eq: Field.t] hash_pre hash_post ) ) + [%test_eq: Field.t] hash_pre hash_post)) let%test_unit "signed_signed" = Test_util.with_randomness 123456789 (fun () -> let wallets = random_wallets () in Ledger.with_ledger ~depth:ledger_depth 
~f:(fun ledger -> Array.iter (Array.sub wallets ~pos:1 ~len:2) - ~f:(fun {account; private_key= _} -> + ~f:(fun { account; private_key = _ } -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; let i, j = (1, 2) in let t1 = signed_signed ~wallets i j in let txn_state_view = @@ -3927,20 +3997,20 @@ let%test_module "transaction_snark" = state_body_hash pending_coinbase_stack in let pending_coinbase_stack_state = - { Pending_coinbase_stack_state.source= pending_coinbase_stack - ; target= pending_coinbase_stack_target } + { Pending_coinbase_stack_state.source = pending_coinbase_stack + ; target = pending_coinbase_stack_target + } in let snapp_account1, snapp_account2 = Sparse_ledger.snapp_accounts sparse_ledger (Command (Snapp_command t1)) in - check_snapp_command ~constraint_constants ~sok_message - ~state_body + check_snapp_command ~constraint_constants ~sok_message ~state_body ~source:(Ledger.merkle_root ledger) ~target ~init_stack:pending_coinbase_stack ~pending_coinbase_stack_state ~next_available_token_before ~next_available_token_after ~snapp_account1 ~snapp_account2 t1 - (unstage @@ Sparse_ledger.handler sparse_ledger) ) ) + (unstage @@ Sparse_ledger.handler sparse_ledger))) let account_fee = Fee.to_int constraint_constants.account_creation_fee @@ -4014,8 +4084,7 @@ let%test_module "transaction_snark" = in let _undo = Or_error.ok_exn - @@ Ledger.apply_transaction ledger ~constraint_constants - ~txn_state_view + @@ Ledger.apply_transaction ledger ~constraint_constants ~txn_state_view (txn :> Transaction.t) in let target = Ledger.merkle_root ledger in @@ -4023,12 +4092,13 @@ let%test_module "transaction_snark" = check_transaction ~constraint_constants ~sok_message ~source ~target ~init_stack:pending_coinbase_stack ~pending_coinbase_stack_state: - { Pending_coinbase_stack_state.source= pending_coinbase_stack - ; target= pending_coinbase_stack_target } + { Pending_coinbase_stack_state.source = pending_coinbase_stack + ; 
target = pending_coinbase_stack_target + } ~next_available_token_before:next_available_token ~next_available_token_after:(Ledger.next_available_token ledger) ~snapp_account1:None ~snapp_account2:None - {transaction= txn; block_data= state_body} + { transaction = txn; block_data = state_body } (unstage @@ Sparse_ledger.handler sparse_ledger) let%test_unit "account creation fee - user commands" = @@ -4060,7 +4130,7 @@ let%test_module "transaction_snark" = ~fee_token:Token_id.default ~token:Token_id.default amount (Fee.of_int txn_fee) nonce memo in - (Account.Nonce.succ nonce, txns @ [uc]) ) + (Account.Nonce.succ nonce, txns @ [ uc ])) in Ledger.create_new_account_exn ledger (Account.identifier sender.account) @@ -4068,19 +4138,19 @@ let%test_module "transaction_snark" = let () = List.iter ucs ~f:(fun uc -> test_transaction ~constraint_constants ledger - (Transaction.Command (Signed_command uc)) ) + (Transaction.Command (Signed_command uc))) in List.iter receivers ~f:(fun receiver -> check_balance (Account.identifier receiver.account) ((amount * txns_per_receiver) - account_fee) - ledger ) ; + ledger) ; check_balance (Account.identifier sender.account) ( Balance.to_int sender.account.balance - - (amount + txn_fee) * txns_per_receiver - * List.length receivers ) - ledger ) ) + - (amount + txn_fee) * txns_per_receiver * List.length receivers + ) + ledger)) let%test_unit "account creation fee - fee transfers" = Test_util.with_randomness 123456789 (fun () -> @@ -4102,20 +4172,20 @@ let%test_module "transaction_snark" = Fee_transfer.Single.create ~receiver_pk:receiver.account.public_key ~fee:(Currency.Fee.of_int fee) - ~fee_token:receiver.account.token_id ) + ~fee_token:receiver.account.token_id) in - txns @ [ft] ) + txns @ [ ft ]) in let () = List.iter fts ~f:(fun ft -> let txn = Transaction.Fee_transfer ft in - test_transaction ~constraint_constants ledger txn ) + test_transaction ~constraint_constants ledger txn) in List.iter receivers ~f:(fun receiver -> check_balance 
(Account.identifier receiver.account) ((fee * txns_per_receiver) - account_fee) - ledger ) ) ) + ledger))) let%test_unit "account creation fee - coinbase" = Test_util.with_randomness 123456789 (fun () -> @@ -4133,7 +4203,7 @@ let%test_module "transaction_snark" = List.map (List.init ft_count ~f:Fn.id) ~f:(fun _ -> Coinbase.Fee_transfer.create ~receiver_pk:other.account.public_key - ~fee:constraint_constants.account_creation_fee ) + ~fee:constraint_constants.account_creation_fee) in List.fold ~init:(fts, []) (List.init coinbase_count ~f:Fn.id) ~f:(fun (fts, cbs) _ -> @@ -4144,7 +4214,7 @@ let%test_module "transaction_snark" = ~fee_transfer:(List.hd fts) |> Or_error.ok_exn in - (Option.value ~default:[] (List.tl fts), cb :: cbs) ) + (Option.value ~default:[] (List.tl fts), cb :: cbs)) in Ledger.create_new_account_exn ledger (Account.identifier dummy_account.account) @@ -4152,7 +4222,7 @@ let%test_module "transaction_snark" = let () = List.iter cbs ~f:(fun cb -> let txn = Transaction.Coinbase cb in - test_transaction ~constraint_constants ledger txn ) + test_transaction ~constraint_constants ledger txn) in let fees = fee * ft_count in check_balance @@ -4161,12 +4231,13 @@ let%test_module "transaction_snark" = ledger ; check_balance (Account.identifier other.account) - (fees - account_fee) ledger ) ) + (fees - account_fee) ledger)) module Pc_with_init_stack = struct type t = - { pc: Pending_coinbase_stack_state.t - ; init_stack: Pending_coinbase.Stack.t } + { pc : Pending_coinbase_stack_state.t + ; init_stack : Pending_coinbase.Stack.t + } end let test_base_and_merge ~state_hash_and_body1 ~state_hash_and_body2 @@ -4174,14 +4245,14 @@ let%test_module "transaction_snark" = Test_util.with_randomness 123456789 (fun () -> let wallets = random_wallets () in (*let state_body = Lazy.force state_body in - let state_body_hash = Lazy.force state_body_hash in*) + let state_body_hash = Lazy.force state_body_hash in*) let state_body_hash1, state_body1 = state_hash_and_body1 in let 
state_body_hash2, state_body2 = state_hash_and_body2 in Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> - Array.iter wallets ~f:(fun {account; private_key= _} -> + Array.iter wallets ~f:(fun { account; private_key = _ } -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; let memo = Signed_command_memo.create_by_digesting_string_exn (Test_util.arbitrary_string @@ -4217,15 +4288,14 @@ let%test_module "transaction_snark" = *) Signed_command.accounts_accessed ~next_available_token:next_available_token1 - (Signed_command.forget_check t) ) - [t1; t2]) + (Signed_command.forget_check t)) + [ t1; t2 ]) in let init_stack1 = Pending_coinbase.Stack.empty in let pending_coinbase_stack_state1 = (* No coinbase to add to the stack. *) let stack_with_state = - Pending_coinbase.Stack.push_state state_body_hash1 - init_stack1 + Pending_coinbase.Stack.push_state state_body_hash1 init_stack1 in (* Since protocol state body is added once per block, the source would already have the state if [carryforward=true] @@ -4237,9 +4307,10 @@ let%test_module "transaction_snark" = if carryforward1 then (stack_with_state, stack_with_state) else (init_stack1, stack_with_state) in - { Pc_with_init_stack.pc= - {source= source_stack; target= target_stack} - ; init_stack= init_stack1 } + { Pc_with_init_stack.pc = + { source = source_stack; target = target_stack } + ; init_stack = init_stack1 + } in let proof12 = of_user_command' sok_digest ledger t1 @@ -4281,9 +4352,10 @@ let%test_module "transaction_snark" = , previous_stack , state_body2 ) in - ( { Pc_with_init_stack.pc= - {source= source_stack; target= target_stack} - ; init_stack } + ( { Pc_with_init_stack.pc = + { source = source_stack; target = target_stack } + ; init_stack + } , state_body2 ) in ignore @@ -4319,12 +4391,12 @@ let%test_module "transaction_snark" = (Sparse_ledger.merkle_root sparse_ledger) ; let proof13 = Async.Thread_safe.block_on_async_exn (fun () -> - merge ~sok_digest proof12 
proof23 ) + merge ~sok_digest proof12 proof23) |> Or_error.ok_exn in Async.Thread_safe.block_on_async (fun () -> - Proof.verify [(proof13.statement, proof13.proof)] ) - |> Result.ok_exn ) ) + Proof.verify [ (proof13.statement, proof13.proof) ]) + |> Result.ok_exn)) let%test "base_and_merge: transactions in one block (t1,t2 in b1), \ carryforward the state from a previous transaction t0 in b1" = @@ -4401,7 +4473,7 @@ let%test_module "transaction_snark" = Array.iter accounts ~f:(fun account -> Ledger.create_new_account_exn ledger (Account.identifier account) - account ) ; + account) ; let get_account aid = Option.bind (Ledger.location_of_account ledger aid) @@ -4412,13 +4484,13 @@ let%test_module "transaction_snark" = | Some nonce -> nonce | None -> ( - match get_account (Account_id.create fee_payer_pk fee_token) with - | Some {nonce; _} -> - nonce - | None -> - failwith - "Could not infer a valid nonce for this test. Provide one \ - explicitly" ) + match get_account (Account_id.create fee_payer_pk fee_token) with + | Some { nonce; _ } -> + nonce + | None -> + failwith + "Could not infer a valid nonce for this test. 
Provide one \ + explicitly" ) in let payload = Signed_command.Payload.create ~fee ~fee_payer_pk ~fee_token ~nonce @@ -4461,12 +4533,11 @@ let%test_module "transaction_snark" = let source_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account source_pk token_id 30_000_000_000 |] + ; create_account source_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let amount = @@ -4477,7 +4548,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4488,7 +4559,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.equal accounts.(1).balance source_account.balance) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "transfer non-default tokens to an existing account" = Test_util.with_randomness 123456789 (fun () -> @@ -4499,13 +4570,12 @@ let%test_module "transaction_snark" = let source_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; create_account source_pk token_id 30_000_000_000 
- ; create_account receiver_pk token_id 0 |] + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let amount = @@ -4516,7 +4586,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4536,8 +4606,8 @@ let%test_module "transaction_snark" = accounts.(2).balance |> add_amount amount in assert ( - Balance.equal receiver_account.balance - expected_receiver_balance ) ) ) + Balance.equal receiver_account.balance expected_receiver_balance + ))) let%test_unit "insufficient account creation fee for non-default token \ transfer" = @@ -4549,12 +4619,11 @@ let%test_module "transaction_snark" = let source_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account source_pk token_id 30_000_000_000 |] + ; create_account source_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int 20_000_000_000 in let amount = @@ -4565,7 +4634,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4578,10 +4647,9 @@ 
let%test_module "transaction_snark" = let expected_source_balance = accounts.(1).balance in assert ( Balance.equal source_account.balance expected_source_balance ) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) - let%test_unit "insufficient source balance for non-default token transfer" - = + let%test_unit "insufficient source balance for non-default token transfer" = Test_util.with_randomness 123456789 (fun () -> Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> let wallets = random_wallets ~n:2 () in @@ -4590,12 +4658,11 @@ let%test_module "transaction_snark" = let source_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account source_pk token_id 30_000_000_000 |] + ; create_account source_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let amount = Amount.of_int 40_000_000_000 in @@ -4604,7 +4671,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4617,7 +4684,7 @@ let%test_module "transaction_snark" = let expected_source_balance = accounts.(1).balance in assert ( Balance.equal source_account.balance expected_source_balance ) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "transfer non-existing source" = Test_util.with_randomness 123456789 (fun () -> 
@@ -4628,11 +4695,9 @@ let%test_module "transaction_snark" = let source_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let amount = Amount.of_int 20_000_000_000 in @@ -4641,7 +4706,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let expected_fee_payer_balance = @@ -4651,7 +4716,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Option.is_none source_account) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "payment predicate failure" = Test_util.with_randomness 123456789 (fun () -> @@ -4662,12 +4727,11 @@ let%test_module "transaction_snark" = let source_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account source_pk token_id 30_000_000_000 |] + ; create_account source_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let amount = Amount.of_int 20_000_000_000 in @@ -4676,7 
+4740,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Payment {source_pk; receiver_pk; token_id; amount}) + (Payment { source_pk; receiver_pk; token_id; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4689,7 +4753,7 @@ let%test_module "transaction_snark" = let expected_source_balance = accounts.(1).balance in assert ( Balance.equal source_account.balance expected_source_balance ) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "delegation predicate failure" = Test_util.with_randomness 123456789 (fun () -> @@ -4704,7 +4768,8 @@ let%test_module "transaction_snark" = let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; create_account source_pk token_id 30_000_000_000 - ; create_account receiver_pk token_id 30_000_000_000 |] + ; create_account receiver_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4714,7 +4779,7 @@ let%test_module "transaction_snark" = ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Stake_delegation (Set_delegate - {delegator= source_pk; new_delegate= receiver_pk})) + { delegator = source_pk; new_delegate = receiver_pk })) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4728,7 +4793,7 @@ let%test_module "transaction_snark" = Public_key.Compressed.equal (Option.value_exn source_account.delegate) source_pk ) ; - assert (Option.is_some receiver_account) ) ) + assert (Option.is_some receiver_account))) let%test_unit "delegation delegatee does not exist" = Test_util.with_randomness 123456789 (fun () -> @@ -4740,7 +4805,7 @@ let%test_module "transaction_snark" = let 
receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4750,7 +4815,7 @@ let%test_module "transaction_snark" = ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Stake_delegation (Set_delegate - {delegator= source_pk; new_delegate= receiver_pk})) + { delegator = source_pk; new_delegate = receiver_pk })) in let fee_payer_account = Option.value_exn fee_payer_account in let source_account = Option.value_exn source_account in @@ -4764,7 +4829,7 @@ let%test_module "transaction_snark" = Public_key.Compressed.equal (Option.value_exn source_account.delegate) source_pk ) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "delegation delegator does not exist" = Test_util.with_randomness 123456789 (fun () -> @@ -4778,7 +4843,8 @@ let%test_module "transaction_snark" = let token_id = Token_id.default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account receiver_pk token_id 30_000_000_000 |] + ; create_account receiver_pk token_id 30_000_000_000 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4788,7 +4854,7 @@ let%test_module "transaction_snark" = ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Stake_delegation (Set_delegate - {delegator= source_pk; new_delegate= receiver_pk})) + { delegator = source_pk; new_delegate = receiver_pk })) in let fee_payer_account = Option.value_exn fee_payer_account in let expected_fee_payer_balance = @@ -4798,7 +4864,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Option.is_none source_account) ; - assert (Option.is_some receiver_account) ) 
) + assert (Option.is_some receiver_account))) let%test_unit "timed account - transactions" = Test_util.with_randomness 123456789 (fun () -> @@ -4822,12 +4888,13 @@ let%test_module "transaction_snark" = let txn_global_slot = Global_slot.of_int 1002 in let sender = { sender with - account= + account = Or_error.ok_exn @@ Account.create_timed (Account.identifier sender.account) balance ~initial_minimum_balance ~cliff_time ~cliff_amount - ~vesting_period ~vesting_increment } + ~vesting_period ~vesting_increment + } in Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> let _, ucs = @@ -4846,7 +4913,7 @@ let%test_module "transaction_snark" = amount (Fee.of_int txn_fee) ~fee_token:Token_id.default ~token:Token_id.default nonce memo in - (Account.Nonce.succ nonce, txns @ [uc]) ) + (Account.Nonce.succ nonce, txns @ [ uc ])) in Ledger.create_new_account_exn ledger (Account.identifier sender.account) @@ -4854,19 +4921,19 @@ let%test_module "transaction_snark" = let () = List.iter ucs ~f:(fun uc -> test_transaction ~constraint_constants ~txn_global_slot - ledger (Transaction.Command (Signed_command uc)) ) + ledger (Transaction.Command (Signed_command uc))) in List.iter receivers ~f:(fun receiver -> check_balance (Account.identifier receiver.account) ((amount * txns_per_receiver) - account_fee) - ledger ) ; + ledger) ; check_balance (Account.identifier sender.account) ( Balance.to_int sender.account.balance - - (amount + txn_fee) * txns_per_receiver - * List.length receivers ) - ledger ) ) + - (amount + txn_fee) * txns_per_receiver * List.length receivers + ) + ledger)) let%test_unit "create own new token" = Test_util.with_randomness 123456789 (fun () -> @@ -4878,7 +4945,7 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let fee_token = Token_id.default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 
1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4887,7 +4954,7 @@ let%test_module "transaction_snark" = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Create_new_token - {token_owner_pk; disable_new_accounts= false}) + { token_owner_pk; disable_new_accounts = false }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -4902,7 +4969,7 @@ let%test_module "transaction_snark" = assert (Option.is_none token_owner_account.delegate) ; assert ( Token_permissions.equal token_owner_account.token_permissions - (Token_owned {disable_new_accounts= false}) ) ) ) + (Token_owned { disable_new_accounts = false }) ))) let%test_unit "create new token for a different pk" = Test_util.with_randomness 123456789 (fun () -> @@ -4914,7 +4981,7 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4923,7 +4990,7 @@ let%test_module "transaction_snark" = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Create_new_token - {token_owner_pk; disable_new_accounts= false}) + { token_owner_pk; disable_new_accounts = false }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -4938,7 +5005,7 @@ let%test_module "transaction_snark" = assert (Option.is_none token_owner_account.delegate) ; assert ( Token_permissions.equal token_owner_account.token_permissions - (Token_owned {disable_new_accounts= false}) ) ) ) + (Token_owned { disable_new_accounts = false }) ))) let%test_unit 
"create new token for a different pk new accounts disabled" = Test_util.with_randomness 123456789 (fun () -> @@ -4950,7 +5017,7 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -4958,7 +5025,8 @@ let%test_module "transaction_snark" = , `Receiver_account _also_token_owner_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Create_new_token {token_owner_pk; disable_new_accounts= true}) + (Create_new_token + { token_owner_pk; disable_new_accounts = true }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -4973,7 +5041,7 @@ let%test_module "transaction_snark" = assert (Option.is_none token_owner_account.delegate) ; assert ( Token_permissions.equal token_owner_account.token_permissions - (Token_owned {disable_new_accounts= true}) ) ) ) + (Token_owned { disable_new_accounts = true }) ))) let%test_unit "create own new token account" = Test_util.with_randomness 123456789 (fun () -> @@ -4985,14 +5053,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = fee_payer_pk in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int 
(random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5004,7 +5072,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5021,7 +5090,7 @@ let%test_module "transaction_snark" = assert (Option.is_none receiver_account.delegate) ; assert ( Token_permissions.equal receiver_account.token_permissions - (Not_owned {account_disabled= false}) ) ) ) + (Not_owned { account_disabled = false }) ))) let%test_unit "create new token account for a different pk" = Test_util.with_randomness 123456789 (fun () -> @@ -5033,14 +5102,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5052,7 +5121,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5069,7 +5139,7 @@ let%test_module "transaction_snark" = assert (Option.is_none receiver_account.delegate) ; assert ( Token_permissions.equal receiver_account.token_permissions - (Not_owned 
{account_disabled= false}) ) ) ) + (Not_owned { account_disabled = false }) ))) let%test_unit "create new token account for a different pk in a locked \ token" = @@ -5082,14 +5152,14 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= Token_owned {disable_new_accounts= true} - } |] + token_permissions = + Token_owned { disable_new_accounts = true } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5101,7 +5171,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5118,7 +5189,7 @@ let%test_module "transaction_snark" = assert (Option.is_none receiver_account.delegate) ; assert ( Token_permissions.equal receiver_account.token_permissions - (Not_owned {account_disabled= false}) ) ) ) + (Not_owned { account_disabled = false }) ))) let%test_unit "create new own locked token account in a locked token" = Test_util.with_randomness 123456789 (fun () -> @@ -5130,14 +5201,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = fee_payer_pk in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk 
token_id 0) with - token_permissions= Token_owned {disable_new_accounts= true} - } |] + token_permissions = + Token_owned { disable_new_accounts = true } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5149,7 +5220,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= true }) + ; account_disabled = true + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5166,7 +5238,7 @@ let%test_module "transaction_snark" = assert (Option.is_none receiver_account.delegate) ; assert ( Token_permissions.equal receiver_account.token_permissions - (Not_owned {account_disabled= true}) ) ) ) + (Not_owned { account_disabled = true }) ))) let%test_unit "create new token account fails for locked token, non-owner \ fee-payer" = @@ -5179,14 +5251,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= Token_owned {disable_new_accounts= true} - } |] + token_permissions = + Token_owned { disable_new_accounts = true } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5198,7 +5270,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5209,7 +5282,7 @@ let%test_module 
"transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.(equal zero) token_owner_account.balance) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "create new locked token account fails for unlocked token, \ non-owner fee-payer" = @@ -5222,14 +5295,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5241,7 +5314,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= true }) + ; account_disabled = true + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5252,7 +5326,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.(equal zero) token_owner_account.balance) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "create new token account fails if account exists" = Test_util.with_randomness 123456789 (fun () -> @@ -5264,15 +5338,15 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - 
Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } - ; create_account receiver_pk token_id 0 |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5284,7 +5358,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5297,7 +5372,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.(equal zero) token_owner_account.balance) ; - assert (Balance.(equal zero) receiver_account.balance) ) ) + assert (Balance.(equal zero) receiver_account.balance))) let%test_unit "create new token account fails if receiver is token owner" = Test_util.with_randomness 123456789 (fun () -> @@ -5309,14 +5384,14 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = token_owner_pk in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( 
`Fee_payer_account fee_payer_account @@ -5328,7 +5403,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5341,7 +5417,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.(equal zero) token_owner_account.balance) ; - assert (Balance.(equal zero) receiver_account.balance) ) ) + assert (Balance.(equal zero) receiver_account.balance))) let%test_unit "create new token account fails if claimed token owner \ doesn't own the token" = @@ -5354,12 +5430,11 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = wallets.(2).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account token_owner_pk token_id 0 |] + ; create_account token_owner_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5371,7 +5446,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5383,7 +5459,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Balance.(equal zero) token_owner_account.balance) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "create new token account fails if 
claimed token owner is \ also the account creation target and does not exist" = @@ -5394,11 +5470,9 @@ let%test_module "transaction_snark" = (* Fee-payer, receiver, and token owner are the same. *) let fee_payer_pk = wallets.(0).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5407,10 +5481,11 @@ let%test_module "transaction_snark" = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token (Create_token_account - { token_owner_pk= fee_payer_pk + { token_owner_pk = fee_payer_pk ; token_id - ; receiver_pk= fee_payer_pk - ; account_disabled= false }) + ; receiver_pk = fee_payer_pk + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in (* No account creation fee: the command fails. 
*) @@ -5421,7 +5496,7 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert (Option.is_none token_owner_account) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "create new token account works for default token" = Test_util.with_randomness 123456789 (fun () -> @@ -5435,7 +5510,7 @@ let%test_module "transaction_snark" = let fee_token = Token_id.default in let token_id = Token_id.default in let accounts = - [|create_account fee_payer_pk fee_token 20_000_000_000|] + [| create_account fee_payer_pk fee_token 20_000_000_000 |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5447,7 +5522,8 @@ let%test_module "transaction_snark" = { token_owner_pk ; token_id ; receiver_pk - ; account_disabled= false }) + ; account_disabled = false + }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5464,7 +5540,7 @@ let%test_module "transaction_snark" = (Option.value_exn receiver_account.delegate) ) ; assert ( Token_permissions.equal receiver_account.token_permissions - (Not_owned {account_disabled= false}) ) ) ) + (Not_owned { account_disabled = false }) ))) let%test_unit "mint tokens in owner's account" = Test_util.with_randomness 123456789 (fun () -> @@ -5476,17 +5552,17 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = fee_payer_pk in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + 
token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5494,7 +5570,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5508,8 +5584,8 @@ let%test_module "transaction_snark" = Balance.equal fee_payer_account.balance expected_fee_payer_balance ) ; assert ( - Balance.equal expected_receiver_balance - receiver_account.balance ) ) ) + Balance.equal expected_receiver_balance receiver_account.balance + ))) let%test_unit "mint tokens in another pk's account" = Test_util.with_randomness 123456789 (fun () -> @@ -5521,18 +5597,18 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } - ; create_account receiver_pk token_id 0 |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5540,7 +5616,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account 
) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5558,8 +5634,8 @@ let%test_module "transaction_snark" = Balance.equal accounts.(1).balance token_owner_account.balance ) ; assert ( - Balance.equal expected_receiver_balance - receiver_account.balance ) ) ) + Balance.equal expected_receiver_balance receiver_account.balance + ))) let%test_unit "mint tokens fails if the claimed token owner is not the \ token owner" = @@ -5572,16 +5648,15 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; create_account token_owner_pk token_id 0 - ; create_account receiver_pk token_id 0 |] + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5589,7 +5664,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5604,8 +5679,7 @@ let%test_module "transaction_snark" = Balance.equal 
accounts.(1).balance token_owner_account.balance ) ; assert ( - Balance.equal accounts.(2).balance receiver_account.balance ) - ) ) + Balance.equal accounts.(2).balance receiver_account.balance ))) let%test_unit "mint tokens fails if the token owner account is not present" = @@ -5618,15 +5692,14 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 - ; create_account receiver_pk token_id 0 |] + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5634,7 +5707,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5646,11 +5719,10 @@ let%test_module "transaction_snark" = expected_fee_payer_balance ) ; assert (Option.is_none token_owner_account) ; assert ( - Balance.equal accounts.(1).balance receiver_account.balance ) - ) ) + Balance.equal accounts.(1).balance receiver_account.balance ))) - let%test_unit "mint tokens fails if the fee-payer does not have \ - permission to mint" = + let%test_unit "mint tokens fails if the fee-payer does not have permission \ + to mint" = Test_util.with_randomness 123456789 (fun () -> Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> let wallets = 
random_wallets ~n:2 () in @@ -5660,18 +5732,18 @@ let%test_module "transaction_snark" = let token_owner_pk = wallets.(1).account.public_key in let receiver_pk = fee_payer_pk in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } - ; create_account receiver_pk token_id 0 |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + ; create_account receiver_pk token_id 0 + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5679,7 +5751,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let receiver_account = Option.value_exn receiver_account in @@ -5694,8 +5766,7 @@ let%test_module "transaction_snark" = Balance.equal accounts.(1).balance token_owner_account.balance ) ; assert ( - Balance.equal accounts.(2).balance receiver_account.balance ) - ) ) + Balance.equal accounts.(2).balance receiver_account.balance ))) let%test_unit "mint tokens fails if the receiver account is not present" = Test_util.with_randomness 123456789 (fun () -> @@ -5707,17 +5778,17 @@ let%test_module "transaction_snark" = let token_owner_pk = fee_payer_pk in let receiver_pk = wallets.(1).account.public_key in let fee_token = Token_id.default in - let token_id = - Quickcheck.random_value 
Token_id.gen_non_default - in + let token_id = Quickcheck.random_value Token_id.gen_non_default in let amount = Amount.of_int (random_int_incl 2 15 * 1_000_000_000) in let accounts = [| create_account fee_payer_pk fee_token 20_000_000_000 ; { (create_account token_owner_pk token_id 0) with - token_permissions= - Token_owned {disable_new_accounts= false} } |] + token_permissions = + Token_owned { disable_new_accounts = false } + } + |] in let fee = Fee.of_int (random_int_incl 2 15 * 1_000_000_000) in let ( `Fee_payer_account fee_payer_account @@ -5725,7 +5796,7 @@ let%test_module "transaction_snark" = , `Receiver_account receiver_account ) = test_user_command_with_accounts ~constraint_constants ~ledger ~accounts ~signer ~fee ~fee_payer_pk ~fee_token - (Mint_tokens {token_owner_pk; token_id; receiver_pk; amount}) + (Mint_tokens { token_owner_pk; token_id; receiver_pk; amount }) in let fee_payer_account = Option.value_exn fee_payer_account in let token_owner_account = Option.value_exn token_owner_account in @@ -5738,14 +5809,14 @@ let%test_module "transaction_snark" = assert ( Balance.equal accounts.(1).balance token_owner_account.balance ) ; - assert (Option.is_none receiver_account) ) ) + assert (Option.is_none receiver_account))) let%test_unit "unchanged timings for fee transfers and coinbase" = Test_util.with_randomness 123456789 (fun () -> let receivers = Array.init 2 ~f:(fun _ -> Public_key.of_private_key_exn (Private_key.create ()) - |> Public_key.compress ) + |> Public_key.compress) in let timed_account pk = let account_id = Account_id.create pk Token_id.default in @@ -5792,17 +5863,18 @@ let%test_module "transaction_snark" = [ Fee_transfer ft1 ; Fee_transfer ft2 ; Coinbase coinbase_with_ft - ; Coinbase coinbase_wo_ft ] + ; Coinbase coinbase_wo_ft + ] in Ledger.with_ledger ~depth:ledger_depth ~f:(fun ledger -> - List.iter [timed_account1; timed_account2] ~f:(fun acc -> + List.iter [ timed_account1; timed_account2 ] ~f:(fun acc -> Ledger.create_new_account_exn 
ledger (Account.identifier acc) - acc ) ; + acc) ; (* well over the vesting period, the timing field shouldn't change*) let txn_global_slot = Global_slot.of_int 100 in List.iter transactions ~f:(fun txn -> - test_transaction ~txn_global_slot ~constraint_constants - ledger txn ) ) ) + test_transaction ~txn_global_slot ~constraint_constants ledger + txn))) end ) let%test_module "account timing check" = @@ -5842,7 +5914,7 @@ let%test_module "account timing check" = let snarky_bools = List.map bools ~f:(fun b -> let open Tick.Boolean in - if b then true_ else false_ ) + if b then true_ else false_) in let bitstring_lsb = Bitstring_lib.Bitstring.Lsb_first.of_list snarky_bools @@ -5860,9 +5932,7 @@ let%test_module "account timing check" = let%bind () = as_prover As_prover.( - let%map checked_timing = - read Account.Timing.typ checked_timing - in + let%map checked_timing = read Account.Timing.typ checked_timing in assert (Account.Timing.equal checked_timing unchecked_timing)) in let%bind checked_min_balance = @@ -5871,13 +5941,12 @@ let%test_module "account timing check" = in let%bind unchecked_min_balance_as_snarky_integer = Run.make_checked (fun () -> - snarky_integer_of_bools (Balance.to_bits unchecked_min_balance) - ) + snarky_integer_of_bools (Balance.to_bits unchecked_min_balance)) in let%map equal_balances_checked = Run.make_checked (fun () -> Snarky_integer.Integer.equal ~m checked_min_balance - unchecked_min_balance_as_snarky_integer ) + unchecked_min_balance_as_snarky_integer) in Snarky_backendless.As_prover.read Tick.Boolean.typ equal_balances_checked @@ -5948,7 +6017,7 @@ let%test_module "account timing check" = (* we're 900 slots past the cliff, which is 90 vesting periods subtract 90 * 100 = 9,000 from init min balance of 10,000 to get 1000 so we should still be timed - *) + *) match timing_with_min_balance with | Ok ((Timed _ as unchecked_timing), `Min_balance unchecked_min_balance) -> @@ -5979,7 +6048,7 @@ let%test_module "account timing check" = (* we're 
2_000 - 1_000 = 1_000 slots past the cliff, which is 100 vesting periods subtract 100 * 100_000_000_000 = 10_000_000_000_000 from init min balance of 10_000_000_000 to get zero, so we should be untimed now - *) + *) match timing_with_min_balance with | Ok ((Untimed as unchecked_timing), `Min_balance unchecked_min_balance) -> @@ -6158,11 +6227,11 @@ let%test_module "transaction_undos" = let account_id = Account_id.create sender_pk Token_id.default in if List.find cmds ~f:(fun cmd -> - Account_id.equal (User_command.fee_payer cmd) account_id ) + Account_id.equal (User_command.fee_payer cmd) account_id) |> Option.is_some then None else if Currency.Amount.(balance >= amount) then Some s - else None ) + else None) in let new_cmds = let source_accounts = @@ -6171,7 +6240,7 @@ let%test_module "transaction_undos" = assert (not (List.is_empty source_accounts)) ; let new_keys = List.init (List.length source_accounts) ~f:(fun _ -> - Signature_lib.Keypair.create () ) + Signature_lib.Keypair.create ()) in List.map (List.zip_exn source_accounts new_keys) ~f:(fun ((s, _, nonce, _), r) -> @@ -6184,13 +6253,14 @@ let%test_module "transaction_undos" = ~valid_until:None ~body: (Payment - { source_pk= sender_pk - ; receiver_pk= reciever_pk - ; token_id= Token_id.default - ; amount }) + { source_pk = sender_pk + ; receiver_pk = reciever_pk + ; token_id = Token_id.default + ; amount + }) in let c = Signed_command.sign s payload in - User_command.Signed_command (Signed_command.forget_check c) ) + User_command.Signed_command (Signed_command.forget_check c)) in List.map ~f:(fun c -> Transaction.Command c) (cmds @ new_cmds) @@ -6229,7 +6299,7 @@ let%test_module "transaction_undos" = let keypair, _, _, _ = Array.random_element_exn ledger_init_state in - keypair )) + keypair)) remaining in List.map @@ -6251,7 +6321,7 @@ let%test_module "transaction_undos" = let keypair, _, _, _ = Array.random_element_exn ledger_init_state in - keypair ) + keypair) in let min_amount = Option.value_exn @@ 
-6291,13 +6361,13 @@ let%test_module "transaction_undos" = let test_undos ledger transactions = let res = List.fold ~init:[] transactions ~f:(fun acc t -> - test_undo ledger t :: acc ) + test_undo ledger t :: acc) in List.iter res ~f:(fun (root_before, u) -> let () = Ledger.undo ~constraint_constants ledger u |> Or_error.ok_exn in - assert (Ledger_hash.equal (Ledger.merkle_root ledger) root_before) ) + assert (Ledger_hash.equal (Ledger.merkle_root ledger) root_before)) let%test_unit "undo_coinbase" = let gen = @@ -6312,7 +6382,7 @@ let%test_module "transaction_undos" = Ledger.with_ephemeral_ledger ~depth:constraint_constants.ledger_depth ~f:(fun ledger -> Ledger.apply_initial_ledger_state ledger ledger_init_state ; - test_undos ledger coinbase_list ) ) + test_undos ledger coinbase_list)) let%test_unit "undo_fee_transfers" = let gen = @@ -6327,7 +6397,7 @@ let%test_module "transaction_undos" = Ledger.with_ephemeral_ledger ~depth:constraint_constants.ledger_depth ~f:(fun ledger -> Ledger.apply_initial_ledger_state ledger ledger_init_state ; - test_undos ledger ft_list ) ) + test_undos ledger ft_list)) let%test_unit "undo_user_commands" = let gen = @@ -6342,7 +6412,7 @@ let%test_module "transaction_undos" = Ledger.with_ephemeral_ledger ~depth:constraint_constants.ledger_depth ~f:(fun ledger -> Ledger.apply_initial_ledger_state ledger ledger_init_state ; - test_undos ledger cmd_list ) ) + test_undos ledger cmd_list)) let%test_unit "undo_all_txns" = let gen = @@ -6366,5 +6436,5 @@ let%test_module "transaction_undos" = Ledger.with_ephemeral_ledger ~depth:constraint_constants.ledger_depth ~f:(fun ledger -> Ledger.apply_initial_ledger_state ledger ledger_init_state ; - test_undos ledger txn_list ) ) + test_undos ledger txn_list)) end ) diff --git a/src/lib/transaction_snark/transaction_snark.mli b/src/lib/transaction_snark/transaction_snark.mli index 2101609a1ed..3bbccc1dccf 100644 --- a/src/lib/transaction_snark/transaction_snark.mli +++ 
b/src/lib/transaction_snark/transaction_snark.mli @@ -21,7 +21,7 @@ module Pending_coinbase_stack_state : sig module Stable : sig module V1 : sig type 'pending_coinbase t = - {source: 'pending_coinbase; target: 'pending_coinbase} + { source : 'pending_coinbase; target : 'pending_coinbase } [@@deriving compare, equal, fields, hash, sexp, yojson] val to_latest : @@ -37,7 +37,7 @@ module Pending_coinbase_stack_state : sig end type 'pending_coinbase poly = 'pending_coinbase Poly.t = - {source: 'pending_coinbase; target: 'pending_coinbase} + { source : 'pending_coinbase; target : 'pending_coinbase } [@@deriving sexp, hash, compare, equal, fields, yojson] [%%versioned: @@ -71,14 +71,15 @@ module Statement : sig , 'token_id , 'sok_digest ) t = - { source: 'ledger_hash - ; target: 'ledger_hash - ; supply_increase: 'amount - ; pending_coinbase_stack_state: 'pending_coinbase - ; fee_excess: 'fee_excess - ; next_available_token_before: 'token_id - ; next_available_token_after: 'token_id - ; sok_digest: 'sok_digest } + { source : 'ledger_hash + ; target : 'ledger_hash + ; supply_increase : 'amount + ; pending_coinbase_stack_state : 'pending_coinbase + ; fee_excess : 'fee_excess + ; next_available_token_before : 'token_id + ; next_available_token_after : 'token_id + ; sok_digest : 'sok_digest + } [@@deriving compare, equal, hash, sexp, yojson] val to_latest : @@ -120,14 +121,15 @@ module Statement : sig , 'token_id , 'sok_digest ) Poly.t = - { source: 'ledger_hash - ; target: 'ledger_hash - ; supply_increase: 'amount - ; pending_coinbase_stack_state: 'pending_coinbase - ; fee_excess: 'fee_excess - ; next_available_token_before: 'token_id - ; next_available_token_after: 'token_id - ; sok_digest: 'sok_digest } + { source : 'ledger_hash + ; target : 'ledger_hash + ; supply_increase : 'amount + ; pending_coinbase_stack_state : 'pending_coinbase + ; fee_excess : 'fee_excess + ; next_available_token_before : 'token_id + ; next_available_token_after : 'token_id + ; sok_digest : 
'sok_digest + } [@@deriving compare, equal, hash, sexp, yojson] [%%versioned: @@ -349,7 +351,8 @@ module Make (Inputs : sig val constraint_constants : Genesis_constants.Constraint_constants.t val proof_level : Genesis_constants.Proof_level.t -end) : S [@@warning "-67"] +end) : S +[@@warning "-67"] val constraint_system_digests : constraint_constants:Genesis_constants.Constraint_constants.t diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml index 13da06b3592..bd7e21b06a8 100644 --- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml +++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml @@ -11,7 +11,7 @@ let map2_or_error xs ys ~f = | [], [] -> Ok (List.rev acc) | x :: xs, y :: ys -> ( - match f x y with Error e -> Error e | Ok z -> go xs ys (z :: acc) ) + match f x y with Error e -> Error e | Ok z -> go xs ys (z :: acc) ) | _, _ -> Or_error.error_string "Length mismatch" in @@ -37,16 +37,17 @@ module Transaction_with_witness = struct witness and the transaction *) type t = - { transaction_with_info: + { transaction_with_info : Transaction_logic.Transaction_applied.Stable.V1.t - ; state_hash: State_hash.Stable.V1.t * State_body_hash.Stable.V1.t + ; state_hash : State_hash.Stable.V1.t * State_body_hash.Stable.V1.t (* TODO: It's inefficient to store this here. Optimize it someday. 
*) - ; state_view: Mina_base.Snapp_predicate.Protocol_state.View.Stable.V1.t - ; statement: Transaction_snark.Statement.Stable.V1.t - ; init_stack: + ; state_view : Mina_base.Snapp_predicate.Protocol_state.View.Stable.V1.t + ; statement : Transaction_snark.Statement.Stable.V1.t + ; init_stack : Transaction_snark.Pending_coinbase_stack_state.Init_stack.Stable.V1 .t - ; ledger_witness: Mina_base.Sparse_ledger.Stable.V1.t [@sexp.opaque] } + ; ledger_witness : Mina_base.Sparse_ledger.Stable.V1.t [@sexp.opaque] + } [@@deriving sexp] let to_latest = Fn.id @@ -80,7 +81,7 @@ module Job_view = struct type t = Transaction_snark.Statement.t Parallel_scan.Job_view.t [@@deriving sexp] - let to_yojson ({value; position} : t) : Yojson.Safe.t = + let to_yojson ({ value; position } : t) : Yojson.Safe.t = let hash_yojson h = Frozen_ledger_hash.to_yojson h in let statement_to_yojson (s : Transaction_snark.Statement.t) = `Assoc @@ -96,21 +97,23 @@ module Job_view = struct ; `Assoc [ ("token", Token_id.to_yojson s.fee_excess.fee_token_r) ; ("amount", Fee.Signed.to_yojson s.fee_excess.fee_excess_r) - ] ] ) + ] + ] ) ; ("Supply Increase", Currency.Amount.to_yojson s.supply_increase) ; ( "Pending coinbase stack" , Transaction_snark.Pending_coinbase_stack_state.to_yojson - s.pending_coinbase_stack_state ) ] + s.pending_coinbase_stack_state ) + ] in let job_to_yojson = match value with | BEmpty -> - `Assoc [("B", `List [])] + `Assoc [ ("B", `List []) ] | MEmpty -> - `Assoc [("M", `List [])] + `Assoc [ ("M", `List []) ] | MPart x -> - `Assoc [("M", `List [statement_to_yojson x])] - | MFull (x, y, {seq_no; status}) -> + `Assoc [ ("M", `List [ statement_to_yojson x ]) ] + | MFull (x, y, { seq_no; status }) -> `Assoc [ ( "M" , `List @@ -119,9 +122,11 @@ module Job_view = struct ; `Int seq_no ; `Assoc [ ( "Status" - , `String (Parallel_scan.Job_status.to_string status) - ) ] ] ) ] - | BFull (x, {seq_no; status}) -> + , `String (Parallel_scan.Job_status.to_string status) ) + ] + ] ) + ] + | BFull 
(x, { seq_no; status }) -> `Assoc [ ( "B" , `List @@ -129,10 +134,12 @@ module Job_view = struct ; `Int seq_no ; `Assoc [ ( "Status" - , `String (Parallel_scan.Job_status.to_string status) - ) ] ] ) ] + , `String (Parallel_scan.Job_status.to_string status) ) + ] + ] ) + ] in - `List [`Int position; job_to_yojson] + `List [ `Int position; job_to_yojson ] end type job = Available_job.t [@@deriving sexp] @@ -163,8 +170,7 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(hash)] +[%%define_locally Stable.Latest.(hash)] (**********Helpers*************) @@ -174,7 +180,8 @@ let create_expected_statement ~constraint_constants ; state_view ; ledger_witness ; init_stack - ; statement } = + ; statement + } = let open Or_error.Let_syntax in let source = Frozen_ledger_hash.of_ledger_hash @@ -183,13 +190,13 @@ let create_expected_statement ~constraint_constants let next_available_token_before = Sparse_ledger.next_available_token ledger_witness in - let {With_status.data= transaction; status= _} = + let { With_status.data = transaction; status = _ } = Ledger.Transaction_applied.transaction transaction_with_info in let%bind after = Or_error.try_with (fun () -> Sparse_ledger.apply_transaction_exn ~constraint_constants - ~txn_state_view:state_view ledger_witness transaction ) + ~txn_state_view:state_view ledger_witness transaction) in let target = Frozen_ledger_hash.of_ledger_hash @@ Sparse_ledger.merkle_root after @@ -201,8 +208,8 @@ let create_expected_statement ~constraint_constants Ok source | Merge -> Or_error.errorf - !"Invalid init stack in Pending coinbase stack state . Expected \ - Base found Merge" + !"Invalid init stack in Pending coinbase stack state . 
Expected Base \ + found Merge" in let pending_coinbase_after = let state_body_hash = snd state_hash in @@ -223,17 +230,19 @@ let create_expected_statement ~constraint_constants ; next_available_token_before ; next_available_token_after ; supply_increase - ; pending_coinbase_stack_state= + ; pending_coinbase_stack_state = { statement.pending_coinbase_stack_state with - target= pending_coinbase_after } - ; sok_digest= () } + target = pending_coinbase_after + } + ; sok_digest = () + } let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) : 'a Or_error.t = let sok_digest = Ledger_proof.sok_digest current_proof and proof = Ledger_proof.underlying_proof current_proof in match job with - | Base {statement; _} -> + | Base { statement; _ } -> let ledger_proof = Ledger_proof.create ~statement ~sok_digest ~proof in Ok (ledger_proof, Sok_message.create ~fee ~prover) | Merge ((p, _), (p', _)) -> @@ -259,16 +268,18 @@ let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) : "Statements have incompatible next_available_token state" in let statement : Transaction_snark.Statement.t = - { source= s.source - ; target= s'.target + { source = s.source + ; target = s'.target ; supply_increase - ; pending_coinbase_stack_state= - { source= s.pending_coinbase_stack_state.source - ; target= s'.pending_coinbase_stack_state.target } + ; pending_coinbase_stack_state = + { source = s.pending_coinbase_stack_state.source + ; target = s'.pending_coinbase_stack_state.target + } ; fee_excess - ; next_available_token_before= s.next_available_token_before - ; next_available_token_after= s'.next_available_token_after - ; sok_digest= () } + ; next_available_token_before = s.next_available_token_before + ; next_available_token_after = s'.next_available_token_after + ; sok_digest = () + } in ( Ledger_proof.create ~statement ~sok_digest ~proof , Sok_message.create ~fee ~prover ) @@ -284,9 +295,9 @@ end module Make_statement_scanner (M : Monad_with_Or_error_intf) 
(Verifier : sig - type t + type t - val verify : verifier:t -> P.t list -> bool M.Or_error.t + val verify : verifier:t -> P.t list -> bool M.Or_error.t end) = struct module Fold = Parallel_scan.State.Make_foldable (Monad.Ident) @@ -300,7 +311,8 @@ struct let%map x = f () in [%log debug] ~metadata: - [("time_elapsed", `Float Core.Time.(Span.to_ms @@ diff (now ()) start))] + [ ("time_elapsed", `Float Core.Time.(Span.to_ms @@ diff (now ()) start)) + ] "%s took $time_elapsed" label ; x @@ -313,16 +325,21 @@ struct end type t = - {total: Time_span.t; count: int; min: Time_span.t; max: Time_span.t} + { total : Time_span.t + ; count : int + ; min : Time_span.t + ; max : Time_span.t + } [@@deriving to_yojson] - let singleton time = {total= time; count= 1; max= time; min= time} + let singleton time = { total = time; count = 1; max = time; min = time } let update (t : t) time = - { total= Time.Span.( + ) t.total time - ; count= t.count + 1 - ; min= Time.Span.min t.min time - ; max= Time.Span.max t.max time } + { total = Time.Span.( + ) t.total time + ; count = t.count + 1 + ; min = Time.Span.min t.min time + ; max = Time.Span.max t.max time + } end type t = Info.t String.Table.t @@ -337,7 +354,7 @@ struct | None -> Info.singleton elapsed | Some acc -> - Info.update acc elapsed ) ; + Info.update acc elapsed) ; x let log label (t : t) = @@ -345,13 +362,13 @@ struct [%log debug] ~metadata: (List.map (Hashtbl.to_alist t) ~f:(fun (k, info) -> - (k, Info.to_yojson info) )) + (k, Info.to_yojson info))) "%s timing" label end (*TODO: fold over the pending_coinbase tree and validate the statements?*) let scan_statement ~constraint_constants tree ~verifier : - (Transaction_snark.Statement.t, [`Error of Error.t | `Empty]) Result.t + (Transaction_snark.Statement.t, [ `Error of Error.t | `Empty ]) Result.t M.t = let timer = Timer.create () in let module Acc = struct @@ -363,7 +380,7 @@ struct let with_error ~f message = let result = f () in Result.map_error result ~f:(fun e -> - 
Error.createf !"%s: %{sexp:Error.t}" (write_error message) e ) + Error.createf !"%s: %{sexp:Error.t}" (write_error message) e) in let merge_acc ~proofs (acc : Acc.t) s2 : Acc.t Or_error.t = let open Or_error.Let_syntax in @@ -376,39 +393,38 @@ struct let%map merged_statement = Transaction_snark.Statement.merge s1 s2 in - Some (merged_statement, proofs @ ps) ) ) + Some (merged_statement, proofs @ ps))) in let fold_step_a acc_statement job = match job with | Parallel_scan.Merge.Job.Part (proof, message) -> let statement = Ledger_proof.statement proof in - merge_acc ~proofs:[(proof, message)] acc_statement statement - | Empty | Full {status= Parallel_scan.Job_status.Done; _} -> + merge_acc ~proofs:[ (proof, message) ] acc_statement statement + | Empty | Full { status = Parallel_scan.Job_status.Done; _ } -> Or_error.return acc_statement - | Full {left= proof_1, message_1; right= proof_2, message_2; _} -> + | Full { left = proof_1, message_1; right = proof_2, message_2; _ } -> let open Or_error.Let_syntax in let%bind merged_statement = Timer.time timer (sprintf "merge:%s" __LOC__) (fun () -> Transaction_snark.Statement.merge (Ledger_proof.statement proof_1) - (Ledger_proof.statement proof_2) ) + (Ledger_proof.statement proof_2)) in merge_acc acc_statement merged_statement - ~proofs:[(proof_1, message_1); (proof_2, message_2)] + ~proofs:[ (proof_1, message_1); (proof_2, message_2) ] in let fold_step_d acc_statement job = match job with | Parallel_scan.Base.Job.Empty - | Full {status= Parallel_scan.Job_status.Done; _} -> + | Full { status = Parallel_scan.Job_status.Done; _ } -> Or_error.return acc_statement - | Full {job= transaction; _} -> + | Full { job = transaction; _ } -> with_error "Bad base statement" ~f:(fun () -> let open Or_error.Let_syntax in let%bind expected_statement = Timer.time timer (sprintf "create_expected_statement:%s" __LOC__) (fun () -> - create_expected_statement ~constraint_constants transaction - ) + create_expected_statement ~constraint_constants 
transaction) in if Transaction_snark.Statement.equal transaction.statement @@ -420,7 +436,7 @@ struct !"Bad base statement expected: \ %{sexp:Transaction_snark.Statement.t} got: \ %{sexp:Transaction_snark.Statement.t}" - transaction.statement expected_statement) ) + transaction.statement expected_statement)) in let res = Fold.fold_chronological_until tree ~init:None @@ -430,14 +446,14 @@ struct | Ok next -> Continue next | e -> - Stop e ) + Stop e) ~f_base:(fun acc (_weight, job) -> let open Container.Continue_or_stop in match fold_step_d acc job with | Ok next -> Continue next | e -> - Stop e ) + Stop e) ~finish:Result.return in Timer.log "scan_statement" timer ; @@ -448,7 +464,7 @@ struct let open M.Let_syntax in match%map ksprintf time "verify:%s" __LOC__ (fun () -> - Verifier.verify ~verifier proofs ) + Verifier.verify ~verifier proofs) with | Ok true -> Ok res @@ -470,7 +486,7 @@ struct let open M.Let_syntax in match%map time "scan_statement" (fun () -> - scan_statement ~constraint_constants ~verifier t ) + scan_statement ~constraint_constants ~verifier t) with | Error (`Error e) -> Error e @@ -479,22 +495,23 @@ struct Option.value_map ~default:(Ok ()) snarked_ledger_hash ~f:(fun hash -> clarify_error (Frozen_ledger_hash.equal hash current_ledger_hash) - "did not connect with snarked ledger hash" ) + "did not connect with snarked ledger hash") | Ok - { fee_excess= {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} + { fee_excess = { fee_token_l; fee_excess_l; fee_token_r; fee_excess_r } ; source ; target ; next_available_token_before ; next_available_token_after - ; supply_increase= _ - ; pending_coinbase_stack_state= _ (*TODO: check pending coinbases?*) - ; sok_digest= () } -> + ; supply_increase = _ + ; pending_coinbase_stack_state = _ (*TODO: check pending coinbases?*) + ; sok_digest = () + } -> let open Or_error.Let_syntax in let%map () = Option.value_map ~default:(Ok ()) snarked_ledger_hash ~f:(fun hash -> clarify_error (Frozen_ledger_hash.equal hash 
source) - "did not connect with snarked ledger hash" ) + "did not connect with snarked ledger hash") and () = clarify_error (Frozen_ledger_hash.equal current_ledger_hash target) @@ -520,7 +537,7 @@ struct ~f:(fun next_tkn -> clarify_error Token_id.(next_available_token_before = next_tkn) - "next available token from snarked ledger does not match" ) + "next available token from snarked ledger does not match") and () = clarify_error Token_id.( @@ -539,11 +556,11 @@ module Staged_undos = struct List.fold_left t ~init:(Ok ()) ~f:(fun acc t -> Or_error.bind (Or_error.map acc ~f:(fun _ -> t)) - ~f:(fun u -> Ledger.undo ~constraint_constants ledger u) ) + ~f:(fun u -> Ledger.undo ~constraint_constants ledger u)) end let statement_of_job : job -> Transaction_snark.Statement.t option = function - | Base {statement; _} -> + | Base { statement; _ } -> Some statement | Merge ((p1, _), (p2, _)) -> let stmt1 = Ledger_proof.statement p1 @@ -566,16 +583,18 @@ let statement_of_job : job -> Transaction_snark.Statement.t option = function stmt2.next_available_token_before) () in - ( { source= stmt1.source - ; target= stmt2.target + ( { source = stmt1.source + ; target = stmt2.target ; supply_increase - ; pending_coinbase_stack_state= - { source= stmt1.pending_coinbase_stack_state.source - ; target= stmt2.pending_coinbase_stack_state.target } + ; pending_coinbase_stack_state = + { source = stmt1.pending_coinbase_stack_state.source + ; target = stmt2.pending_coinbase_stack_state.target + } ; fee_excess - ; next_available_token_before= stmt1.next_available_token_before - ; next_available_token_after= stmt2.next_available_token_after - ; sok_digest= () } + ; next_available_token_before = stmt1.next_available_token_before + ; next_available_token_after = stmt2.next_available_token_after + ; sok_digest = () + } : Transaction_snark.Statement.t ) let create ~work_delay ~transaction_capacity_log_2 = @@ -596,7 +615,7 @@ let extract_txns txns_with_witnesses = 
txn_with_witness.transaction_with_info in let state_hash = fst txn_with_witness.state_hash in - (txn, state_hash) ) + (txn, state_hash)) let latest_ledger_proof t = let open Option.Let_syntax in @@ -622,7 +641,7 @@ let target_merkle_root t = (*All the transactions in the order in which they were applied*) let staged_transactions t = List.map ~f:(fun (t : Transaction_with_witness.t) -> - t.transaction_with_info |> Ledger.Transaction_applied.transaction ) + t.transaction_with_info |> Ledger.Transaction_applied.transaction) @@ Parallel_scan.pending_data t let staged_transactions_with_protocol_states t @@ -633,7 +652,7 @@ let staged_transactions_with_protocol_states t t.transaction_with_info |> Ledger.Transaction_applied.transaction in let%map protocol_state = get_state (fst t.state_hash) in - (txn, protocol_state) ) + (txn, protocol_state)) @@ Parallel_scan.pending_data t |> Or_error.all @@ -645,13 +664,14 @@ let staged_undos t : Staged_undos.t = let partition_if_overflowing t = let bundle_count work_count = (work_count + 1) / 2 in - let {Space_partition.first= slots, job_count; second} = + let { Space_partition.first = slots, job_count; second } = Parallel_scan.partition_if_overflowing t in - { Space_partition.first= (slots, bundle_count job_count) - ; second= + { Space_partition.first = (slots, bundle_count job_count) + ; second = Option.map second ~f:(fun (slots, job_count) -> - (slots, bundle_count job_count) ) } + (slots, bundle_count job_count)) + } let extract_from_job (job : job) = match job with @@ -676,7 +696,7 @@ let snark_job_list_json t = Yojson.Safe.to_string (`List (List.map all_jobs ~f:(fun tree -> - `List (List.map tree ~f:Job_view.to_yojson) ))) + `List (List.map tree ~f:Job_view.to_yojson)))) (*Always the same pairing of jobs*) let all_work_statements_exn t : Transaction_snark_work.Statement.t list = @@ -688,7 +708,7 @@ let all_work_statements_exn t : Transaction_snark_work.Statement.t list = | None -> assert false | Some stmt -> - stmt )) ) + 
stmt))) let required_work_pairs t ~slots = let work_list = Parallel_scan.jobs_for_slots t ~slots in @@ -709,7 +729,7 @@ let work_statements_for_new_diff t : Transaction_snark_work.Statement.t list = | None -> assert false | Some stmt -> - stmt )) ) + stmt))) let all_work_pairs t ~(get_state : State_hash.t -> Mina_state.Protocol_state.value Or_error.t) : @@ -731,7 +751,7 @@ let all_work_pairs t , state_hash , ledger_witness , init_stack ) -> - let {With_status.data= transaction; status} = + let { With_status.data = transaction; status } = Ledger.Transaction_applied.transaction transaction_with_info in let%bind protocol_state_body = @@ -748,10 +768,11 @@ let all_work_pairs t Snark_work_lib.Work.Single.Spec.Transition ( statement , transaction - , { Transaction_witness.ledger= ledger_witness + , { Transaction_witness.ledger = ledger_witness ; protocol_state_body ; init_stack - ; status } ) + ; status + } ) | Second (p1, p2) -> let%map merged = Transaction_snark.Statement.merge @@ -768,20 +789,20 @@ let all_work_pairs t ~f:(fun acc' pair -> let%bind acc' = acc' in let%map spec = One_or_two.Or_error.map ~f:single_spec pair in - spec :: acc' ) + spec :: acc') in match specs_list with | Ok list -> Continue (acc @ List.rev list) | Error e -> - Stop (Error e) ) + Stop (Error e)) let update_metrics = Parallel_scan.update_metrics let fill_work_and_enqueue_transactions t transactions work = let open Or_error.Let_syntax in - let fill_in_transaction_snark_work t (works : Transaction_snark_work.t list) - : (Ledger_proof.t * Sok_message.t) list Or_error.t = + let fill_in_transaction_snark_work t (works : Transaction_snark_work.t list) : + (Ledger_proof.t * Sok_message.t) list Or_error.t = let next_jobs = List.( take @@ -790,9 +811,9 @@ let fill_work_and_enqueue_transactions t transactions work = in map2_or_error next_jobs (List.concat_map works - ~f:(fun {Transaction_snark_work.fee; proofs; prover} -> + ~f:(fun { Transaction_snark_work.fee; proofs; prover } -> One_or_two.map 
proofs ~f:(fun proof -> (fee, proof, prover)) - |> One_or_two.to_list )) + |> One_or_two.to_list)) ~f:completed_work_to_scanable_work in let old_proof = Parallel_scan.last_emitted_value t in @@ -811,14 +832,14 @@ let fill_work_and_enqueue_transactions t transactions work = in if Frozen_ledger_hash.equal curr_source prev_target then Ok (Some (proof, extract_txns txns_with_witnesses)) - else Or_error.error_string "Unexpected ledger proof emitted" ) + else Or_error.error_string "Unexpected ledger proof emitted") in (result_opt, updated_scan_state) let required_state_hashes t = List.fold ~init:State_hash.Set.empty ~f:(fun acc (t : Transaction_with_witness.t) -> - Set.add acc (fst t.state_hash) ) + Set.add acc (fst t.state_hash)) (Parallel_scan.pending_data t) let check_required_protocol_states t ~protocol_states = @@ -838,15 +859,14 @@ let check_required_protocol_states t ~protocol_states = let received_state_map = List.fold protocol_states ~init:Mina_base.State_hash.Map.empty ~f:(fun m ps -> - State_hash.Map.set m ~key:(Mina_state.Protocol_state.hash ps) ~data:ps - ) + State_hash.Map.set m ~key:(Mina_state.Protocol_state.hash ps) ~data:ps) in let protocol_states_assoc = List.filter_map (State_hash.Set.to_list required_state_hashes) ~f:(fun hash -> let open Option.Let_syntax in let%map state = State_hash.Map.find received_state_map hash in - (hash, state) ) + (hash, state)) in let%map () = check_length protocol_states_assoc in protocol_states_assoc diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli index e644a3a8623..803fd162258 100644 --- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli +++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli @@ -13,12 +13,13 @@ end] module Transaction_with_witness : sig (* TODO: The statement is redundant here - it can be computed from the witness and the transaction *) type t = - { 
transaction_with_info: Ledger.Transaction_applied.t - ; state_hash: State_hash.t * State_body_hash.t - ; state_view: Mina_base.Snapp_predicate.Protocol_state.View.Stable.V1.t - ; statement: Transaction_snark.Statement.t - ; init_stack: Transaction_snark.Pending_coinbase_stack_state.Init_stack.t - ; ledger_witness: Sparse_ledger.t } + { transaction_with_info : Ledger.Transaction_applied.t + ; state_hash : State_hash.t * State_body_hash.t + ; state_view : Mina_base.Snapp_predicate.Protocol_state.View.Stable.V1.t + ; statement : Transaction_snark.Statement.t + ; init_stack : Transaction_snark.Pending_coinbase_stack_state.Init_stack.t + ; ledger_witness : Sparse_ledger.t + } [@@deriving sexp] end @@ -31,7 +32,7 @@ module Available_job : sig end module Space_partition : sig - type t = {first: int * int; second: (int * int) option} [@@deriving sexp] + type t = { first : int * int; second : (int * int) option } [@@deriving sexp] end module Job_view : sig @@ -52,18 +53,17 @@ end module Make_statement_scanner (M : Monad_with_Or_error_intf) (Verifier : sig - type t + type t - val verify : - verifier:t - -> Ledger_proof_with_sok_message.t list - -> bool M.Or_error.t + val verify : + verifier:t -> Ledger_proof_with_sok_message.t list -> bool M.Or_error.t end) : sig val scan_statement : constraint_constants:Genesis_constants.Constraint_constants.t -> t -> verifier:Verifier.t - -> (Transaction_snark.Statement.t, [`Empty | `Error of Error.t]) result M.t + -> (Transaction_snark.Statement.t, [ `Empty | `Error of Error.t ]) result + M.t val check_invariants : t diff --git a/src/lib/transaction_snark_work/transaction_snark_work.ml b/src/lib/transaction_snark_work/transaction_snark_work.ml index 7d105d810b6..9b5de95e086 100644 --- a/src/lib/transaction_snark_work/transaction_snark_work.ml +++ b/src/lib/transaction_snark_work/transaction_snark_work.ml @@ -53,10 +53,11 @@ module Info = struct module V1 = struct type t = - { statements: Statement.Stable.V1.t - ; work_ids: int 
One_or_two.Stable.V1.t - ; fee: Fee.Stable.V1.t - ; prover: Public_key.Compressed.Stable.V1.t } + { statements : Statement.Stable.V1.t + ; work_ids : int One_or_two.Stable.V1.t + ; fee : Fee.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving compare, sexp, to_yojson] let to_latest = Fn.id @@ -64,10 +65,11 @@ module Info = struct end] type t = Stable.Latest.t = - { statements: Statement.t - ; work_ids: int One_or_two.t - ; fee: Fee.t - ; prover: Public_key.Compressed.t } + { statements : Statement.t + ; work_ids : int One_or_two.t + ; fee : Fee.t + ; prover : Public_key.Compressed.t + } [@@deriving to_yojson, sexp, compare] end @@ -78,9 +80,10 @@ module T = struct module V1 = struct type t = - { fee: Fee.Stable.V1.t - ; proofs: Ledger_proof.Stable.V1.t One_or_two.Stable.V1.t - ; prover: Public_key.Compressed.Stable.V1.t } + { fee : Fee.Stable.V1.t + ; proofs : Ledger_proof.Stable.V1.t One_or_two.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving compare, sexp, yojson] let to_latest = Fn.id @@ -88,9 +91,10 @@ module T = struct end] type t = Stable.Latest.t = - { fee: Fee.t - ; proofs: Ledger_proof.t One_or_two.t - ; prover: Public_key.Compressed.t } + { fee : Fee.t + ; proofs : Ledger_proof.t One_or_two.t + ; prover : Public_key.Compressed.t + } [@@deriving compare, yojson, sexp] let statement t = One_or_two.map t.proofs ~f:Ledger_proof.statement @@ -98,9 +102,10 @@ module T = struct let info t = let statements = One_or_two.map t.proofs ~f:Ledger_proof.statement in { Info.statements - ; work_ids= One_or_two.map statements ~f:Transaction_snark.Statement.hash - ; fee= t.fee - ; prover= t.prover } + ; work_ids = One_or_two.map statements ~f:Transaction_snark.Statement.hash + ; fee = t.fee + ; prover = t.prover + } end include T @@ -115,4 +120,4 @@ end let forget = Fn.id -let fee {fee; _} = fee +let fee { fee; _ } = fee diff --git a/src/lib/transaction_snark_work/transaction_snark_work.mli 
b/src/lib/transaction_snark_work/transaction_snark_work.mli index dc4935bd725..1f0b5c8dea4 100644 --- a/src/lib/transaction_snark_work/transaction_snark_work.mli +++ b/src/lib/transaction_snark_work/transaction_snark_work.mli @@ -8,15 +8,14 @@ module Statement : sig include Hashable.S with type t := t - module Stable : - sig - module V1 : sig - type t [@@deriving bin_io, compare, sexp, version, yojson, equal] + module Stable : sig + module V1 : sig + type t [@@deriving bin_io, compare, sexp, version, yojson, equal] - include Hashable.S_binable with type t := t - end + include Hashable.S_binable with type t := t end - with type V1.t = t + end + with type V1.t = t val gen : t Quickcheck.Generator.t @@ -27,19 +26,19 @@ end module Info : sig type t = - { statements: Statement.Stable.V1.t - ; work_ids: int One_or_two.Stable.V1.t - ; fee: Fee.Stable.V1.t - ; prover: Public_key.Compressed.Stable.V1.t } + { statements : Statement.Stable.V1.t + ; work_ids : int One_or_two.Stable.V1.t + ; fee : Fee.Stable.V1.t + ; prover : Public_key.Compressed.Stable.V1.t + } [@@deriving to_yojson, sexp, compare] - module Stable : - sig - module V1 : sig - type t [@@deriving compare, to_yojson, version, sexp, bin_io] - end + module Stable : sig + module V1 : sig + type t [@@deriving compare, to_yojson, version, sexp, bin_io] end - with type V1.t = t + end + with type V1.t = t end (* TODO: The SOK message actually should bind the SNARK to @@ -49,9 +48,10 @@ end *) type t = - { fee: Fee.t - ; proofs: Ledger_proof.t One_or_two.t - ; prover: Public_key.Compressed.t } + { fee : Fee.t + ; proofs : Ledger_proof.t One_or_two.t + ; prover : Public_key.Compressed.t + } [@@deriving compare, sexp, yojson] val fee : t -> Fee.t @@ -60,21 +60,21 @@ val info : t -> Info.t val statement : t -> Statement.t -module Stable : - sig - module V1 : sig - type t [@@deriving sexp, compare, bin_io, yojson, version] - end +module Stable : sig + module V1 : sig + type t [@@deriving sexp, compare, bin_io, yojson, 
version] end - with type V1.t = t +end +with type V1.t = t type unchecked = t module Checked : sig type nonrec t = t = - { fee: Fee.t - ; proofs: Ledger_proof.t One_or_two.t - ; prover: Public_key.Compressed.t } + { fee : Fee.t + ; proofs : Ledger_proof.t One_or_two.t + ; prover : Public_key.Compressed.t + } [@@deriving sexp, compare, to_yojson] module Stable : module type of Stable diff --git a/src/lib/transaction_witness/transaction_witness.ml b/src/lib/transaction_witness/transaction_witness.ml index 218523df855..8c4bbf35b43 100644 --- a/src/lib/transaction_witness/transaction_witness.ml +++ b/src/lib/transaction_witness/transaction_witness.ml @@ -4,10 +4,11 @@ open Core_kernel module Stable = struct module V1 = struct type t = - { ledger: Mina_base.Sparse_ledger.Stable.V1.t - ; protocol_state_body: Mina_state.Protocol_state.Body.Value.Stable.V1.t - ; init_stack: Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t - ; status: Mina_base.Transaction_status.Stable.V1.t } + { ledger : Mina_base.Sparse_ledger.Stable.V1.t + ; protocol_state_body : Mina_state.Protocol_state.Body.Value.Stable.V1.t + ; init_stack : Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t + ; status : Mina_base.Transaction_status.Stable.V1.t + } [@@deriving sexp, to_yojson] let to_latest = Fn.id diff --git a/src/lib/transaction_witness/transaction_witness.mli b/src/lib/transaction_witness/transaction_witness.mli index 1eb8400bd06..9d1b1fce4a7 100644 --- a/src/lib/transaction_witness/transaction_witness.mli +++ b/src/lib/transaction_witness/transaction_witness.mli @@ -4,10 +4,11 @@ open Core_kernel module Stable : sig module V1 : sig type t = - { ledger: Mina_base.Sparse_ledger.Stable.V1.t - ; protocol_state_body: Mina_state.Protocol_state.Body.Value.Stable.V1.t - ; init_stack: Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t - ; status: Mina_base.Transaction_status.Stable.V1.t } + { ledger : Mina_base.Sparse_ledger.Stable.V1.t + ; protocol_state_body : 
Mina_state.Protocol_state.Body.Value.Stable.V1.t + ; init_stack : Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t + ; status : Mina_base.Transaction_status.Stable.V1.t + } [@@deriving sexp, to_yojson] end end] diff --git a/src/lib/transition_chain_prover/transition_chain_prover.ml b/src/lib/transition_chain_prover/transition_chain_prover.ml index 61b138d65de..4ebffc6292d 100644 --- a/src/lib/transition_chain_prover/transition_chain_prover.ml +++ b/src/lib/transition_chain_prover/transition_chain_prover.ml @@ -9,7 +9,7 @@ end module Make (Inputs : Inputs_intf) : Mina_intf.Transition_chain_prover_intf - with type transition_frontier := Inputs.Transition_frontier.t = struct + with type transition_frontier := Inputs.Transition_frontier.t = struct open Inputs let find_in_root_history frontier state_hash = diff --git a/src/lib/transition_chain_verifier/transition_chain_verifier.ml b/src/lib/transition_chain_verifier/transition_chain_verifier.ml index c992356240f..9f94e5b08f9 100644 --- a/src/lib/transition_chain_verifier/transition_chain_verifier.ml +++ b/src/lib/transition_chain_verifier/transition_chain_verifier.ml @@ -9,10 +9,9 @@ module Merkle_list_verifier = Merkle_list_verifier.Make (struct let hash previous_state_hash state_body_hash = Protocol_state.hash_abstract ~hash_body:Fn.id - {previous_state_hash; body= state_body_hash} + { previous_state_hash; body = state_body_hash } end) -let verify ~target_hash ~transition_chain_proof:(init_state_hash, merkle_list) - = +let verify ~target_hash ~transition_chain_proof:(init_state_hash, merkle_list) = (* TODO: Should we check the length here too? 
*) Merkle_list_verifier.verify ~init:init_state_hash merkle_list target_hash diff --git a/src/lib/transition_frontier_controller/transition_frontier_controller.ml b/src/lib/transition_frontier_controller/transition_frontier_controller.ml index b7a5681021a..c930ebb34ef 100644 --- a/src/lib/transition_frontier_controller/transition_frontier_controller.ml +++ b/src/lib/transition_frontier_controller/transition_frontier_controller.ml @@ -39,14 +39,14 @@ let run ~logger ~trust_system ~verifier ~network ~time_controller * are no duplicates in the list *) Transition_handler.Unprocessed_transition_cache.register_exn unprocessed_transition_cache t - |> Strict_pipe.Writer.write primary_transition_writer ) ; + |> Strict_pipe.Writer.write primary_transition_writer) ; trace_recurring "validator" (fun () -> Transition_handler.Validator.run ~consensus_constants: (Precomputed_values.consensus_constants precomputed_values) ~logger ~trust_system ~time_controller ~frontier ~transition_reader:network_transition_reader ~valid_transition_writer - ~unprocessed_transition_cache ) ; + ~unprocessed_transition_cache) ; Strict_pipe.Reader.iter_without_pushback valid_transition_reader ~f:(Strict_pipe.Writer.write primary_transition_writer) |> don't_wait_for ; @@ -57,11 +57,11 @@ let run ~logger ~trust_system ~verifier ~network ~time_controller ~primary_transition_reader ~producer_transition_reader ~clean_up_catchup_scheduler ~catchup_job_writer ~catchup_breadcrumbs_reader ~catchup_breadcrumbs_writer - ~processed_transition_writer ) ; + ~processed_transition_writer) ; trace_recurring "catchup" (fun () -> Ledger_catchup.run ~logger ~precomputed_values ~trust_system ~verifier ~network ~frontier ~catchup_job_reader ~catchup_breadcrumbs_writer - ~unprocessed_transition_cache ) ; + ~unprocessed_transition_cache) ; Strict_pipe.Reader.iter_without_pushback clear_reader ~f:(fun _ -> let open Strict_pipe.Writer in kill valid_transition_writer ; @@ -71,6 +71,6 @@ let run ~logger ~trust_system ~verifier 
~network ~time_controller kill catchup_breadcrumbs_writer ; if Ivar.is_full clean_up_catchup_scheduler then [%log error] "Ivar.fill bug is here!" ; - Ivar.fill clean_up_catchup_scheduler () ) + Ivar.fill clean_up_catchup_scheduler ()) |> don't_wait_for ; processed_transition_reader diff --git a/src/lib/transition_handler/breadcrumb_builder.ml b/src/lib/transition_handler/breadcrumb_builder.ml index fe7ad80e33f..0fbe724467f 100644 --- a/src/lib/transition_handler/breadcrumb_builder.ml +++ b/src/lib/transition_handler/breadcrumb_builder.ml @@ -24,15 +24,15 @@ let build_subtrees_of_breadcrumbs ~logger ~precomputed_values ~verifier [ ("state_hash", Mina_base.State_hash.to_yojson initial_hash) ; ( "transition_hashes" , `List - (List.map subtrees_of_enveloped_transitions - ~f:(fun subtree -> + (List.map subtrees_of_enveloped_transitions ~f:(fun subtree -> Rose_tree.to_yojson (fun enveloped_transitions -> Cached.peek enveloped_transitions |> Envelope.Incoming.data |> External_transition.Initial_validated.state_hash - |> Mina_base.State_hash.to_yojson ) - subtree )) ) ] + |> Mina_base.State_hash.to_yojson) + subtree)) ) + ] "Transition frontier already garbage-collected the parent of \ $state_hash" ; Or_error.error_string missing_parent_msg @@ -45,13 +45,14 @@ let build_subtrees_of_breadcrumbs ~logger ~precomputed_values ~verifier let%bind init_breadcrumb = breadcrumb_if_present (Logger.extend logger - [("Check", `String "Before creating breadcrumb")]) + [ ("Check", `String "Before creating breadcrumb") ]) |> Deferred.return in Rose_tree.Deferred.Or_error.fold_map_over_subtrees subtree_of_enveloped_transitions ~init:(Cached.pure init_breadcrumb) - ~f:(fun cached_parent - (Rose_tree.T (cached_enveloped_transition, _) as subtree) + ~f:(fun + cached_parent + (Rose_tree.T (cached_enveloped_transition, _) as subtree) -> let open Deferred.Let_syntax in let%map cached_result = @@ -99,64 +100,65 @@ let build_subtrees_of_breadcrumbs ~logger ~precomputed_values ~verifier 
Transition_frontier.Breadcrumb.build ~logger ~precomputed_values ~verifier ~trust_system ~parent ~transition:mostly_validated_transition - ~sender:(Some sender) ~transition_receipt_time () - ) ) + ~sender:(Some sender) ~transition_receipt_time ())) with | Error _ -> Deferred.return @@ Or_error.error_string missing_parent_msg | Ok result -> ( - match result with - | Ok new_breadcrumb -> - let open Result.Let_syntax in - Mina_metrics.( - Counter.inc_one - Transition_frontier_controller - .breadcrumbs_built_by_builder) ; - Deferred.return - (let%map (_ : Transition_frontier.Breadcrumb.t) = - breadcrumb_if_present - (Logger.extend logger - [("Check", `String "After creating breadcrumb")]) - in - new_breadcrumb) - | Error err -> ( - (* propagate bans through subtree *) - let subtree_nodes = Rose_tree.flatten subtree in - let ip_address_set = - let sender_from_tree_node node = - Envelope.Incoming.sender (Cached.peek node) + match result with + | Ok new_breadcrumb -> + let open Result.Let_syntax in + Mina_metrics.( + Counter.inc_one + Transition_frontier_controller + .breadcrumbs_built_by_builder) ; + Deferred.return + (let%map (_ : Transition_frontier.Breadcrumb.t) = + breadcrumb_if_present + (Logger.extend logger + [ ( "Check" + , `String "After creating breadcrumb" ) + ]) + in + new_breadcrumb) + | Error err -> ( + (* propagate bans through subtree *) + let subtree_nodes = Rose_tree.flatten subtree in + let ip_address_set = + let sender_from_tree_node node = + Envelope.Incoming.sender (Cached.peek node) + in + List.fold subtree_nodes + ~init:(Set.empty (module Network_peer.Peer)) + ~f:(fun inet_addrs node -> + match sender_from_tree_node node with + | Local -> + failwith + "build_subtrees_of_breadcrumbs: sender of \ + external transition should not be Local" + | Remote peer -> + Set.add inet_addrs peer) in - List.fold subtree_nodes - ~init:(Set.empty (module Network_peer.Peer)) - ~f:(fun inet_addrs node -> - match sender_from_tree_node node with - | Local -> - failwith 
- "build_subtrees_of_breadcrumbs: sender of \ - external transition should not be Local" - | Remote peer -> - Set.add inet_addrs peer ) - in - let ip_addresses = Set.to_list ip_address_set in - let trust_system_record_invalid msg error = - let%map () = - Deferred.List.iter ip_addresses ~f:(fun ip_addr -> - Trust_system.record trust_system logger ip_addr - ( Trust_system.Actions - .Gossiped_invalid_transition - , Some (msg, []) ) ) + let ip_addresses = Set.to_list ip_address_set in + let trust_system_record_invalid msg error = + let%map () = + Deferred.List.iter ip_addresses ~f:(fun ip_addr -> + Trust_system.record trust_system logger ip_addr + ( Trust_system.Actions + .Gossiped_invalid_transition + , Some (msg, []) )) + in + Error error in - Error error - in - match err with - | `Invalid_staged_ledger_hash error -> - trust_system_record_invalid - "invalid staged ledger hash" error - | `Invalid_staged_ledger_diff error -> - trust_system_record_invalid - "invalid staged ledger diff" error - | `Fatal_error exn -> - Deferred.return (Or_error.of_exn exn) ) ) ) + match err with + | `Invalid_staged_ledger_hash error -> + trust_system_record_invalid + "invalid staged ledger hash" error + | `Invalid_staged_ledger_diff error -> + trust_system_record_invalid + "invalid staged ledger diff" error + | `Fatal_error exn -> + Deferred.return (Or_error.of_exn exn) ) )) |> Cached.sequence_deferred in - Cached.sequence_result cached_result ) ) + Cached.sequence_result cached_result)) diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml index 03b64cae436..cf6adcfb3fa 100644 --- a/src/lib/transition_handler/catchup_scheduler.ml +++ b/src/lib/transition_handler/catchup_scheduler.ml @@ -21,9 +21,9 @@ open Mina_transition open Network_peer type t = - { logger: Logger.t - ; time_controller: Block_time.Controller.t - ; catchup_job_writer: + { logger : Logger.t + ; time_controller : Block_time.Controller.t + ; catchup_job_writer : ( 
State_hash.t * ( External_transition.Initial_validated.t Envelope.Incoming.t , State_hash.t ) @@ -39,7 +39,7 @@ type t = a key in this table. Even if a transition doesn't has a child, its corresponding value in the hash table would just be an empty list. *) - ; collected_transitions: + ; collected_transitions : ( External_transition.Initial_validated.t Envelope.Incoming.t , State_hash.t ) Cached.t @@ -48,15 +48,16 @@ type t = (** `parent_root_timeouts` stores the timeouts for catchup job. The keys are the missing transitions, and the values are the timeouts. *) - ; parent_root_timeouts: unit Block_time.Timeout.t State_hash.Table.t - ; breadcrumb_builder_supervisor: + ; parent_root_timeouts : unit Block_time.Timeout.t State_hash.Table.t + ; breadcrumb_builder_supervisor : ( State_hash.t * ( External_transition.Initial_validated.t Envelope.Incoming.t , State_hash.t ) Cached.t Rose_tree.t list ) - Capped_supervisor.t } + Capped_supervisor.t + } let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier ~time_controller @@ -73,7 +74,7 @@ let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier ~(catchup_breadcrumbs_writer : ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Ledger_catchup of unit Ivar.t | `Catchup_scheduler] + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] , crash buffered , unit ) Writer.t) ~clean_up_signal = @@ -82,9 +83,9 @@ let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier upon (Ivar.read clean_up_signal) (fun () -> Hashtbl.iter collected_transitions ~f:(fun cached_transitions -> List.iter cached_transitions - ~f:(Fn.compose ignore Cached.invalidate_with_failure) ) ; + ~f:(Fn.compose ignore Cached.invalidate_with_failure)) ; Hashtbl.iter parent_root_timeouts ~f:(fun timeout -> - Block_time.Timeout.cancel time_controller timeout () ) ) ; + Block_time.Timeout.cancel time_controller timeout ())) ; let breadcrumb_builder_supervisor = O1trace.trace_recurring 
"breadcrumb builder" (fun () -> Capped_supervisor.create ~job_capacity:30 @@ -94,7 +95,8 @@ let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier ~logger: (Logger.extend logger [ ( "catchup_scheduler" - , `String "Called from catchup scheduler" ) ]) + , `String "Called from catchup scheduler" ) + ]) ~precomputed_values ~verifier ~trust_system ~frontier ~initial_hash transition_branches with @@ -103,22 +105,23 @@ let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier (trees_of_breadcrumbs, `Catchup_scheduler) | Error err -> [%log debug] - !"Error during buildup breadcrumbs inside \ - catchup_scheduler: $error" - ~metadata:[("error", Error_json.error_to_yojson err)] ; + !"Error during buildup breadcrumbs inside catchup_scheduler: \ + $error" + ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; List.iter transition_branches ~f:(fun subtree -> Rose_tree.iter subtree ~f:(fun cached_transition -> ignore ( Cached.invalidate_with_failure cached_transition : External_transition.Initial_validated.t - Envelope.Incoming.t ) ) ) ) ) + Envelope.Incoming.t ))))) in { logger ; collected_transitions ; time_controller ; catchup_job_writer ; parent_root_timeouts - ; breadcrumb_builder_supervisor } + ; breadcrumb_builder_supervisor + } let mem t transition = Hashtbl.mem t.collected_transitions @@ -144,15 +147,13 @@ let cancel_timeout t hash = (Hashtbl.find t.parent_root_timeouts hash) ~f:Block_time.Timeout.remaining_time in - let cancel timeout = - Block_time.Timeout.cancel t.time_controller timeout () - in + let cancel timeout = Block_time.Timeout.cancel t.time_controller timeout () in Hashtbl.change t.parent_root_timeouts hash ~f:Fn.(compose (const None) (Option.iter ~f:cancel)) ; remaining_time let rec extract_subtree t cached_transition = - let {With_hash.hash; _}, _ = + let { With_hash.hash; _ }, _ = Envelope.Incoming.data (Cached.peek cached_transition) in let successors = @@ -175,10 +176,10 @@ let rec remove_tree t parent_hash = 
Gauge.dec_one Transition_frontier_controller.transitions_in_catchup_scheduler) ; List.iter children ~f:(fun child -> - let {With_hash.hash; _}, _ = + let { With_hash.hash; _ }, _ = Envelope.Incoming.data (Cached.peek child) in - remove_tree t hash ) + remove_tree t hash) let watch t ~timeout_duration ~cached_transition = let transition_with_hash, _ = @@ -203,20 +204,21 @@ let watch t ~timeout_duration ~cached_transition = , `Int (Block_time.Span.to_ms duration |> Int64.to_int_trunc) ) ; ( "cached_transition" , With_hash.data transition_with_hash - |> External_transition.to_yojson ) ] + |> External_transition.to_yojson ) + ] "Timed out waiting for the parent of $cached_transition after \ $duration ms, signalling a catchup job" ; (* it's ok to create a new thread here because the thread essentially does no work *) if Writer.is_closed t.catchup_job_writer then [%log' trace t.logger] "catchup job pipe was closed; attempt to write to closed pipe" - else Writer.write t.catchup_job_writer forest ) + else Writer.write t.catchup_job_writer forest) in match Hashtbl.find t.collected_transitions parent_hash with | None -> let remaining_time = cancel_timeout t hash in Hashtbl.add_exn t.collected_transitions ~key:parent_hash - ~data:[cached_transition] ; + ~data:[ cached_transition ] ; Hashtbl.update t.collected_transitions hash ~f:(Option.value ~default:[]) ; ignore ( Hashtbl.add t.parent_root_timeouts ~key:parent_hash @@ -224,8 +226,8 @@ let watch t ~timeout_duration ~cached_transition = (make_timeout (Option.fold remaining_time ~init:timeout_duration ~f:(fun _ remaining_time -> - Block_time.Span.min remaining_time timeout_duration ))) - : [`Duplicate | `Ok] ) ; + Block_time.Span.min remaining_time timeout_duration))) + : [ `Duplicate | `Ok ] ) ; Mina_metrics.( Gauge.inc_one Transition_frontier_controller.transitions_in_catchup_scheduler) @@ -233,13 +235,13 @@ let watch t ~timeout_duration ~cached_transition = if List.exists cached_sibling_transitions ~f:(fun 
cached_sibling_transition -> - let {With_hash.hash= sibling_hash; _}, _ = + let { With_hash.hash = sibling_hash; _ }, _ = Envelope.Incoming.data (Cached.peek cached_sibling_transition) in - State_hash.equal hash sibling_hash ) + State_hash.equal hash sibling_hash) then [%log' debug t.logger] - ~metadata:[("state_hash", State_hash.to_yojson hash)] + ~metadata:[ ("state_hash", State_hash.to_yojson hash) ] "Received request to watch transition for catchup that already is \ being watched: $state_hash" else @@ -269,7 +271,7 @@ let notify t ~hash = List.map collected_transitions ~f:(extract_subtree t) in Capped_supervisor.dispatch t.breadcrumb_builder_supervisor - (hash, transition_subtrees) ) ; + (hash, transition_subtrees)) ; remove_tree t hash ; Or_error.return () @@ -302,14 +304,13 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants - ~conf_dir:None ~pids ) + ~conf_dir:None ~pids) (* cast a breadcrumb into a cached, enveloped, partially validated transition *) let downcast_breadcrumb breadcrumb = let transition = Transition_frontier.Breadcrumb.validated_transition breadcrumb - |> External_transition.Validation - .reset_frontier_dependencies_validation + |> External_transition.Validation.reset_frontier_dependencies_validation |> External_transition.Validation.reset_staged_ledger_diff_validation in Envelope.Incoming.wrap ~data:transition ~sender:Envelope.Sender.Local @@ -361,7 +362,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = failwith "catchup scheduler should be empty after job is emitted" ; Strict_pipe.Writer.close catchup_breadcrumbs_writer ; - Strict_pipe.Writer.close catchup_job_writer ) ) + Strict_pipe.Writer.close catchup_job_writer)) let%test_unit "catchup jobs do not fire after timeout if they are \ invalidated" = @@ -385,7 +386,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = 
Strict_pipe.create ~name:(__MODULE__ ^ __LOC__) (Buffered (`Capacity 10, `Overflow Crash)) in - let[@warning "-8"] [breadcrumb_1; breadcrumb_2] = + let[@warning "-8"] [ breadcrumb_1; breadcrumb_2 ] = List.map ~f:register_breadcrumb branch in let scheduler = @@ -397,7 +398,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = (Cached.transform ~f:downcast_breadcrumb breadcrumb_2) ; Async.Thread_safe.block_on_async_exn (fun () -> Transition_frontier.add_breadcrumb_exn frontier - (Cached.peek breadcrumb_1) ) ; + (Cached.peek breadcrumb_1)) ; Or_error.ok_exn (notify scheduler ~hash: @@ -418,7 +419,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = | `Ok (`Ok _) -> failwith "job was emitted from the catchup scheduler even though \ - the job was invalidated" ) ; + the job was invalidated") ; Async.Thread_safe.block_on_async_exn (fun () -> match%map Block_time.Timeout.await ~timeout_duration:test_delta @@ -431,7 +432,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = failwith "pipe closed unexpectedly" | `Ok (`Ok - ( [Rose_tree.T (received_breadcrumb, [])] + ( [ Rose_tree.T (received_breadcrumb, []) ] , `Catchup_scheduler )) -> [%test_eq: State_hash.t] (Transition_frontier.Breadcrumb.state_hash @@ -439,7 +440,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = (Transition_frontier.Breadcrumb.state_hash (Cached.peek breadcrumb_2)) | `Ok (`Ok _) -> - failwith "invalid breadcrumb builder response" ) ; + failwith "invalid breadcrumb builder response") ; ignore ( Cached.invalidate_with_success breadcrumb_1 : Transition_frontier.Breadcrumb.t ) ; @@ -447,7 +448,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = ( Cached.invalidate_with_success breadcrumb_2 : Transition_frontier.Breadcrumb.t ) ; Strict_pipe.Writer.close catchup_breadcrumbs_writer ; - Strict_pipe.Writer.close catchup_job_writer ) + Strict_pipe.Writer.close catchup_job_writer) let%test_unit "catchup scheduler should not create 
duplicate jobs when a \ sequence of transitions is added in reverse order" = @@ -492,7 +493,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = has_timeout_parent_hash scheduler (Transition_frontier.Breadcrumb.parent_hash curr_breadcrumb) ) ; - curr_breadcrumb ) + curr_breadcrumb) : Frontier_base.Breadcrumb.t ) ; Async.Thread_safe.block_on_async_exn (fun () -> match%map Strict_pipe.Reader.read catchup_job_reader with @@ -501,5 +502,5 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = | `Ok (job_hash, _) -> [%test_eq: State_hash.t] job_hash ( Transition_frontier.Breadcrumb.parent_hash - @@ List.hd_exn branch ) ) ) + @@ List.hd_exn branch ))) end ) diff --git a/src/lib/transition_handler/core_extended_cache.ml b/src/lib/transition_handler/core_extended_cache.ml index c54c528eb2e..229dab45dde 100644 --- a/src/lib/transition_handler/core_extended_cache.ml +++ b/src/lib/transition_handler/core_extended_cache.ml @@ -26,11 +26,7 @@ module Memoized = struct raise e let create ~f arg = - try Result.Ok (f arg) with - | Sys.Break as e -> - raise e - | e -> - Result.Error e + try Result.Ok (f arg) with Sys.Break as e -> raise e | e -> Result.Error e end module type Store = sig @@ -82,10 +78,11 @@ struct type 'a with_init_args = 'a Store.with_init_args Strat.with_init_args type ('k, 'v) t = - { destruct: ('v -> unit) option + { destruct : ('v -> unit) option (** Function to be called on removal of values from the store *) - ; strat: 'k Strat.t - ; store: ('k, 'v) Store.t (** The actual key value store*) } + ; strat : 'k Strat.t + ; store : ('k, 'v) Store.t (** The actual key value store*) + } type ('a, 'b) memo = ('a, ('b, exn) Result.t) t @@ -100,7 +97,7 @@ struct let touch_key cache key = List.iter (Strat.touch cache.strat key) ~f:(fun k -> - clear_from_store cache k ) + clear_from_store cache k) let find cache k = let res = Store.find cache.store k in @@ -115,17 +112,17 @@ struct Option.iter (Store.find cache.store key) ~f:(fun v -> 
Strat.remove cache.strat key ; Option.call ~f:cache.destruct v ; - Store.remove cache.store key ) + Store.remove cache.store key) let clear cache = Option.iter cache.destruct ~f:(fun destruct -> - List.iter (Store.data cache.store) ~f:destruct ) ; + List.iter (Store.data cache.store) ~f:destruct) ; Strat.clear cache.strat ; Store.clear cache.store let create ~destruct = Strat.cps_create ~f:(fun strat -> - Store.cps_create ~f:(fun store -> {strat; destruct; store}) ) + Store.cps_create ~f:(fun store -> { strat; destruct; store })) let call_with_cache ~cache f arg = match find cache arg with @@ -141,20 +138,21 @@ struct Strat.cps_create ~f:(fun strat -> Store.cps_create ~f:(fun store -> let destruct = Option.map destruct ~f:(fun f -> Result.iter ~f) in - let cache = {strat; destruct; store} in + let cache = { strat; destruct; store } in let memd_f arg = call_with_cache ~cache f arg in - (cache, memd_f) ) ) + (cache, memd_f))) end module Strategy = struct module Lru = struct type 'a t = { (* sorted in order of descending recency *) - list: 'a Doubly_linked.t + list : 'a Doubly_linked.t ; (* allows fast lookup in the list above *) - table: ('a, 'a Doubly_linked.Elt.t) Hashtbl.t - ; mutable maxsize: int - ; mutable size: int } + table : ('a, 'a Doubly_linked.Elt.t) Hashtbl.t + ; mutable maxsize : int + ; mutable size : int + } type 'a with_init_args = int -> 'a @@ -185,13 +183,14 @@ module Strategy = struct let remove lru x = Option.iter (Hashtbl.find lru.table x) ~f:(fun el -> Doubly_linked.remove lru.list el ; - Hashtbl.remove lru.table x ) + Hashtbl.remove lru.table x) let create maxsize = - { list= Doubly_linked.create () - ; table= Hashtbl.Poly.create () ~size:100 + { list = Doubly_linked.create () + ; table = Hashtbl.Poly.create () ~size:100 ; maxsize - ; size= 0 } + ; size = 0 + } let cps_create ~f maxsize = f (create maxsize) diff --git a/src/lib/transition_handler/core_extended_cache.mli b/src/lib/transition_handler/core_extended_cache.mli index 
2b58af0645f..036e6e09d97 100644 --- a/src/lib/transition_handler/core_extended_cache.mli +++ b/src/lib/transition_handler/core_extended_cache.mli @@ -15,7 +15,7 @@ open! Core *) val memoize : ?destruct:('b -> unit) - -> ?expire:[`Lru of int | `Keep_all | `Keep_one] + -> ?expire:[ `Lru of int | `Keep_all | `Keep_one ] -> ('a -> 'b) -> 'a -> 'b diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml index 6629d17a873..8b5e1f6f538 100644 --- a/src/lib/transition_handler/processor.ml +++ b/src/lib/transition_handler/processor.ml @@ -48,9 +48,7 @@ let add_and_finalize ~logger ~frontier ~catchup_scheduler in let%map () = if only_if_present then ( - let parent_hash = - Transition_frontier.Breadcrumb.parent_hash breadcrumb - in + let parent_hash = Transition_frontier.Breadcrumb.parent_hash breadcrumb in match Transition_frontier.find frontier parent_hash with | Some _ -> Transition_frontier.add_breadcrumb_exn frontier breadcrumb @@ -98,10 +96,10 @@ let process_transition ~logger ~trust_system ~verifier ~frontier let initially_validated_transition = Envelope.Incoming.data enveloped_initially_validated_transition in - let {With_hash.hash= transition_hash; data= transition}, _ = + let { With_hash.hash = transition_hash; data = transition }, _ = initially_validated_transition in - let metadata = [("state_hash", State_hash.to_yojson transition_hash)] in + let metadata = [ ("state_hash", State_hash.to_yojson transition_hash) ] in Deferred.map ~f:(Fn.const ()) (let open Deferred.Result.Let_syntax in let%bind mostly_validated_transition = @@ -125,8 +123,7 @@ let process_transition ~logger ~trust_system ~verifier ~frontier in let (_ : External_transition.Initial_validated.t Envelope.Incoming.t) = - Cached.invalidate_with_failure - cached_initially_validated_transition + Cached.invalidate_with_failure cached_initially_validated_transition in Error () | Error `Already_in_frontier -> @@ -135,8 +132,7 @@ let process_transition ~logger 
~trust_system ~verifier ~frontier is is already in the transition frontier" ; let (_ : External_transition.Initial_validated.t Envelope.Incoming.t) = - Cached.invalidate_with_failure - cached_initially_validated_transition + Cached.invalidate_with_failure cached_initially_validated_transition in return (Error ()) | Error `Parent_missing_from_frontier -> ( @@ -173,9 +169,7 @@ let process_transition ~logger ~trust_system ~verifier ~frontier Protocol_state.previous_state_hash (External_transition.protocol_state transition) in - let parent_breadcrumb = - Transition_frontier.find_exn frontier parent_hash - in + let parent_breadcrumb = Transition_frontier.find_exn frontier parent_hash in let%bind breadcrumb = cached_transform_deferred_result cached_initially_validated_transition ~transform_cached:(fun _ -> @@ -183,20 +177,20 @@ let process_transition ~logger ~trust_system ~verifier ~frontier ~verifier ~trust_system ~transition_receipt_time ~sender:(Some sender) ~parent:parent_breadcrumb ~transition:mostly_validated_transition - (* TODO: Can we skip here? *) () ) + (* TODO: Can we skip here? 
*) ()) ~transform_result:(function | Error (`Invalid_staged_ledger_hash error) | Error (`Invalid_staged_ledger_diff error) -> [%log error] ~metadata: - (metadata @ [("error", Error_json.error_to_yojson error)]) + (metadata @ [ ("error", Error_json.error_to_yojson error) ]) "Error while building breadcrumb in the transition handler \ processor: $error" ; Deferred.return (Error ()) | Error (`Fatal_error exn) -> raise exn | Ok breadcrumb -> - Deferred.return (Ok breadcrumb) ) + Deferred.return (Ok breadcrumb)) in Mina_metrics.( Counter.inc_one @@ -228,19 +222,19 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier ~(catchup_breadcrumbs_reader : ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Ledger_catchup of unit Ivar.t | `Catchup_scheduler] ) + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] ) Reader.t) ~(catchup_breadcrumbs_writer : ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t Rose_tree.t list - * [`Ledger_catchup of unit Ivar.t | `Catchup_scheduler] + * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] , crash buffered , unit ) Writer.t) ~processed_transition_writer = let catchup_scheduler = - Catchup_scheduler.create ~logger ~precomputed_values ~verifier - ~trust_system ~frontier ~time_controller ~catchup_job_writer - ~catchup_breadcrumbs_writer ~clean_up_signal:clean_up_catchup_scheduler + Catchup_scheduler.create ~logger ~precomputed_values ~verifier ~trust_system + ~frontier ~time_controller ~catchup_job_writer ~catchup_breadcrumbs_writer + ~clean_up_signal:clean_up_catchup_scheduler in let add_and_finalize = add_and_finalize ~frontier ~catchup_scheduler ~processed_transition_writer @@ -254,21 +248,22 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier ignore ( Reader.Merge.iter (* It is fine to skip the cache layer on blocks produced by this node - * because it is extraordinarily unlikely we would write an internal bug - * triggering this case, and the 
external case (where we received an - * identical external transition from the network) can happen iff there - * is another node with the exact same private key and view of the - * transaction pool. *) + * because it is extraordinarily unlikely we would write an internal bug + * triggering this case, and the external case (where we received an + * identical external transition from the network) can happen iff there + * is another node with the exact same private key and view of the + * transaction pool. *) [ Reader.map producer_transition_reader ~f:(fun breadcrumb -> Mina_metrics.( Gauge.inc_one Transition_frontier_controller.transitions_being_processed) ; - `Local_breadcrumb (Cached.pure breadcrumb) ) + `Local_breadcrumb (Cached.pure breadcrumb)) ; Reader.map catchup_breadcrumbs_reader ~f:(fun (cb, catchup_breadcrumbs_callback) -> - `Catchup_breadcrumbs (cb, catchup_breadcrumbs_callback) ) + `Catchup_breadcrumbs (cb, catchup_breadcrumbs_callback)) ; Reader.map primary_transition_reader ~f:(fun vt -> - `Partially_valid_transition vt ) ] + `Partially_valid_transition vt) + ] ~f:(fun msg -> let open Deferred.Let_syntax in trace_recurring "transition_handler_processor" (fun () -> @@ -281,11 +276,11 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier Rose_tree.Deferred.Or_error.iter subtree (* It could be the case that by the time we try and - * add the breadcrumb, it's no longer relevant when - * we're catching up *) + * add the breadcrumb, it's no longer relevant when + * we're catching up *) ~f: (add_and_finalize ~logger ~only_if_present:true - ~source:`Catchup) ) + ~source:`Catchup)) with | Ok () -> () @@ -293,14 +288,13 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier List.iter breadcrumb_subtrees ~f:(fun tree -> Rose_tree.iter tree ~f:(fun cached_breadcrumb -> let (_ : Transition_frontier.Breadcrumb.t) = - Cached.invalidate_with_failure - cached_breadcrumb + Cached.invalidate_with_failure cached_breadcrumb in - () ) 
) ; + ())) ; [%log error] "Error, failed to attach all catchup breadcrumbs to \ transition frontier: $error" - ~metadata:[("error", Error_json.error_to_yojson err)] + ~metadata:[ ("error", Error_json.error_to_yojson err) ] ) >>| fun () -> match subsequent_callback_action with @@ -332,7 +326,8 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier () | Error err -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson err)] + ~metadata: + [ ("error", Error_json.error_to_yojson err) ] "Error, failed to attach produced breadcrumb to \ transition frontier: $error" ; let (_ : Transition_frontier.Breadcrumb.t) = @@ -342,10 +337,9 @@ let run ~logger ~(precomputed_values : Precomputed_values.t) ~verifier in Mina_metrics.( Gauge.dec_one - Transition_frontier_controller - .transitions_being_processed) + Transition_frontier_controller.transitions_being_processed) | `Partially_valid_transition transition -> - process_transition ~transition ) ) + process_transition ~transition)) : unit Deferred.t ) let%test_module "Transition_handler.Processor tests" = @@ -374,13 +368,12 @@ let%test_module "Transition_handler.Processor tests" = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants ~conf_dir:None - ~pids:(Child_processes.Termination.create_pid_table ()) ) + ~pids:(Child_processes.Termination.create_pid_table ())) let downcast_breadcrumb breadcrumb = let transition = Transition_frontier.Breadcrumb.validated_transition breadcrumb - |> External_transition.Validation - .reset_frontier_dependencies_validation + |> External_transition.Validation.reset_frontier_dependencies_validation |> External_transition.Validation.reset_staged_ledger_diff_validation in Envelope.Incoming.wrap ~data:transition ~sender:Envelope.Sender.Local @@ -424,15 +417,16 @@ let%test_module "Transition_handler.Processor tests" = List.iter branch ~f:(fun breadcrumb -> downcast_breadcrumb breadcrumb |> 
Unprocessed_transition_cache.register_exn cache - |> Strict_pipe.Writer.write valid_transition_writer ) ; + |> Strict_pipe.Writer.write valid_transition_writer) ; match%map Block_time.Timeout.await ~timeout_duration:(Block_time.Span.of_ms 30000L) time_controller (Strict_pipe.Reader.fold_until processed_transition_reader ~init:branch - ~f:(fun remaining_breadcrumbs - (`Transition newly_added_transition, _) + ~f:(fun + remaining_breadcrumbs + (`Transition newly_added_transition, _) -> Deferred.return ( match remaining_breadcrumbs with @@ -449,17 +443,18 @@ let%test_module "Transition_handler.Processor tests" = ( External_transition.Validated .blockchain_length newly_added_transition - |> Unsigned.UInt32.to_int ) ) ] + |> Unsigned.UInt32.to_int ) ) + ] "transition of $height passed processor" ; if List.is_empty tail then `Stop true else `Continue tail | [] -> - `Stop false ) )) + `Stop false ))) with | `Timeout -> failwith "test timed out" | `Ok (`Eof _) -> failwith "pipe closed unexpectedly" | `Ok (`Terminated x) -> - x ) ) ) + x) )) end ) diff --git a/src/lib/transition_handler/unprocessed_transition_cache.ml b/src/lib/transition_handler/unprocessed_transition_cache.ml index 17a4cf01fad..e4473acca0a 100644 --- a/src/lib/transition_handler/unprocessed_transition_cache.ml +++ b/src/lib/transition_handler/unprocessed_transition_cache.ml @@ -20,7 +20,7 @@ module Transmuter = struct module Target = State_hash let transmute enveloped_transition = - let {With_hash.hash; data= _}, _ = + let { With_hash.hash; data = _ }, _ = Envelope.Incoming.data enveloped_transition in hash diff --git a/src/lib/transition_handler/validator.ml b/src/lib/transition_handler/validator.ml index 6757a0c4e6d..3d10dd9236f 100644 --- a/src/lib/transition_handler/validator.ml +++ b/src/lib/transition_handler/validator.ml @@ -37,7 +37,8 @@ let validate_transition ~consensus_constants ~logger ~frontier (Consensus.Hooks.select ~constants:consensus_constants ~logger: (Logger.extend logger - 
[("selection_context", `String "Transition_handler.Validator")]) + [ ("selection_context", `String "Transition_handler.Validator") + ]) ~existing: (Transition_frontier.Breadcrumb.consensus_state_with_hash root_breadcrumb) @@ -61,7 +62,7 @@ let run ~logger ~consensus_constants ~trust_system ~time_controller ~frontier let module Lru = Core_extended_cache.Lru in don't_wait_for (Reader.iter transition_reader ~f:(fun transition_env -> - let {With_hash.hash= transition_hash; data= transition}, _ = + let { With_hash.hash = transition_hash; data = transition }, _ = Envelope.Incoming.data transition_env in let sender = Envelope.Incoming.sender transition_env in @@ -76,16 +77,15 @@ let run ~logger ~consensus_constants ~trust_system ~time_controller ~frontier , Some ( "external transition $state_hash" , [ ("state_hash", State_hash.to_yojson transition_hash) - ; ( "transition" - , External_transition.to_yojson transition ) ] ) ) + ; ("transition", External_transition.to_yojson transition) + ] ) ) in let transition_time = External_transition.protocol_state transition |> Protocol_state.blockchain_state |> Blockchain_state.timestamp |> Block_time.to_time in - Perf_histograms.add_span - ~name:"accepted_transition_remote_latency" + Perf_histograms.add_span ~name:"accepted_transition_remote_latency" (Core_kernel.Time.diff Block_time.(now time_controller |> to_time) transition_time) ; @@ -106,7 +106,8 @@ let run ~logger ~consensus_constants ~trust_system ~time_controller ~frontier ; ("reason", `String "not selected over current root") ; ( "protocol_state" , External_transition.protocol_state transition - |> Protocol_state.value_to_yojson ) ] + |> Protocol_state.value_to_yojson ) + ] "Validation error: external transition with state hash \ $state_hash was rejected for reason $reason" ; Trust_system.record_envelope_sender trust_system logger sender @@ -118,4 +119,4 @@ let run ~logger ~consensus_constants ~trust_system ~time_controller ~frontier , Envelope.Sender.to_yojson 
(Envelope.Incoming.sender transition_env) ) ; ("transition", External_transition.to_yojson transition) - ] ) ) )) + ] ) ))) diff --git a/src/lib/transition_router/initial_validator.ml b/src/lib/transition_router/initial_validator.ml index bcde4423bf9..a618b68187a 100644 --- a/src/lib/transition_router/initial_validator.ml +++ b/src/lib/transition_router/initial_validator.ml @@ -8,7 +8,7 @@ open Mina_transition open Network_peer type validation_error = - [ `Invalid_time_received of [`Too_early | `Too_late of int64] + [ `Invalid_time_received of [ `Too_early | `Too_late of int64 ] | `Invalid_genesis_protocol_state | `Invalid_proof | `Invalid_delta_transition_chain_proof @@ -26,7 +26,7 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received let message' = "external transition with state hash $state_hash" ^ Option.value_map message ~default:"" ~f:(fun (txt, _) -> - sprintf ", %s" txt ) + sprintf ", %s" txt) in let metadata = ("state_hash", State_hash.to_yojson state_hash) @@ -39,13 +39,15 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received match error with | `Invalid_time_received `Too_early -> [ ("reason", `String "invalid time") - ; ("time_error", `String "too early") ] + ; ("time_error", `String "too early") + ] | `Invalid_time_received (`Too_late slot_diff) -> [ ("reason", `String "invalid time") ; ("time_error", `String "too late") - ; ("slot_diff", `String (Int64.to_string slot_diff)) ] + ; ("slot_diff", `String (Int64.to_string slot_diff)) + ] | `Invalid_genesis_protocol_state -> - [("reason", `String "invalid genesis state")] + [ ("reason", `String "invalid genesis state") ] | `Invalid_proof -> [ ("reason", `String "invalid proof") ; ( "protocol_state" @@ -53,16 +55,18 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received |> Protocol_state.value_to_yojson ) ; ( "proof" , External_transition.protocol_state_proof transition - |> Proof.to_yojson ) ] + |> Proof.to_yojson ) + ] | 
`Invalid_delta_transition_chain_proof -> - [("reason", `String "invalid delta transition chain proof")] + [ ("reason", `String "invalid delta transition chain proof") ] | `Verifier_error err -> [ ("reason", `String "verifier error") - ; ("error", Error_json.error_to_yojson err) ] + ; ("error", Error_json.error_to_yojson err) + ] | `Mismatched_protocol_version -> - [("reason", `String "protocol version mismatch")] + [ ("reason", `String "protocol version mismatch") ] | `Invalid_protocol_version -> - [("reason", `String "invalid protocol version")] + [ ("reason", `String "invalid protocol version") ] in let metadata = [ ("state_hash", State_hash.to_yojson state_hash) @@ -70,7 +74,8 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received , `String (Time.to_string_abs (Block_time.to_time time_received) - ~zone:Time.Zone.utc) ) ] + ~zone:Time.Zone.utc) ) + ] @ metadata in [%log error] ~metadata @@ -86,10 +91,10 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received rejected for reason $reason" ; match error with | `Verifier_error err -> - let error_metadata = [("error", Error_json.error_to_yojson err)] in + let error_metadata = [ ("error", Error_json.error_to_yojson err) ] in [%log error] ~metadata: - (error_metadata @ [("state_hash", State_hash.to_yojson state_hash)]) + (error_metadata @ [ ("state_hash", State_hash.to_yojson state_hash) ]) "Error in verifier verifying blockchain proof for $state_hash: $error" ; Deferred.unit | `Invalid_proof -> @@ -108,7 +113,7 @@ let handle_validation_error ~logger ~rejected_blocks_logger ~time_received (Gossiped_old_transition (slot_diff, delta)) (Some ( "off by $slot_diff slots" - , [("slot_diff", `String (Int64.to_string slot_diff))] )) + , [ ("slot_diff", `String (Int64.to_string slot_diff)) ] )) | `Invalid_protocol_version -> punish Sent_invalid_protocol_version None | `Mismatched_protocol_version -> @@ -121,8 +126,9 @@ module Duplicate_block_detector = struct module T = struct (* 
order of fields significant, compare by epoch, then slot, then producer *) type t = - { consensus_time: Consensus.Data.Consensus_time.t - ; block_producer: Public_key.Compressed.t } + { consensus_time : Consensus.Data.Consensus_time.t + ; block_producer : Public_key.Compressed.t + } [@@deriving sexp, compare] end @@ -130,18 +136,20 @@ module Duplicate_block_detector = struct include Comparable.Make (T) end - type t = {mutable table: State_hash.t Blocks.Map.t; mutable latest_epoch: int} + type t = + { mutable table : State_hash.t Blocks.Map.t; mutable latest_epoch : int } let gc_count = ref 0 (* create dummy block to split map on *) let make_splitting_block ~consensus_constants - ({consensus_time; block_producer= _} : Blocks.t) : Blocks.t = + ({ consensus_time; block_producer = _ } : Blocks.t) : Blocks.t = let block_producer = Public_key.Compressed.empty in - { consensus_time= + { consensus_time = Consensus.Data.Consensus_time.get_old ~constants:consensus_constants consensus_time - ; block_producer } + ; block_producer + } (* every gc_interval blocks seen, discard blocks more than gc_width ago *) let table_gc ~(precomputed_values : Precomputed_values.t) t block = @@ -159,7 +167,7 @@ module Duplicate_block_detector = struct let _, _, gt_map = Map.split t.table splitting_block in t.table <- gt_map - let create () = {table= Map.empty (module Blocks); latest_epoch= 0} + let create () = { table = Map.empty (module Blocks); latest_epoch = 0 } let check ~precomputed_values ~rejected_blocks_logger ~time_received t logger external_transition_with_hash = @@ -173,7 +181,7 @@ module Duplicate_block_detector = struct let block_producer = External_transition.block_producer external_transition in - let block = Blocks.{consensus_time; block_producer} in + let block = Blocks.{ consensus_time; block_producer } in (* try table GC *) table_gc ~precomputed_values t block ; match Map.find t.table block with @@ -192,7 +200,8 @@ module Duplicate_block_detector = struct , `String 
(Time.to_string_abs (Block_time.to_time time_received) - ~zone:Time.Zone.utc) ) ] + ~zone:Time.Zone.utc) ) + ] in let msg : (_, unit, string, unit) format4 = "Duplicate producer and slot: producer = $block_producer, \ @@ -204,8 +213,7 @@ module Duplicate_block_detector = struct end let run ~logger ~trust_system ~verifier ~transition_reader - ~valid_transition_writer ~initialization_finish_signal ~precomputed_values - = + ~valid_transition_writer ~initialization_finish_signal ~precomputed_values = let genesis_state_hash = Precomputed_values.genesis_state_hash precomputed_values in @@ -265,8 +273,8 @@ let run ~logger ~trust_system ~verifier ~transition_reader (validate_genesis_protocol_state ~genesis_state_hash) >>= (fun x -> Interruptible.uninterruptible - (validate_proofs ~verifier [x]) - >>| List.hd_exn ) + (validate_proofs ~verifier [ x ]) + >>| List.hd_exn) >>= defer validate_delta_transition_chain >>= defer validate_protocol_versions) with @@ -304,8 +312,9 @@ let run ~logger ~trust_system ~verifier ~transition_reader , `String (Time.to_string_abs (Block_time.to_time time_received) - ~zone:Time.Zone.utc) ) ] + ~zone:Time.Zone.utc) ) + ] in [%log error] ~metadata "Dropping blocks because libp2p validation expired" ) - else Deferred.unit )) + else Deferred.unit)) diff --git a/src/lib/transition_router/transition_router.ml b/src/lib/transition_router/transition_router.ml index a1a8eb4d1f7..97f54f9bf5f 100644 --- a/src/lib/transition_router/transition_router.ml +++ b/src/lib/transition_router/transition_router.ml @@ -8,10 +8,10 @@ open Mina_numbers type Structured_log_events.t += Starting_transition_frontier_controller [@@deriving - register_event {msg= "Starting transition frontier controller phase"}] + register_event { msg = "Starting transition frontier controller phase" }] type Structured_log_events.t += Starting_bootstrap_controller - [@@deriving register_event {msg= "Starting bootstrap controller phase"}] + [@@deriving register_event { msg = "Starting 
bootstrap controller phase" }] let create_bufferred_pipe ?name () = Strict_pipe.create ?name (Buffered (`Capacity 50, `Overflow Crash)) @@ -51,7 +51,8 @@ let is_transition_for_bootstrap ~logger ~logger: (Logger.extend logger [ ( "selection_context" - , `String "Transition_router.is_transition_for_bootstrap" ) ]) + , `String "Transition_router.is_transition_for_bootstrap" ) + ]) let start_transition_frontier_controller ~logger ~trust_system ~verifier ~network ~time_controller ~producer_transition_reader_ref @@ -77,7 +78,7 @@ let start_transition_frontier_controller ~logger ~trust_system ~verifier Transition_frontier_controller.run ~logger ~trust_system ~verifier ~network ~time_controller ~collected_transitions ~frontier ~network_transition_reader:!transition_reader_ref - ~producer_transition_reader ~clear_reader ~precomputed_values ) + ~producer_transition_reader ~clear_reader ~precomputed_values) in Strict_pipe.Reader.iter new_verified_transition_reader ~f: @@ -105,8 +106,7 @@ let start_bootstrap_controller ~logger ~trust_system ~verifier ~network producer_transition_reader_ref := producer_transition_reader ; producer_transition_writer_ref := producer_transition_writer ; Option.iter best_seen_transition ~f:(fun best_seen_transition -> - Strict_pipe.Writer.write bootstrap_controller_writer best_seen_transition - ) ; + Strict_pipe.Writer.write bootstrap_controller_writer best_seen_transition) ; don't_wait_for (Broadcast_pipe.Writer.write frontier_w None) ; trace_recurring "bootstrap controller" (fun () -> upon @@ -120,8 +120,7 @@ let start_bootstrap_controller ~logger ~trust_system ~verifier ~network ~network ~time_controller ~producer_transition_reader_ref ~producer_transition_writer_ref ~verified_transition_writer ~clear_reader ~collected_transitions ~transition_reader_ref - ~transition_writer_ref ~frontier_w ~precomputed_values new_frontier - ) ) + ~transition_writer_ref ~frontier_w ~precomputed_values new_frontier)) let download_best_tip ~logger ~network 
~verifier ~trust_system ~most_recent_valid_block_writer ~genesis_constants ~precomputed_values = @@ -140,7 +139,8 @@ let download_best_tip ~logger ~network ~verifier ~trust_system [%log debug] ~metadata: [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", Error_json.error_to_yojson e) ] + ; ("error", Error_json.error_to_yojson e) + ] "Couldn't get best tip from peer: $error" ; return None | Ok peer_best_tip -> ( @@ -150,7 +150,8 @@ let download_best_tip ~logger ~network ~verifier ~trust_system ; ( "length" , Length.to_yojson (External_transition.blockchain_length peer_best_tip.data) - ) ] + ) + ] "Successfully downloaded best tip with $length from $peer" ; (* TODO: Use batch verification instead *) match%bind @@ -161,7 +162,8 @@ let download_best_tip ~logger ~network ~verifier ~trust_system [%log warn] ~metadata: [ ("peer", Network_peer.Peer.to_yojson peer) - ; ("error", Error_json.error_to_yojson e) ] + ; ("error", Error_json.error_to_yojson e) + ] "Peer sent us bad proof for their best tip" ; let%map () = Trust_system.( @@ -174,17 +176,17 @@ let download_best_tip ~logger ~network ~verifier ~trust_system None | Ok (`Root _, `Best_tip candidate_best_tip) -> [%log debug] - ~metadata:[("peer", Network_peer.Peer.to_yojson peer)] + ~metadata:[ ("peer", Network_peer.Peer.to_yojson peer) ] "Successfully verified best tip from $peer" ; return (Some (Envelope.Incoming.wrap_peer - ~data:{peer_best_tip with data= candidate_best_tip} - ~sender:peer)) ) ) + ~data:{ peer_best_tip with data = candidate_best_tip } + ~sender:peer)) )) in [%log debug] ~metadata: - [("actual", `Int (List.length tips)); ("expected", `Int num_peers)] + [ ("actual", `Int (List.length tips)); ("expected", `Int num_peers) ] "Finished requesting tips. 
Got $actual / $expected" ; let res = List.fold tips ~init:None ~f:(fun acc enveloped_candidate_best_tip -> @@ -205,7 +207,7 @@ let download_best_tip ~logger ~network ~verifier ~trust_system | `Keep -> enveloped_existing_best_tip | `Take -> - enveloped_candidate_best_tip ) ) + enveloped_candidate_best_tip)) in Option.iter res ~f:(fun best -> let best_tip_length = @@ -216,19 +218,18 @@ let download_best_tip ~logger ~network ~verifier ~trust_system best_tip_length ; don't_wait_for @@ Broadcast_pipe.Writer.write most_recent_valid_block_writer - best.data.data ) ; + best.data.data) ; Option.map res ~f: (Envelope.Incoming.map ~f:(fun (x : _ Proof_carrying_data.t) -> Ledger_catchup.Best_tip_lru.add x ; - x.data )) + x.data)) let load_frontier ~logger ~verifier ~persistent_frontier ~persistent_root ~consensus_local_state ~precomputed_values ~catchup_mode = match%map Transition_frontier.load ~logger ~verifier ~consensus_local_state - ~persistent_root ~persistent_frontier ~precomputed_values ~catchup_mode - () + ~persistent_root ~persistent_frontier ~precomputed_values ~catchup_mode () with | Ok frontier -> [%log info] "Successfully loaded frontier" ; @@ -263,7 +264,8 @@ let wait_for_high_connectivity ~logger ~network ~is_seed = [%log info] ~metadata: [ ( "max seconds to wait for high connectivity" - , `Float connectivity_time_upperbound ) ] + , `Float connectivity_time_upperbound ) + ] "Will start initialization without connecting with too any peers" else ( [%log error] @@ -275,9 +277,11 @@ let wait_for_high_connectivity ~logger ~network ~is_seed = ~metadata: [ ("num peers", `Int (List.length peers)) ; ( "max seconds to wait for high connectivity" - , `Float connectivity_time_upperbound ) ] + , `Float connectivity_time_upperbound ) + ] "Will start initialization without connecting with too many peers" - ) ] + ) + ] let initialize ~logger ~network ~is_seed ~is_demo_mode ~verifier ~trust_system ~time_controller ~frontier_w ~producer_transition_reader_ref @@ -310,89 +314,94 @@ 
let initialize ~logger ~network ~is_seed ~is_demo_mode ~verifier ~trust_system ~time_controller ~producer_transition_reader_ref ~producer_transition_writer_ref ~verified_transition_writer ~clear_reader ~transition_reader_ref ~consensus_local_state - ~transition_writer_ref ~frontier_w ~persistent_root - ~persistent_frontier ~initial_root_transition ~catchup_mode - ~best_seen_transition:best_tip ~precomputed_values + ~transition_writer_ref ~frontier_w ~persistent_root ~persistent_frontier + ~initial_root_transition ~catchup_mode ~best_seen_transition:best_tip + ~precomputed_values | best_tip, Some frontier -> ( - match best_tip with - | Some best_tip - when is_transition_for_bootstrap ~logger frontier - (best_tip |> Envelope.Incoming.data) - ~precomputed_values -> - [%log info] - ~metadata: - [ ( "length" - , `Int - (Unsigned.UInt32.to_int - (External_transition.Initial_validated.blockchain_length - best_tip.data)) ) ] - "Network best tip is too new to catchup to (best_tip with $length); \ - starting bootstrap" ; - let initial_root_transition = - Transition_frontier.(Breadcrumb.validated_transition (root frontier)) - in - let%map () = Transition_frontier.close ~loc:__LOC__ frontier in - start_bootstrap_controller ~logger ~trust_system ~verifier ~network - ~time_controller ~producer_transition_reader_ref - ~producer_transition_writer_ref ~verified_transition_writer - ~clear_reader ~transition_reader_ref ~consensus_local_state - ~transition_writer_ref ~frontier_w ~persistent_root - ~persistent_frontier ~initial_root_transition ~catchup_mode - ~best_seen_transition:(Some best_tip) ~precomputed_values - | _ -> - if Option.is_some best_tip then + match best_tip with + | Some best_tip + when is_transition_for_bootstrap ~logger frontier + (best_tip |> Envelope.Incoming.data) + ~precomputed_values -> [%log info] ~metadata: [ ( "length" , `Int (Unsigned.UInt32.to_int (External_transition.Initial_validated.blockchain_length - (Option.value_exn best_tip).data)) ) ] - "Network 
best tip is recent enough to catchup to (best_tip with \ - $length); syncing local state and starting participation" - else - [%log info] - "Successfully loaded frontier, but failed downloaded best tip \ - from network" ; - let curr_best_tip = Transition_frontier.best_tip frontier in - let%map () = - match - Consensus.Hooks.required_local_state_sync - ~constants:precomputed_values.consensus_constants - ~consensus_state: - (Transition_frontier.Breadcrumb.consensus_state curr_best_tip) - ~local_state:consensus_local_state - with - | None -> - [%log info] "Local state already in sync" ; - Deferred.unit - | Some sync_jobs -> ( - [%log info] "Local state is out of sync; " ; - match%map - Consensus.Hooks.sync_local_state - ~local_state:consensus_local_state ~logger ~trust_system - ~random_peers:(Mina_networking.random_peers network) - ~query_peer: - { Consensus.Hooks.Rpcs.query= - (fun peer rpc query -> - Mina_networking.( - query_peer network peer.peer_id - (Rpcs.Consensus_rpc rpc) query) ) } - ~ledger_depth: - precomputed_values.constraint_constants.ledger_depth - sync_jobs - with - | Error e -> - Error.tag e ~tag:"Local state sync failed" |> Error.raise - | Ok () -> - () ) - in - let collected_transitions = Option.to_list best_tip in - start_transition_frontier_controller ~logger ~trust_system ~verifier - ~network ~time_controller ~producer_transition_reader_ref - ~producer_transition_writer_ref ~verified_transition_writer - ~clear_reader ~collected_transitions ~transition_reader_ref - ~transition_writer_ref ~frontier_w ~precomputed_values frontier ) + best_tip.data)) ) + ] + "Network best tip is too new to catchup to (best_tip with \ + $length); starting bootstrap" ; + let initial_root_transition = + Transition_frontier.( + Breadcrumb.validated_transition (root frontier)) + in + let%map () = Transition_frontier.close ~loc:__LOC__ frontier in + start_bootstrap_controller ~logger ~trust_system ~verifier ~network + ~time_controller ~producer_transition_reader_ref + 
~producer_transition_writer_ref ~verified_transition_writer + ~clear_reader ~transition_reader_ref ~consensus_local_state + ~transition_writer_ref ~frontier_w ~persistent_root + ~persistent_frontier ~initial_root_transition ~catchup_mode + ~best_seen_transition:(Some best_tip) ~precomputed_values + | _ -> + if Option.is_some best_tip then + [%log info] + ~metadata: + [ ( "length" + , `Int + (Unsigned.UInt32.to_int + (External_transition.Initial_validated + .blockchain_length (Option.value_exn best_tip).data)) + ) + ] + "Network best tip is recent enough to catchup to (best_tip with \ + $length); syncing local state and starting participation" + else + [%log info] + "Successfully loaded frontier, but failed downloaded best tip \ + from network" ; + let curr_best_tip = Transition_frontier.best_tip frontier in + let%map () = + match + Consensus.Hooks.required_local_state_sync + ~constants:precomputed_values.consensus_constants + ~consensus_state: + (Transition_frontier.Breadcrumb.consensus_state curr_best_tip) + ~local_state:consensus_local_state + with + | None -> + [%log info] "Local state already in sync" ; + Deferred.unit + | Some sync_jobs -> ( + [%log info] "Local state is out of sync; " ; + match%map + Consensus.Hooks.sync_local_state + ~local_state:consensus_local_state ~logger ~trust_system + ~random_peers:(Mina_networking.random_peers network) + ~query_peer: + { Consensus.Hooks.Rpcs.query = + (fun peer rpc query -> + Mina_networking.( + query_peer network peer.peer_id + (Rpcs.Consensus_rpc rpc) query)) + } + ~ledger_depth: + precomputed_values.constraint_constants.ledger_depth + sync_jobs + with + | Error e -> + Error.tag e ~tag:"Local state sync failed" |> Error.raise + | Ok () -> + () ) + in + let collected_transitions = Option.to_list best_tip in + start_transition_frontier_controller ~logger ~trust_system ~verifier + ~network ~time_controller ~producer_transition_reader_ref + ~producer_transition_writer_ref ~verified_transition_writer + ~clear_reader 
~collected_transitions ~transition_reader_ref + ~transition_writer_ref ~frontier_w ~precomputed_values frontier ) let wait_till_genesis ~logger ~time_controller ~(precomputed_values : Precomputed_values.t) = @@ -408,7 +417,8 @@ let wait_till_genesis ~logger ~time_controller [%log warn] ~metadata: [ ( "time_till_genesis" - , `Int (Int64.to_int_exn (Time.Span.to_ms time_till_genesis)) ) ] + , `Int (Int64.to_int_exn (Time.Span.to_ms time_till_genesis)) ) + ] "Node started before the chain start time: waiting $time_till_genesis \ milliseconds before starting participation" ; let rec logger_loop () = @@ -424,7 +434,8 @@ let wait_till_genesis ~logger ~time_controller milliseconds before starting participation" ~metadata: [ ( "tm_remaining" - , `Int (Int64.to_int_exn @@ Time.Span.to_ms tm_remaining) ) ] ; + , `Int (Int64.to_int_exn @@ Time.Span.to_ms tm_remaining) ) + ] ; logger_loop () in Time.Timeout.await ~timeout_duration:time_till_genesis time_controller @@ -434,10 +445,10 @@ let wait_till_genesis ~logger ~time_controller let run ~logger ~trust_system ~verifier ~network ~is_seed ~is_demo_mode ~time_controller ~consensus_local_state ~persistent_root_location ~persistent_frontier_location - ~frontier_broadcast_pipe:(frontier_r, frontier_w) - ~network_transition_reader ~producer_transition_reader - ~most_recent_valid_block:( most_recent_valid_block_reader - , most_recent_valid_block_writer ) + ~frontier_broadcast_pipe:(frontier_r, frontier_w) ~network_transition_reader + ~producer_transition_reader + ~most_recent_valid_block: + (most_recent_valid_block_reader, most_recent_valid_block_writer) ~precomputed_values ~catchup_mode = let initialization_finish_signal = Ivar.create () in let clear_reader, clear_writer = @@ -459,7 +470,7 @@ let run ~logger ~trust_system ~verifier ~network ~is_seed ~is_demo_mode in don't_wait_for @@ Strict_pipe.Reader.iter producer_transition_reader ~f:(fun x -> - Strict_pipe.Writer.write !producer_transition_writer_ref x ) ; + 
Strict_pipe.Writer.write !producer_transition_writer_ref x) ; upon (wait_till_genesis ~logger ~time_controller ~precomputed_values) (fun () -> let valid_transition_reader, valid_transition_writer = @@ -516,7 +527,7 @@ let run ~logger ~trust_system ~verifier ~network ~is_seed ~is_demo_mode then Broadcast_pipe.Writer.write most_recent_valid_block_writer incoming_transition - else Deferred.unit ) ; + else Deferred.unit) ; don't_wait_for @@ Strict_pipe.Reader.iter_without_pushback valid_transition_reader2 ~f:(fun enveloped_transition -> @@ -560,5 +571,5 @@ let run ~logger ~trust_system ~verifier ~network ~is_seed ~is_demo_mode Deferred.unit in Strict_pipe.Writer.write !transition_writer_ref - enveloped_transition ) ) ) ; + enveloped_transition))) ; (verified_transition_reader, initialization_finish_signal) diff --git a/src/lib/transition_router/transition_router.mli b/src/lib/transition_router/transition_router.mli index 0cdfbdb3d44..873caec5cd3 100644 --- a/src/lib/transition_router/transition_router.mli +++ b/src/lib/transition_router/transition_router.mli @@ -6,10 +6,10 @@ type Structured_log_events.t += Starting_bootstrap_controller include Mina_intf.Transition_router_intf - with type transition_frontier := Transition_frontier.t - and type transition_frontier_persistent_root := - Transition_frontier.Persistent_root.t - and type transition_frontier_persistent_frontier := - Transition_frontier.Persistent_frontier.t - and type breadcrumb := Transition_frontier.Breadcrumb.t - and type network := Mina_networking.t + with type transition_frontier := Transition_frontier.t + and type transition_frontier_persistent_root := + Transition_frontier.Persistent_root.t + and type transition_frontier_persistent_frontier := + Transition_frontier.Persistent_frontier.t + and type breadcrumb := Transition_frontier.Breadcrumb.t + and type network := Mina_networking.t diff --git a/src/lib/trust_system/banned_status.ml b/src/lib/trust_system/banned_status.ml index eef32e1cbc2..7c0f4926f5b 
100644 --- a/src/lib/trust_system/banned_status.ml +++ b/src/lib/trust_system/banned_status.ml @@ -14,12 +14,13 @@ module Stable = struct | Banned_until tm -> `Assoc [ ( "Banned_until" - , `String (Time.to_string_abs tm ~zone:Time.Zone.utc) ) ] + , `String (Time.to_string_abs tm ~zone:Time.Zone.utc) ) + ] let of_yojson = function | `String "Unbanned" -> Ok Unbanned - | `Assoc [("Banned_until", `String s)] -> + | `Assoc [ ("Banned_until", `String s) ] -> Ok (Banned_until (Time.of_string s)) | _ -> Error "Banned_status.of_yojson: unexpected JSON" @@ -36,5 +37,4 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(to_yojson, of_yojson)] +[%%define_locally Stable.Latest.(to_yojson, of_yojson)] diff --git a/src/lib/trust_system/peer_status.ml b/src/lib/trust_system/peer_status.ml index 031bcbf23d2..696c0e56f89 100644 --- a/src/lib/trust_system/peer_status.ml +++ b/src/lib/trust_system/peer_status.ml @@ -3,7 +3,7 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type t = {trust: float; banned: Banned_status.Stable.V1.t} + type t = { trust : float; banned : Banned_status.Stable.V1.t } [@@deriving yojson] let to_latest = Fn.id diff --git a/src/lib/trust_system/peer_trust.ml b/src/lib/trust_system/peer_trust.ml index 3ea35c29f17..f0555fff803 100644 --- a/src/lib/trust_system/peer_trust.ml +++ b/src/lib/trust_system/peer_trust.ml @@ -39,14 +39,15 @@ module type Input_intf = sig module Db : Key_value_database.Intf.Ident - with type key := Peer_id.t - and type value := Record.t - and type config := Config.t + with type key := Peer_id.t + and type value := Record.t + and type config := Config.t module Action : Action_intf type Structured_log_events.t += - | Peer_banned of {sender_id: Peer_id.t; expiration: Time.t; action: string} + | Peer_banned of + { sender_id : Peer_id.t; expiration : Time.t; action : string } [@@deriving register_event] end @@ -75,10 +76,11 @@ module Log_events = struct (* TODO: Split per action. 
*) type Structured_log_events.t += | Peer_banned of - { sender_id: Network_peer.Peer.t - ; expiration: Time_with_json.t - ; action: string } - [@@deriving register_event {msg= ban_message}] + { sender_id : Network_peer.Peer.t + ; expiration : Time_with_json.t + ; action : string + } + [@@deriving register_event { msg = ban_message }] end include Log_events @@ -87,63 +89,64 @@ module Make0 (Inputs : Input_intf) = struct open Inputs type t = - { db: Db.t option + { db : Db.t option (* This is an option to allow using a fake trust system in tests. This is - ugly, but the alternative is functoring half of Coda over the trust - system. *) - ; bans_reader: (Peer_id.t * Time.t) Strict_pipe.Reader.t - ; bans_writer: + ugly, but the alternative is functoring half of Coda over the trust + system. *) + ; bans_reader : (Peer_id.t * Time.t) Strict_pipe.Reader.t + ; bans_writer : ( Peer_id.t * Time.t , Strict_pipe.synchronous , unit Deferred.t ) Strict_pipe.Writer.t - ; mutable actions_writers: (Action.t * Peer_id.t) Pipe.Writer.t list } + ; mutable actions_writers : (Action.t * Peer_id.t) Pipe.Writer.t list + } module Record_inst = Record.Make (Now) let create db_dir = let reader, writer = Strict_pipe.create Strict_pipe.Synchronous in - { db= Some (Db.create db_dir) - ; bans_reader= reader - ; bans_writer= writer - ; actions_writers= [] } + { db = Some (Db.create db_dir) + ; bans_reader = reader + ; bans_writer = writer + ; actions_writers = [] + } let null : unit -> t = fun () -> - let bans_reader, bans_writer = - Strict_pipe.create Strict_pipe.Synchronous - in - {db= None; bans_reader; bans_writer; actions_writers= []} + let bans_reader, bans_writer = Strict_pipe.create Strict_pipe.Synchronous in + { db = None; bans_reader; bans_writer; actions_writers = [] } - let ban_pipe {bans_reader; _} = bans_reader + let ban_pipe { bans_reader; _ } = bans_reader - let get_db {db; _} peer = Option.bind db ~f:(fun db' -> Db.get db' ~key:peer) + let get_db { db; _ } peer = + Option.bind db 
~f:(fun db' -> Db.get db' ~key:peer) - let peer_statuses {db; _} = + let peer_statuses { db; _ } = Option.value_map db ~default:[] ~f:(fun db' -> Db.to_alist db' |> List.map ~f:(fun (peer, record) -> - (peer, Record_inst.to_peer_status record) ) ) + (peer, Record_inst.to_peer_status record))) let lookup_ip t ip = List.filter (peer_statuses t) ~f:(fun (p, _status) -> - Unix.Inet_addr.equal (Peer_id.ip p) ip ) + Unix.Inet_addr.equal (Peer_id.ip p) ip) - let reset_ip ({db; _} as t) ip = + let reset_ip ({ db; _ } as t) ip = Option.value_map db ~default:() ~f:(fun db' -> List.map ~f:(fun (id, _status) -> Db.remove db' ~key:id) (lookup_ip t ip) - |> ignore ) ; + |> ignore) ; lookup_ip t ip - let close {db; bans_writer; _} = + let close { db; bans_writer; _ } = Option.iter db ~f:Db.close ; Strict_pipe.Writer.close bans_writer - let record ({db; bans_writer; _} as t) logger peer action = - t.actions_writers - <- List.filter t.actions_writers ~f:(Fn.compose not Pipe.is_closed) ; + let record ({ db; bans_writer; _ } as t) logger peer action = + t.actions_writers <- + List.filter t.actions_writers ~f:(Fn.compose not Pipe.is_closed) ; List.iter t.actions_writers ~f:(Fn.flip Pipe.write_without_pushback (action, peer)) ; let old_record = @@ -175,7 +178,7 @@ module Make0 (Inputs : Input_intf) = struct else "Decreasing" in [%log debug] - ~metadata:([("sender_id", Peer_id.to_yojson peer)] @ action_metadata) + ~metadata:([ ("sender_id", Peer_id.to_yojson peer) ] @ action_metadata) "%s trust for peer $sender_id due to %s. New trust is %f." 
verb action_fmt simple_new.trust in @@ -183,7 +186,7 @@ module Make0 (Inputs : Input_intf) = struct match (simple_old.banned, simple_new.banned) with | Unbanned, Banned_until expiration -> [%str_log faulty_peer_without_punishment] ~metadata:action_metadata - (Peer_banned {sender_id= peer; expiration; action= action_fmt}) ; + (Peer_banned { sender_id = peer; expiration; action = action_fmt }) ; if Option.is_some db then ( Mina_metrics.Gauge.inc_one Mina_metrics.Trust_system.banned_peers ; if tmp_bans_are_disabled then Deferred.unit @@ -254,10 +257,11 @@ let%test_module "peer_trust" = type Structured_log_events.t += | Peer_banned of - { sender_id: Peer_id.t - ; expiration: Time_with_json.t - ; action: string } - [@@deriving register_event {msg= "Peer banned"}] + { sender_id : Peer_id.t + ; expiration : Time_with_json.t + ; action : string + } + [@@deriving register_event { msg = "Peer banned" }] end) (* We want to check the output of the pipe in these tests, but it's @@ -274,7 +278,7 @@ let%test_module "peer_trust" = let res = Peer_trust_test.create () in don't_wait_for @@ Strict_pipe.Reader.iter_without_pushback res.bans_reader ~f:(fun v -> - ban_pipe_out := v :: !ban_pipe_out ) ; + ban_pipe_out := v :: !ban_pipe_out) ; res let nolog = Logger.null () @@ -299,24 +303,24 @@ let%test_module "peer_trust" = let db = setup_mock_db () in let%map () = Peer_trust_test.record db nolog 0 Insta_ban in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {trust= -1.0; banned= Banned_until time})] -> + | [ (_, { trust = -1.0; banned = Banned_until time }) ] -> [%test_eq: Time.t] time @@ Time.add !Mock_now.current_time Time.Span.day ; - assert_ban_pipe [0] ; + assert_ban_pipe [ 0 ] ; true | _ -> - false ) + false) let%test "trust decays by half in 24 hours" = Thread_safe.block_on_async_exn (fun () -> let db = setup_mock_db () in let%map () = Peer_trust_test.record db nolog 0 Action.Big_credit in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {trust= start_trust; banned= 
Unbanned})] -> ( + | [ (_, { trust = start_trust; banned = Unbanned }) ] -> ( Mock_now.advance Time.Span.day ; assert_ban_pipe [] ; match Peer_trust_test.lookup_ip db peer0 with - | [(_, {trust= decayed_trust; banned= Unbanned})] -> + | [ (_, { trust = decayed_trust; banned = Unbanned }) ] -> (* N.b. the floating point equality operator has a built in tolerance i.e. it's approximate equality. *) @@ -324,7 +328,7 @@ let%test_module "peer_trust" = | _ -> false ) | _ -> - false ) + false) let do_constant_rate rate f = (* Simulate running the function at the specified rate, in actions/sec, @@ -348,12 +352,12 @@ let%test_module "peer_trust" = let db = setup_mock_db () in let%map () = act_constant_rate db 1. Action.Slow_punish in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {banned= Banned_until _; _})] -> + | [ (_, { banned = Banned_until _; _ }) ] -> false - | [(_, {banned= Unbanned; _})] -> + | [ (_, { banned = Unbanned; _ }) ] -> assert_ban_pipe [] ; true | _ -> - false ) + false) let%test "peers do get banned for acting faster than the maximum rate" = if tmp_bans_are_disabled then true @@ -362,13 +366,13 @@ let%test_module "peer_trust" = let db = setup_mock_db () in let%map () = act_constant_rate db 1.1 Action.Slow_punish in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {banned= Banned_until _; _})] -> - assert_ban_pipe [0] ; + | [ (_, { banned = Banned_until _; _ }) ] -> + assert_ban_pipe [ 0 ] ; true - | [(_, {banned= Unbanned; _})] -> + | [ (_, { banned = Unbanned; _ }) ] -> false | _ -> - false ) + false) let%test "good cancels bad" = Thread_safe.block_on_async_exn (fun () -> @@ -378,15 +382,15 @@ let%test_module "peer_trust" = let%bind () = Peer_trust_test.record db nolog 0 Action.Slow_punish in - Peer_trust_test.record db nolog 0 Action.Slow_credit ) + Peer_trust_test.record db nolog 0 Action.Slow_credit) in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {banned= Banned_until _; _})] -> + | [ (_, { banned = Banned_until _; _ }) ] -> 
false - | [(_, {banned= Unbanned; _})] -> + | [ (_, { banned = Unbanned; _ }) ] -> assert_ban_pipe [] ; true | _ -> - false ) + false) let%test "insta-bans ignore positive trust" = if tmp_bans_are_disabled then true @@ -395,24 +399,24 @@ let%test_module "peer_trust" = let db = setup_mock_db () in let%bind () = act_constant_rate db 1. Action.Big_credit in ( match Peer_trust_test.lookup_ip db peer0 with - | [(_, {trust; banned= Unbanned})] -> + | [ (_, { trust; banned = Unbanned }) ] -> assert (Float.(trust > 0.99)) ; assert_ban_pipe [] - | [(_, {banned= Banned_until _; _})] -> + | [ (_, { banned = Banned_until _; _ }) ] -> failwith "Peer is banned after credits" | _ -> failwith "unexpected amount of peers" ) ; let%map () = Peer_trust_test.record db nolog 0 Action.Insta_ban in match Peer_trust_test.lookup_ip db peer0 with - | [(_, {trust= -1.0; banned= Banned_until _})] -> - assert_ban_pipe [0] ; + | [ (_, { trust = -1.0; banned = Banned_until _ }) ] -> + assert_ban_pipe [ 0 ] ; true - | [(_, {banned= Banned_until _; _})] -> + | [ (_, { banned = Banned_until _; _ }) ] -> failwith "Trust not set to -1" - | [(_, {banned= Unbanned; _})] -> + | [ (_, { banned = Unbanned; _ }) ] -> failwith "Peer not banned" | _ -> - false ) + false) let%test "multiple peers getting banned causes multiple ban events" = if tmp_bans_are_disabled then true @@ -421,8 +425,8 @@ let%test_module "peer_trust" = let db = setup_mock_db () in let%bind () = Peer_trust_test.record db nolog 0 Action.Insta_ban in let%map () = Peer_trust_test.record db nolog 1 Action.Insta_ban in - assert_ban_pipe [1; 0] (* Reverse order since it's a snoc list. *) ; - true ) + assert_ban_pipe [ 1; 0 ] (* Reverse order since it's a snoc list. 
*) ; + true) let%test_unit "actions are written to the pipe" = Thread_safe.block_on_async_exn (fun () -> @@ -434,7 +438,7 @@ let%test_module "peer_trust" = match%bind Pipe.read_exactly pipe ~num_values:3 with | `Exactly queue -> [%test_eq: (Action.t * int) list] - Action.[(Insta_ban, 0); (Big_credit, 1); (Slow_credit, 1)] + Action.[ (Insta_ban, 0); (Big_credit, 1); (Slow_credit, 1) ] (Queue.to_list queue) ; Pipe.close_read pipe ; let%bind () = @@ -443,7 +447,7 @@ let%test_module "peer_trust" = assert (List.is_empty db.actions_writers) ; Deferred.unit | _ -> - failwith "wrong number of actions written to pipe" ) + failwith "wrong number of actions written to pipe") end ) module Make (Action : Action_intf) = Make0 (struct diff --git a/src/lib/trust_system/peer_trust.mli b/src/lib/trust_system/peer_trust.mli index a733181dc9a..57308ee1825 100644 --- a/src/lib/trust_system/peer_trust.mli +++ b/src/lib/trust_system/peer_trust.mli @@ -38,9 +38,7 @@ val max_rate : float -> float type Structured_log_events.t += | Peer_banned of - { sender_id: Network_peer.Peer.t - ; expiration: Time.t - ; action: string } + { sender_id : Network_peer.Peer.t; expiration : Time.t; action : string } [@@deriving register_event] (* FIXME The parameter docs don't render :( *) diff --git a/src/lib/trust_system/record.ml b/src/lib/trust_system/record.ml index ce45769a82f..781f4bc6b4a 100644 --- a/src/lib/trust_system/record.ml +++ b/src/lib/trust_system/record.ml @@ -4,9 +4,9 @@ open Core module Stable = struct module V1 = struct type t = - { trust: float - ; trust_last_updated: Core.Time.Stable.V1.t - ; banned_until_opt: Core.Time.Stable.V1.t Core_kernel.Option.Stable.V1.t + { trust : float + ; trust_last_updated : Core.Time.Stable.V1.t + ; banned_until_opt : Core.Time.Stable.V1.t Core_kernel.Option.Stable.V1.t } let to_latest = Fn.id @@ -35,13 +35,13 @@ module Make (Now : sig end) : S = struct (** Create a new blank trust record. 
*) let init () = - {trust= 0.; trust_last_updated= Now.now (); banned_until_opt= None} + { trust = 0.; trust_last_updated = Now.now (); banned_until_opt = None } let clamp_trust trust = Float.clamp_exn trust ~min:(-1.0) ~max:1.0 (* Update a trust record. This must be called by every function that reads records, and is not exposed outside this module. *) - let update {trust; trust_last_updated; banned_until_opt} = + let update { trust; trust_last_updated; banned_until_opt } = let now = Now.now () in let elap = Time.diff now trust_last_updated in let elapsed_time = @@ -49,43 +49,48 @@ end) : S = struct in (* ntpd or a user may have reset the system time, yielding a negative elapsed time. in that case, clamp the elapsed time to zero*) let new_trust = (decay_rate ** Time.Span.to_sec elapsed_time) *. trust in - { trust= new_trust - ; trust_last_updated= now - ; banned_until_opt= + { trust = new_trust + ; trust_last_updated = now + ; banned_until_opt = ( match banned_until_opt with | Some banned_until -> if Time.is_later banned_until ~than:(Now.now ()) then Some banned_until else None | None -> - None ) } + None ) + } (** Set the record to banned, updating trust. *) let ban t = let new_record = update t in { new_record with - trust= -1.0 - ; banned_until_opt= Some (Time.add (Now.now ()) Time.Span.day) } + trust = -1.0 + ; banned_until_opt = Some (Time.add (Now.now ()) Time.Span.day) + } (** Add some trust, subtract by passing a negative number. *) let add_trust t increment = let new_record = update t in let new_trust = clamp_trust @@ (new_record.trust +. increment) in { new_record with - trust= new_trust - ; banned_until_opt= + trust = new_trust + ; banned_until_opt = ( if Float.(new_trust <= -1.) then Some (Time.add new_record.trust_last_updated Time.Span.day) - else new_record.banned_until_opt ) } + else new_record.banned_until_opt ) + } (** Convert the internal type to the externally visible one. 
*) let to_peer_status t = let new_record = update t in match new_record.banned_until_opt with | None -> - Peer_status.{trust= new_record.trust; banned= Banned_status.Unbanned} + Peer_status. + { trust = new_record.trust; banned = Banned_status.Unbanned } | Some banned_until -> Peer_status. - { trust= new_record.trust - ; banned= Banned_status.Banned_until banned_until } + { trust = new_record.trust + ; banned = Banned_status.Banned_until banned_until + } end diff --git a/src/lib/trust_system/record.mli b/src/lib/trust_system/record.mli index 16893554be0..0bbba0857ff 100644 --- a/src/lib/trust_system/record.mli +++ b/src/lib/trust_system/record.mli @@ -24,4 +24,5 @@ val decay_rate : float module Make (Now : sig val now : unit -> Time.t -end) : S [@@warning "-67"] +end) : S +[@@warning "-67"] diff --git a/src/lib/trust_system/trust_system.ml b/src/lib/trust_system/trust_system.ml index f365663be8e..1d83638fb7c 100644 --- a/src/lib/trust_system/trust_system.ml +++ b/src/lib/trust_system/trust_system.ml @@ -34,8 +34,7 @@ module Actions = struct (**Peer gossiped a transition that has a different genesis protocol state from that of mine*) | Sent_invalid_transition_chain_merkle_proof (** Peer sent us a transition chain witness that does not verify *) - | Violated_protocol - (** Peer violated the specification of the protocol. *) + | Violated_protocol (** Peer violated the specification of the protocol. *) | Made_request (** Peer made a valid request. This causes a small decrease to mitigate DoS. 
*) diff --git a/src/lib/unsigned_extended/intf.ml b/src/lib/unsigned_extended/intf.ml index 89786831bcb..8acda52a2bb 100644 --- a/src/lib/unsigned_extended/intf.ml +++ b/src/lib/unsigned_extended/intf.ml @@ -28,8 +28,8 @@ end module type F = functor (Unsigned : Unsigned.S) - (M :sig - - val length : int - end) - -> S with type t = Unsigned.t [@@warning "-67"] + (M : sig + val length : int + end) + -> S with type t = Unsigned.t +[@@warning "-67"] diff --git a/src/lib/unsigned_extended/unsigned_extended.ml b/src/lib/unsigned_extended/unsigned_extended.ml index 47989d28fc9..e8a69527610 100644 --- a/src/lib/unsigned_extended/unsigned_extended.ml +++ b/src/lib/unsigned_extended/unsigned_extended.ml @@ -1,13 +1,11 @@ (* unsigned_extended.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] open Core_kernel include Intf -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] open Snark_params open Tick @@ -22,7 +20,7 @@ module type Unsigned_intf = Unsigned.S module Extend (Unsigned : Unsigned.S) (M : sig - val length : int + val length : int end) : S with type t = Unsigned.t = struct ;; assert (M.length < Field.size_in_bits - 3) diff --git a/src/lib/user_command_input/user_command_input.ml b/src/lib/user_command_input/user_command_input.ml index b7b5646ab2c..de32706372c 100644 --- a/src/lib/user_command_input/user_command_input.ml +++ b/src/lib/user_command_input/user_command_input.ml @@ -24,7 +24,7 @@ module Payload = struct end] let create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo : t = - {fee; fee_token; fee_payer_pk; nonce; valid_until; memo} + { fee; fee_token; fee_payer_pk; nonce; valid_until; memo } let to_user_command_common (t : t) ~minimum_nonce ~inferred_nonce : (Signed_command_payload.Common.t, string) Result.t = @@ -38,26 +38,26 @@ module Payload = struct (* NB: A lower, explicitly given nonce can be used to cancel transactions or to re-issue them with a higher fee. 
*) - if - Account_nonce.(minimum_nonce <= nonce && nonce <= inferred_nonce) + if Account_nonce.(minimum_nonce <= nonce && nonce <= inferred_nonce) then Ok nonce else Error (sprintf - !"Input nonce %s either different from inferred nonce %s \ - or below minimum_nonce %s" + !"Input nonce %s either different from inferred nonce %s or \ + below minimum_nonce %s" (Account_nonce.to_string nonce) (Account_nonce.to_string inferred_nonce) (Account_nonce.to_string minimum_nonce)) in - { Signed_command_payload.Common.Poly.fee= t.fee - ; fee_token= t.fee_token - ; fee_payer_pk= t.fee_payer_pk + { Signed_command_payload.Common.Poly.fee = t.fee + ; fee_token = t.fee_token + ; fee_payer_pk = t.fee_payer_pk ; nonce - ; valid_until= t.valid_until - ; memo= t.memo } + ; valid_until = t.valid_until + ; memo = t.memo + } - let fee_payer ({fee_token; fee_payer_pk; _} : t) = + let fee_payer ({ fee_token; fee_payer_pk; _ } : t) = Account_id.create fee_payer_pk fee_token end @@ -74,11 +74,11 @@ module Payload = struct end end] - let create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo ~body : t - = - { common= + let create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo ~body : t = + { common = Common.create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo - ; body } + ; body + } let to_user_command_payload (t : t) ~minimum_nonce ~inferred_nonce : (Signed_command_payload.t, string) Result.t = @@ -86,9 +86,9 @@ module Payload = struct let%map common = Common.to_user_command_common t.common ~minimum_nonce ~inferred_nonce in - {Signed_command_payload.Poly.common; body= t.body} + { Signed_command_payload.Poly.common; body = t.body } - let fee_payer ({common; _} : t) = Common.fee_payer common + let fee_payer ({ common; _ } : t) = Common.fee_payer common end module Sign_choice = struct @@ -120,19 +120,17 @@ module Stable = struct end end] -[%%define_locally -Stable.Latest.(to_yojson)] +[%%define_locally Stable.Latest.(to_yojson)] -let fee_payer ({payload; _} : t) = 
Payload.fee_payer payload +let fee_payer ({ payload; _ } : t) = Payload.fee_payer payload -let create ?nonce ~fee ~fee_token ~fee_payer_pk ~valid_until ~memo ~body - ~signer ~sign_choice () : t = +let create ?nonce ~fee ~fee_token ~fee_payer_pk ~valid_until ~memo ~body ~signer + ~sign_choice () : t = let valid_until = Option.value valid_until ~default:Global_slot.max_value in let payload = - Payload.create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo - ~body + Payload.create ~fee ~fee_token ~fee_payer_pk ?nonce ~valid_until ~memo ~body in - {payload; signer; signature= sign_choice} + { payload; signer; signature = sign_choice } let sign ~signer ~(user_command_payload : Signed_command_payload.t) = function | Sign_choice.Signature signature -> @@ -203,7 +201,7 @@ let to_user_command ?(nonce_map = Account_id.Map.empty) ~get_current_nonce (Result.map_error ~f:(fun str -> Error.createf "Error creating user command: %s Error: %s" (Yojson.Safe.to_string (to_yojson client_input)) - str )) + str)) @@ let open Deferred.Result.Let_syntax in let fee_payer = fee_payer client_input in @@ -240,6 +238,6 @@ let to_user_commands ?(nonce_map = Account_id.Map.empty) ~get_current_nonce to_user_command ~nonce_map ~get_current_nonce ~get_account ~constraint_constants ~logger uc_input in - (res :: valid_user_commands, updated_nonce_map) ) + (res :: valid_user_commands, updated_nonce_map)) in List.rev user_commands diff --git a/src/lib/user_command_input/user_command_input.mli b/src/lib/user_command_input/user_command_input.mli index e0d999ffc4b..179d12c0900 100644 --- a/src/lib/user_command_input/user_command_input.mli +++ b/src/lib/user_command_input/user_command_input.mli @@ -69,10 +69,9 @@ val create : val to_user_command : ?nonce_map:(Account.Nonce.t * Account.Nonce.t) Account_id.Map.t - -> get_current_nonce:( Account_id.t - -> ( [`Min of Account_nonce.t] * Account_nonce.t - , string ) - Result.t) + -> get_current_nonce: + ( Account_id.t + -> ([ `Min of Account_nonce.t ] * 
Account_nonce.t, string) Result.t) -> get_account:(Account_id.t -> Account.t option Participating_state.T.t) -> constraint_constants:Genesis_constants.Constraint_constants.t -> logger:Logger.t @@ -82,10 +81,9 @@ val to_user_command : val to_user_commands : ?nonce_map:(Account.Nonce.t * Account.Nonce.t) Account_id.Map.t - -> get_current_nonce:( Account_id.t - -> ( [`Min of Account_nonce.t] * Account_nonce.t - , string ) - Result.t) + -> get_current_nonce: + ( Account_id.t + -> ([ `Min of Account_nonce.t ] * Account_nonce.t, string) Result.t) -> get_account:(Account_id.t -> Account.t option Participating_state.T.t) -> constraint_constants:Genesis_constants.Constraint_constants.t -> logger:Logger.t diff --git a/src/lib/verifier/common.ml b/src/lib/verifier/common.ml index b8a46a05caa..d5a87ccda2e 100644 --- a/src/lib/verifier/common.ml +++ b/src/lib/verifier/common.ml @@ -7,13 +7,13 @@ let check : | `Invalid | `Valid_assuming of User_command.Valid.t * _ list ] = function | User_command.Signed_command c -> ( - match Signed_command.check c with - | None -> - `Invalid - | Some c -> - `Valid (User_command.Signed_command c) ) + match Signed_command.check c with + | None -> + `Invalid + | Some c -> + `Valid (User_command.Signed_command c) ) | Snapp_command (cmd, vks) -> - with_return (fun {return} -> + with_return (fun { return } -> let payload = lazy Snapp_command.( @@ -35,13 +35,16 @@ let check : , (p : Snapp_command.Party.Authorized.Proved.t) , (other : Snapp_command.Party.Body.t) ) = let statement : Snapp_statement.t = - {predicate= p.data.predicate; body1= p.data.body; body2= other} + { predicate = p.data.predicate + ; body1 = p.data.body + ; body2 = other + } in match p.authorization with | Signature s -> check_signature s p.data.body.pk ; None - | Both {signature; proof} -> + | Both { signature; proof } -> check_signature signature p.data.body.pk ; Some (vk, statement, proof) | Proof p -> @@ -54,17 +57,19 @@ let check : List.filter_map ~f:statement_to_check ( match 
(cmd, vks) with | Proved_proved r, `Two (vk1, vk2) -> - [(vk1, r.one, r.two.data.body); (vk2, r.two, r.one.data.body)] + [ (vk1, r.one, r.two.data.body) + ; (vk2, r.two, r.one.data.body) + ] | Proved_signed r, `One vk1 -> check_signature r.two.authorization r.two.data.body.pk ; - [(vk1, r.one, r.two.data.body)] + [ (vk1, r.one, r.two.data.body) ] | Proved_empty r, `One vk1 -> let two = Option.value_map r.two ~default:Snapp_command.Party.Body.dummy ~f:(fun two -> - two.data.body ) + two.data.body) in - [(vk1, r.one, two)] + [ (vk1, r.one, two) ] | Signed_signed r, `Zero -> check_signature r.one.authorization r.one.data.body.pk ; check_signature r.two.authorization r.two.data.body.pk ; @@ -82,4 +87,4 @@ let check : | [] -> `Valid v | _ :: _ -> - `Valid_assuming (v, statements_to_check) ) + `Valid_assuming (v, statements_to_check)) diff --git a/src/lib/verifier/dummy.ml b/src/lib/verifier/dummy.ml index 78a46337ef1..67f9ec62f03 100644 --- a/src/lib/verifier/dummy.ml +++ b/src/lib/verifier/dummy.ml @@ -32,7 +32,7 @@ let verify_commands _ (cs : User_command.Verifiable.t list) : | `Invalid -> `Invalid | `Valid_assuming (c, _) -> - `Valid c ) + `Valid c) |> Deferred.Or_error.return let verify_transaction_snarks _ ts = @@ -46,5 +46,5 @@ let verify_transaction_snarks _ ts = let msg_digest = Sok_message.digest message in let sok_digest = Transaction_snark.sok_digest proof in Sok_message.Digest.(equal sok_digest default) - || Mina_base.Sok_message.Digest.equal sok_digest msg_digest ) + || Mina_base.Sok_message.Digest.equal sok_digest msg_digest) |> Deferred.Or_error.return diff --git a/src/lib/verifier/prod.ml b/src/lib/verifier/prod.ml index 5422b1f3a5a..42a9781ba97 100644 --- a/src/lib/verifier/prod.ml +++ b/src/lib/verifier/prod.ml @@ -31,16 +31,17 @@ module Worker_state = struct (* bin_io required by rpc_parallel *) type init_arg = - { conf_dir: string option - ; logger: Logger.Stable.Latest.t - ; proof_level: Genesis_constants.Proof_level.Stable.Latest.t - ; 
constraint_constants: - Genesis_constants.Constraint_constants.Stable.Latest.t } + { conf_dir : string option + ; logger : Logger.Stable.Latest.t + ; proof_level : Genesis_constants.Proof_level.Stable.Latest.t + ; constraint_constants : + Genesis_constants.Constraint_constants.Stable.Latest.t + } [@@deriving bin_io_unversioned] type t = (module S) - let create {logger; proof_level; constraint_constants; _} : t Deferred.t = + let create { logger; proof_level; constraint_constants; _ } : t Deferred.t = Memory_stats.log_memory_stats logger ~process:"verifier" ; match proof_level with | Full -> @@ -70,7 +71,7 @@ module Worker_state = struct | `Invalid -> [] | `Valid_assuming (_, xs) -> - xs ) + xs) in let%map all_verified = Pickles.Side_loaded.verify @@ -83,7 +84,7 @@ module Worker_state = struct | `Invalid -> `Invalid | `Valid_assuming (c, xs) -> - if all_verified then `Valid c else `Valid_assuming xs ) + if all_verified then `Valid c else `Valid_assuming xs) let verify_blockchain_snarks = B.Proof.verify @@ -93,7 +94,7 @@ module Worker_state = struct result | Error e -> [%log error] - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] "Verifier threw an exception while verifying transaction \ snark" ; failwith "Verifier crashed" @@ -110,14 +111,13 @@ module Worker_state = struct | `Invalid -> `Invalid | `Valid_assuming (c, _) -> - `Valid c ) + `Valid c) |> Deferred.return let verify_blockchain_snarks _ = Deferred.return true let verify_transaction_snarks _ = Deferred.return true - end - : S ) + end : S ) let get = Fn.id end @@ -127,10 +127,10 @@ module Worker = struct module F = Rpc_parallel.Function type 'w functions = - { verify_blockchains: ('w, Blockchain.t list, bool) F.t - ; verify_transaction_snarks: + { verify_blockchains : ('w, Blockchain.t list, bool) F.t + ; verify_transaction_snarks : ('w, (Transaction_snark.t * Sok_message.t) list, bool) F.t - ; verify_commands: + ; verify_commands : ( 'w , 
User_command.Verifiable.t list , [ `Valid of User_command.Valid.t @@ -141,7 +141,8 @@ module Worker = struct * Pickles.Side_loaded.Proof.t ) list ] list ) - F.t } + F.t + } module Worker_state = Worker_state @@ -154,16 +155,15 @@ module Worker = struct module Functions (C : Rpc_parallel.Creator - with type worker_state := Worker_state.t - and type connection_state := Connection_state.t) = + with type worker_state := Worker_state.t + and type connection_state := Connection_state.t) = struct - let verify_blockchains (w : Worker_state.t) (chains : Blockchain.t list) - = + let verify_blockchains (w : Worker_state.t) (chains : Blockchain.t list) = let (module M) = Worker_state.get w in M.verify_blockchain_snarks (List.map chains ~f:(fun snark -> ( Blockchain_snark.Blockchain.state snark - , Blockchain_snark.Blockchain.proof snark ) )) + , Blockchain_snark.Blockchain.proof snark ))) let verify_transaction_snarks (w : Worker_state.t) ts = let (module M) = Worker_state.get w in @@ -179,12 +179,12 @@ module Worker = struct ~f:(fun ~worker_state ~conn_state:_ i -> f worker_state i) ~bin_input:i ~bin_output:o () in - { verify_blockchains= + { verify_blockchains = f ( [%bin_type_class: Blockchain.Stable.Latest.t list] , Bool.bin_t , verify_blockchains ) - ; verify_transaction_snarks= + ; verify_transaction_snarks = f ( [%bin_type_class: ( Transaction_snark.Stable.Latest.t @@ -192,7 +192,7 @@ module Worker = struct list] , Bool.bin_t , verify_transaction_snarks ) - ; verify_commands= + ; verify_commands = f ( [%bin_type_class: User_command.Verifiable.Stable.Latest.t list] , [%bin_type_class: @@ -204,10 +204,11 @@ module Worker = struct * Pickles.Side_loaded.Proof.Stable.Latest.t ) list ] list] - , verify_commands ) } + , verify_commands ) + } let init_worker_state - Worker_state.{conf_dir; logger; proof_level; constraint_constants} = + Worker_state.{ conf_dir; logger; proof_level; constraint_constants } = ( if Option.is_some conf_dir then let max_size = 256 * 1024 * 512 in let 
num_rotate = 1 in @@ -219,10 +220,9 @@ module Worker = struct ~log_filename:"mina-verifier.log" ~max_size ~num_rotate) ) ; [%log info] "Verifier started" ; Worker_state.create - {conf_dir; logger; proof_level; constraint_constants} + { conf_dir; logger; proof_level; constraint_constants } - let init_connection_state ~connection:_ ~worker_state:_ () = - Deferred.unit + let init_connection_state ~connection:_ ~worker_state:_ () = Deferred.unit end end @@ -230,11 +230,12 @@ module Worker = struct end type worker = - { connection: Worker.Connection.t - ; process: Process.t - ; exit_or_signal: Unix.Exit_or_signal.t Deferred.Or_error.t } + { connection : Worker.Connection.t + ; process : Process.t + ; exit_or_signal : Unix.Exit_or_signal.t Deferred.Or_error.t + } -type t = {worker: worker Ivar.t ref; logger: Logger.Stable.Latest.t} +type t = { worker : worker Ivar.t ref; logger : Logger.Stable.Latest.t } let plus_or_minus initial ~delta = initial +. (Random.float (2. *. delta) -. delta) @@ -270,11 +271,10 @@ let wait_safe ~logger process ~module_ ~location ~here = Logger.warn logger ~module_ ~location "Saw an error from Process.wait in wait_safe: $err" ~metadata: - [("err", Error_json.error_to_yojson (Error.of_exn exn))] - )) + [ ("err", Error_json.error_to_yojson (Error.of_exn exn)) ])) (fun () -> Process.wait process) in - Deferred.Result.map_error ~f:Error.of_exn deferred_wait ) + Deferred.Result.map_error ~f:Error.of_exn deferred_wait) with | Ok x -> x @@ -286,7 +286,7 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : t Deferred.t = let on_failure err = [%log error] "Verifier process failed with error $err" - ~metadata:[("err", Error_json.error_to_yojson err)] ; + ~metadata:[ ("err", Error_json.error_to_yojson err) ] ; Error.raise err in let create_worker () = @@ -310,12 +310,12 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : (fun exn -> let err = Error.of_exn ~backtrace:`Get exn in [%log error] "Error from verifier 
worker $err" - ~metadata:[("err", Error_json.error_to_yojson err)] )) + ~metadata:[ ("err", Error_json.error_to_yojson err) ])) (fun () -> Worker.spawn_in_foreground_exn ~connection_timeout:(Time.Span.of_min 1.) ~on_failure ~shutdown_on:Disconnect ~connection_state_init_arg:() - {conf_dir; logger; proof_level; constraint_constants} ) + { conf_dir; logger; proof_level; constraint_constants }) |> Deferred.Result.map_error ~f:Error.of_exn in Child_processes.Termination.wait_for_process_log_errors ~logger process @@ -344,19 +344,19 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : ~f:(fun stdout -> return @@ [%log debug] "Verifier stdout: $stdout" - ~metadata:[("stdout", `String stdout)] ) ; + ~metadata:[ ("stdout", `String stdout) ]) ; don't_wait_for @@ Pipe.iter (Process.stderr process |> Reader.pipe) ~f:(fun stderr -> return @@ [%log error] "Verifier stderr: $stderr" - ~metadata:[("stderr", `String stderr)] ) ; - {connection; process; exit_or_signal} + ~metadata:[ ("stderr", `String stderr) ]) ; + { connection; process; exit_or_signal } in let%map worker = create_worker () |> Deferred.Or_error.ok_exn in let worker_ref = ref (Ivar.create_full worker) in - let rec on_worker {connection= _; process; exit_or_signal} = + let rec on_worker { connection = _; process; exit_or_signal } = let restart_after = Time.Span.(of_min (15. 
|> plus_or_minus ~delta:2.5)) in let finished = Deferred.any @@ -366,7 +366,8 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : | Ok _ -> `Unexpected_termination | Error err -> - `Wait_threw_an_exception err ) ] + `Wait_threw_an_exception err ) + ] in upon finished (fun e -> let pid = Process.pid process in @@ -381,7 +382,7 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : match e with | `Unexpected_termination -> [%log error] "verifier terminated unexpectedly" - ~metadata:[("verifier_pid", `Int (Pid.to_int pid))] ; + ~metadata:[ ("verifier_pid", `Int (Pid.to_int pid)) ] ; Ivar.fill_if_empty create_worker_trigger () | `Time_to_restart | `Wait_threw_an_exception _ -> ( ( match e with @@ -389,18 +390,17 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : [%log info] "Saw an exception while trying to wait for the verifier \ process: $exn" - ~metadata:[("exn", Error_json.error_to_yojson err)] + ~metadata:[ ("exn", Error_json.error_to_yojson err) ] | _ -> () ) ; match Signal.send Signal.kill (`Pid pid) with | `No_such_process -> - [%log info] - "verifier failed to get sigkill (no such process)" - ~metadata:[("verifier_pid", `Int (Pid.to_int pid))] ; + [%log info] "verifier failed to get sigkill (no such process)" + ~metadata:[ ("verifier_pid", `Int (Pid.to_int pid)) ] ; Ivar.fill_if_empty create_worker_trigger () | `Ok -> [%log info] "verifier successfully got sigkill" - ~metadata:[("verifier_pid", `Int (Pid.to_int pid))] ) + ~metadata:[ ("verifier_pid", `Int (Pid.to_int pid)) ] ) in let new_worker = Ivar.create () in worker_ref := new_worker ; @@ -409,10 +409,12 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : match%map exit_or_signal with | Ok res -> [ ( "exit_status" - , `String (Unix.Exit_or_signal.to_string_hum res) ) ] + , `String (Unix.Exit_or_signal.to_string_hum res) ) + ] | Error err -> [ ("exit_status", `String "Unknown: wait threw an error") - ; ("exn", 
Error_json.error_to_yojson err) ] + ; ("exn", Error_json.error_to_yojson err) + ] in [%log info] "verifier successfully stopped" ~metadata: @@ -431,21 +433,21 @@ let create ~logger ~proof_level ~constraint_constants ~pids ~conf_dir : | Error err -> [%log error] "Failed to create a new verifier process: $err. Retrying..." - ~metadata:[("err", Error_json.error_to_yojson err)] ; + ~metadata:[ ("err", Error_json.error_to_yojson err) ] ; (* Wait 5s before retrying. *) let%bind () = after Time.Span.(of_sec 5.) in try_create_worker () in - try_create_worker ()) ) + try_create_worker ())) in on_worker worker ; - {worker= worker_ref; logger} + { worker = worker_ref; logger } let with_retry ~logger f = let pause = Time.Span.of_sec 5. in let rec go attempts_remaining = [%log trace] "Verifier trying with $attempts_remaining" - ~metadata:[("attempts_remaining", `Int attempts_remaining)] ; + ~metadata:[ ("attempts_remaining", `Int attempts_remaining) ] ; match%bind f () with | Ok (`Continue x) -> return (Ok x) @@ -459,9 +461,9 @@ let with_retry ~logger f = in go 4 -let verify_blockchain_snarks {worker; logger} chains = +let verify_blockchain_snarks { worker; logger } chains = with_retry ~logger (fun () -> - let%bind {connection; _} = + let%bind { connection; _ } = let ivar = !worker in match Ivar.peek ivar with | Some worker -> @@ -479,11 +481,12 @@ let verify_blockchain_snarks {worker; logger} chains = @@ `Stop (Error.of_string "verify_blockchain_snarks timeout") ) ; Worker.Connection.run connection ~f:Worker.functions.verify_blockchains ~arg:chains - |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ] ) + |> Deferred.Or_error.map ~f:(fun x -> `Continue x) + ]) module Id = Unique_id.Int () -let verify_transaction_snarks {worker; logger} ts = +let verify_transaction_snarks { worker; logger } ts = let id = Id.create () in let n = List.length ts in let metadata () = @@ -494,21 +497,21 @@ let verify_transaction_snarks {worker; logger} ts = [%log trace] "verify $n 
transaction_snarks (before)" ~metadata:(metadata ()) ; let res = with_retry ~logger (fun () -> - let%bind {connection; _} = Ivar.read !worker in + let%bind { connection; _ } = Ivar.read !worker in Worker.Connection.run connection ~f:Worker.functions.verify_transaction_snarks ~arg:ts - |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) + |> Deferred.Or_error.map ~f:(fun x -> `Continue x)) in upon res (fun x -> [%log trace] "verify $n transaction_snarks (after)!" ~metadata: ( ("result", `String (Sexp.to_string ([%sexp_of: bool Or_error.t] x))) - :: metadata () ) ) ; + :: metadata () )) ; res -let verify_commands {worker; logger} ts = +let verify_commands { worker; logger } ts = with_retry ~logger (fun () -> - let%bind {connection; _} = Ivar.read !worker in + let%bind { connection; _ } = Ivar.read !worker in Worker.Connection.run connection ~f:Worker.functions.verify_commands ~arg:ts - |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) + |> Deferred.Or_error.map ~f:(fun x -> `Continue x)) diff --git a/src/lib/verifier/verifier.ml b/src/lib/verifier/verifier.ml index c94aed2c774..b747900a221 100644 --- a/src/lib/verifier/verifier.ml +++ b/src/lib/verifier/verifier.ml @@ -13,7 +13,7 @@ let m = [proof_level <> Full], so this should make no difference. Inline tests shouldn't be run with [proof_level = Full]. *) - (module Dummy : Verifier_intf.S with type ledger_proof = Ledger_proof.t ) + (module Dummy : Verifier_intf.S with type ledger_proof = Ledger_proof.t) else (module Prod) include (val m) diff --git a/src/lib/verifier/verifier_intf.ml b/src/lib/verifier/verifier_intf.ml index be2cac4d7c9..d8ea1b3d57d 100644 --- a/src/lib/verifier/verifier_intf.ml +++ b/src/lib/verifier/verifier_intf.ml @@ -11,7 +11,7 @@ module Base = struct t -> Mina_base.User_command.Verifiable.t list (* The first level of error represents failure to verify, the second a failure in - communicating with the verifier. *) + communicating with the verifier. 
*) -> [ `Valid of Mina_base.User_command.Valid.t | `Invalid | `Valid_assuming of diff --git a/src/lib/visualization/visualization.ml b/src/lib/visualization/visualization.ml index 8459e6d31b3..1b778db9422 100644 --- a/src/lib/visualization/visualization.ml +++ b/src/lib/visualization/visualization.ml @@ -22,7 +22,7 @@ let rec to_dot (json : Yojson.Safe.t) = | `Assoc subvalues -> sprintf !"{%s|{%s}}" key @@ to_dot (`Assoc subvalues) | subvalue -> - sprintf !"%s:%s" key (to_dot subvalue) ) + sprintf !"%s:%s" key (to_dot subvalue)) |> String.concat ~sep:"|" | `List values | `Tuple values -> List.map values ~f:(fun value -> to_dot value) |> String.concat ~sep:"|" @@ -32,7 +32,7 @@ let rec to_dot (json : Yojson.Safe.t) = Bool.to_string value | `Variant (key, value) -> Option.value_map value ~default:key ~f:(fun some_value -> - sprintf !"%s:%s" key (to_dot some_value) ) + sprintf !"%s:%s" key (to_dot some_value)) | `Null -> "null" @@ -62,17 +62,17 @@ module Make_ocamlgraph (Node : Node_intf) = struct include Graph.Graphviz.Dot (struct include G - let graph_attributes _ = [`Rankdir `LeftToRight] + let graph_attributes _ = [ `Rankdir `LeftToRight ] let get_subgraph _ = None - let default_vertex_attributes _ = [`Shape `Record] + let default_vertex_attributes _ = [ `Shape `Record ] let vertex_name = Node.name let vertex_attributes node = let dot_format = to_dot @@ Node.display_to_yojson (Node.display node) in - [`Label dot_format] + [ `Label dot_format ] let default_edge_attributes _ = [] diff --git a/src/lib/vrf_lib/integrated.ml b/src/lib/vrf_lib/integrated.ml index 2b54a9e2bab..b34ab78487b 100644 --- a/src/lib/vrf_lib/integrated.ml +++ b/src/lib/vrf_lib/integrated.ml @@ -1,8 +1,8 @@ module Make (Impl : Snarky_backendless.Snark_intf.S) (Scalar : sig - type value + type value - type var + type var end) (Group : sig open Impl @@ -16,9 +16,9 @@ module Make module Shifted : sig module type S = Snarky_curves.Shifted_intf - with type ('a, 'b) checked := ('a, 'b) Checked.t - and 
type boolean_var := Boolean.var - and type curve_var := var + with type ('a, 'b) checked := ('a, 'b) Checked.t + and type boolean_var := Boolean.var + and type curve_var := var type 'a m = (module S with type t = 'a) end @@ -95,8 +95,8 @@ end = struct module Checked = struct open Let_syntax - let eval (type shifted) - ((module Shifted) : shifted Group.Checked.Shifted.m) ~private_key m = + let eval (type shifted) ((module Shifted) : shifted Group.Checked.Shifted.m) + ~private_key m = let%bind h = Message.Checked.hash_to_group m in let%bind u = (* This use of unshift_nonzero is acceptable since if h^private_key = 0 then diff --git a/src/lib/vrf_lib/standalone.ml b/src/lib/vrf_lib/standalone.ml index 39585a21105..af5b0ed6fd6 100644 --- a/src/lib/vrf_lib/standalone.ml +++ b/src/lib/vrf_lib/standalone.ml @@ -1,7 +1,7 @@ open Core module Context = struct - type ('message, 'pk) t = {message: 'message; public_key: 'pk} + type ('message, 'pk) t = { message : 'message; public_key : 'pk } [@@deriving sexp, hlist] end @@ -11,7 +11,7 @@ module Evaluation = struct [%%versioned module Stable = struct module V1 = struct - type 'scalar t = {c: 'scalar; s: 'scalar} [@@deriving sexp] + type 'scalar t = { c : 'scalar; s : 'scalar } [@@deriving sexp] let to_latest = Fn.id end @@ -24,7 +24,7 @@ module Evaluation = struct module Stable = struct module V1 = struct type ('group, 'dleq) t = - {discrete_log_equality: 'dleq; scaled_message_hash: 'group} + { discrete_log_equality : 'dleq; scaled_message_hash : 'group } [@@deriving sexp] end end] @@ -33,27 +33,27 @@ end module Make (Impl : Snarky_backendless.Snark_intf.S) (Scalar : sig - type t [@@deriving equal, sexp] + type t [@@deriving equal, sexp] - val random : unit -> t + val random : unit -> t - val add : t -> t -> t + val add : t -> t -> t - val mul : t -> t -> t + val mul : t -> t -> t - type var + type var - val typ : (var, t) Impl.Typ.t + val typ : (var, t) Impl.Typ.t - module Checked : sig - open Impl + module Checked : sig + open 
Impl - val to_bits : var -> Boolean.var Bitstring_lib.Bitstring.Lsb_first.t + val to_bits : var -> Boolean.var Bitstring_lib.Bitstring.Lsb_first.t - module Assert : sig - val equal : var -> var -> (unit, _) Checked.t - end + module Assert : sig + val equal : var -> var -> (unit, _) Checked.t end + end end) (Group : sig type t [@@deriving sexp] @@ -71,9 +71,9 @@ module Make module Checked : Snarky_curves.Weierstrass_checked_intf - with module Impl := Impl - and type unchecked := t - and type t = var + with module Impl := Impl + and type unchecked := t + and type t = var end) (Message : sig open Impl @@ -170,7 +170,7 @@ end = struct let typ = Impl.Typ.of_hlistable - [Message.typ; Public_key.typ] + [ Message.typ; Public_key.typ ] ~var_to_hlist:Context.to_hlist ~var_of_hlist:Context.of_hlist ~value_to_hlist:Context.to_hlist ~value_of_hlist:Context.of_hlist end @@ -180,7 +180,7 @@ end = struct module Evaluation = struct module Discrete_log_equality = struct type 'scalar t_ = 'scalar Evaluation.Discrete_log_equality.Poly.t = - {c: 'scalar; s: 'scalar} + { c : 'scalar; s : 'scalar } [@@deriving sexp, hlist] type t = Scalar.t t_ [@@deriving sexp] @@ -190,13 +190,13 @@ end = struct open Impl let typ : (var, t) Typ.t = - Typ.of_hlistable [Scalar.typ; Scalar.typ] ~var_to_hlist:t__to_hlist + Typ.of_hlistable [ Scalar.typ; Scalar.typ ] ~var_to_hlist:t__to_hlist ~var_of_hlist:t__of_hlist ~value_to_hlist:t__to_hlist ~value_of_hlist:t__of_hlist end type ('group, 'dleq) t_ = ('group, 'dleq) Evaluation.Poly.t = - {discrete_log_equality: 'dleq; scaled_message_hash: 'group} + { discrete_log_equality : 'dleq; scaled_message_hash : 'group } [@@deriving sexp] type t = (Group.t, Discrete_log_equality.t) t_ [@@deriving sexp] @@ -206,15 +206,15 @@ end = struct let typ : (var, t) Impl.Typ.t = let open Snarky_backendless.H_list in Impl.Typ.of_hlistable - [Discrete_log_equality.typ; Group.typ] - ~var_to_hlist:(fun {discrete_log_equality; scaled_message_hash} -> - [discrete_log_equality; 
scaled_message_hash] ) - ~value_to_hlist:(fun {discrete_log_equality; scaled_message_hash} -> - [discrete_log_equality; scaled_message_hash] ) - ~value_of_hlist:(fun [discrete_log_equality; scaled_message_hash] -> - {discrete_log_equality; scaled_message_hash} ) - ~var_of_hlist:(fun [discrete_log_equality; scaled_message_hash] -> - {discrete_log_equality; scaled_message_hash} ) + [ Discrete_log_equality.typ; Group.typ ] + ~var_to_hlist:(fun { discrete_log_equality; scaled_message_hash } -> + [ discrete_log_equality; scaled_message_hash ]) + ~value_to_hlist:(fun { discrete_log_equality; scaled_message_hash } -> + [ discrete_log_equality; scaled_message_hash ]) + ~value_of_hlist:(fun [ discrete_log_equality; scaled_message_hash ] -> + { discrete_log_equality; scaled_message_hash }) + ~var_of_hlist:(fun [ discrete_log_equality; scaled_message_hash ] -> + { discrete_log_equality; scaled_message_hash }) let create (k : Private_key.t) message : t = let public_key = Group.scale Group.generator k in @@ -226,13 +226,15 @@ end = struct Group.(scale generator r) Group.(scale message_hash r) in - {c; s= Scalar.(add r (mul k c))} + { c; s = Scalar.(add r (mul k c)) } in - {discrete_log_equality; scaled_message_hash= Group.scale message_hash k} + { discrete_log_equality + ; scaled_message_hash = Group.scale message_hash k + } let verified_output - ({scaled_message_hash; discrete_log_equality= {c; s}} : t) - ({message; public_key} : Context.t) = + ({ scaled_message_hash; discrete_log_equality = { c; s } } : t) + ({ message; public_key } : Context.t) = let g = Group.generator in let ( + ) = Group.add in let ( * ) s g = Group.scale g s in @@ -243,15 +245,14 @@ end = struct ((s * g) + (c * Group.negate public_key)) ((s * message_hash) + (c * Group.negate scaled_message_hash))) in - if dleq then Some (Output_hash.hash message scaled_message_hash) - else None + if dleq then Some (Output_hash.hash message scaled_message_hash) else None module Checked = struct let verified_output (type 
shifted) ((module Shifted) as shifted : (module Group.Checked.Shifted.S with type t = shifted)) - ({scaled_message_hash; discrete_log_equality= {c; s}} : var) - ({message; public_key} : Context.var) = + ({ scaled_message_hash; discrete_log_equality = { c; s } } : var) + ({ message; public_key } : Context.var) = let open Impl.Checked in let%bind () = let%bind a = @@ -281,7 +282,7 @@ end = struct >>= Scalar.Checked.Assert.equal c in (* TODO: This could just hash (message_hash, message_hash^k) instead - if it were cheaper *) + if it were cheaper *) Output_hash.Checked.hash message scaled_message_hash end end @@ -291,16 +292,16 @@ open Core module Bigint_scalar (Impl : Snarky_backendless.Snark_intf.S) (M : sig - val modulus : Bigint.t + val modulus : Bigint.t - val random : unit -> Bigint.t + val random : unit -> Bigint.t end) = struct let pack bs = let pack_char bs = Char.of_int_exn (List.foldi bs ~init:0 ~f:(fun i acc b -> - if b then acc lor (1 lsl i) else acc )) + if b then acc lor (1 lsl i) else acc)) in String.of_char_list (List.map ~f:pack_char (List.chunks_of ~length:8 bs)) |> Z.of_bits |> Bigint.of_zarith_bigint @@ -318,7 +319,7 @@ struct let%test_unit "add is correct" = Quickcheck.test (Quickcheck.Generator.tuple2 gen gen) ~f:(fun (x, y) -> - assert (equal (add x y) ((x + y) % modulus)) ) + assert (equal (add x y) ((x + y) % modulus))) let mul x y = x * y % modulus @@ -328,7 +329,7 @@ struct let of_bits bs = List.fold_left bs ~init:(zero, one) ~f:(fun (acc, pt) b -> - ((if b then add acc pt else acc), add pt pt) ) + ((if b then add acc pt else acc), add pt pt)) |> fst let%test_unit "of_bits . 
to_bits = identity" = @@ -343,7 +344,7 @@ struct transport (list ~length:length_in_bits Boolean.typ) ~there:(fun n -> - List.init length_in_bits ~f:(Z.testbit (to_zarith_bigint n)) ) + List.init length_in_bits ~f:(Z.testbit (to_zarith_bigint n))) ~back:pack module Checked = struct diff --git a/src/lib/vrf_lib/tests/integrated_test.ml b/src/lib/vrf_lib/tests/integrated_test.ml index 5c8e7f45002..ff6cfeb54d1 100644 --- a/src/lib/vrf_lib/tests/integrated_test.ml +++ b/src/lib/vrf_lib/tests/integrated_test.ml @@ -24,13 +24,13 @@ module Group = struct end module Message = struct - type 'state_hash t = {state_hash: 'state_hash} [@@deriving hlist] + type 'state_hash t = { state_hash : 'state_hash } [@@deriving hlist] type value = Mina_base.State_hash.t t type var = Mina_base.State_hash.var t - let data_spec = Tick.Data_spec.[Mina_base.State_hash.typ] + let data_spec = Tick.Data_spec.[ Mina_base.State_hash.typ ] let typ = Tick.Typ.of_hlistable data_spec ~var_to_hlist:to_hlist @@ -39,12 +39,12 @@ module Message = struct let gen = let open Quickcheck.Let_syntax in let%map state_hash = Mina_base.State_hash.gen in - {state_hash} + { state_hash } let hash_to_group ~constraint_constants:_ msg = Group_map.to_group (Random_oracle.hash ~init:Mina_base.Hash_prefix.vrf_message - [|msg.state_hash|]) + [| msg.state_hash |]) |> Tick.Inner_curve.of_affine module Checked = struct @@ -53,7 +53,7 @@ module Message = struct Group_map.Checked.to_group (Random_oracle.Checked.hash ~init:Mina_base.Hash_prefix.vrf_message (Random_oracle.Checked.pack_input - (Mina_base.State_hash.var_to_input msg.state_hash))) ) + (Mina_base.State_hash.var_to_input msg.state_hash)))) end end @@ -64,16 +64,16 @@ module Output_hash = struct let typ : (var, value) Snark_params.Tick.Typ.t = Snark_params.Tick.Field.typ - let hash ~constraint_constants:_ ({Message.state_hash} : Message.value) g = + let hash ~constraint_constants:_ ({ Message.state_hash } : Message.value) g = let x, y = 
Snark_params.Tick.Inner_curve.to_affine_exn g in - Random_oracle.hash [|(state_hash :> Snark_params.Tick.Field.t); x; y|] + Random_oracle.hash [| (state_hash :> Snark_params.Tick.Field.t); x; y |] module Checked = struct - let hash ({state_hash} : Message.var) g = + let hash ({ state_hash } : Message.var) g = Snark_params.Tick.make_checked (fun () -> let x, y = g in Random_oracle.Checked.hash - [|Mina_base.State_hash.var_to_hash_packed state_hash; x; y|] ) + [| Mina_base.State_hash.var_to_hash_packed state_hash; x; y |]) end end @@ -98,9 +98,9 @@ let%test_unit "eval unchecked vs. checked equality" = (fun (private_key, msg) -> let open Tick.Checked in let%bind (module Shifted) = Group.Checked.Shifted.create () in - Vrf.Checked.eval (module Shifted) ~private_key msg ) + Vrf.Checked.eval (module Shifted) ~private_key msg) (fun (private_key, msg) -> - Vrf.eval ~constraint_constants ~private_key msg )) + Vrf.eval ~constraint_constants ~private_key msg)) let%bench_module "vrf bench module" = ( module struct @@ -125,6 +125,6 @@ let%bench_module "vrf bench module" = (fun (private_key, msg) -> let open Tick.Checked in let%bind (module Shifted) = Group.Checked.Shifted.create () in - Vrf.Checked.eval (module Shifted) ~private_key msg ) + Vrf.Checked.eval (module Shifted) ~private_key msg) (private_key, msg) end ) diff --git a/src/lib/vrf_lib/tests/standalone_test.ml b/src/lib/vrf_lib/tests/standalone_test.ml index d30dde40682..12574a27965 100644 --- a/src/lib/vrf_lib/tests/standalone_test.ml +++ b/src/lib/vrf_lib/tests/standalone_test.ml @@ -12,9 +12,9 @@ let%test_module "vrf-test" = include ( Snark_params.Tick.Inner_curve.Scalar : module type of Snark_params.Tick.Inner_curve.Scalar - with type t = Snark_params.Tick.Inner_curve.Scalar.t - and type var := Snark_params.Tick.Inner_curve.Scalar.var - with module Checked := Snark_params.Tick.Inner_curve.Scalar.Checked ) + with type t = Snark_params.Tick.Inner_curve.Scalar.t + and type var := 
Snark_params.Tick.Inner_curve.Scalar.var + with module Checked := Snark_params.Tick.Inner_curve.Scalar.Checked ) let of_bits = Other_impl.Field.project @@ -55,8 +55,8 @@ let%test_module "vrf-test" = include ( Snark_params.Tick.Inner_curve : module type of Snark_params.Tick.Inner_curve - with type var := Snark_params.Tick.Inner_curve.var - with module Checked := Snark_params.Tick.Inner_curve.Checked ) + with type var := Snark_params.Tick.Inner_curve.var + with module Checked := Snark_params.Tick.Inner_curve.Checked ) type 'a or_infinity = 'a Marlin_plonk_bindings_types.Or_infinity.t = | Infinity @@ -89,7 +89,8 @@ let%test_module "vrf-test" = module T = struct type t = Curve.t - include Sexpable.Of_sexpable (struct + include Sexpable.Of_sexpable + (struct type t = Field.t * Field.t [@@deriving sexp] end) (struct @@ -134,9 +135,9 @@ let%test_module "vrf-test" = let res = add x (negate x) in if not (equal res zero) then failwithf - !"inv failured, x = %{sexp:t}, x + inv x = %{sexp:t}, \ - expected %{sexp:t}" - x res zero () ) + !"inv failured, x = %{sexp:t}, x + inv x = %{sexp:t}, expected \ + %{sexp:t}" + x res zero ()) let%test_unit "scaling associates" = let open Quickcheck in @@ -145,7 +146,7 @@ let%test_module "vrf-test" = assert ( equal (scale generator (Scalar.mul a b)) - (scale (scale generator a) b) ) ) + (scale (scale generator a) b) )) module Checked = struct include Curve.Checked @@ -164,15 +165,13 @@ let%test_module "vrf-test" = Array.init (5 * Impl.Field.size_in_bits) ~f:(fun _ -> let t = Curve.random () in let tt = Curve.double t in - (t, tt, Curve.add t tt, Curve.double tt) ) + (t, tt, Curve.add t tt, Curve.double tt)) module Pedersen = Snarky.Pedersen.Make (Impl) (Curve) (struct let params = - Array.map - ~f:(Tuple_lib.Quadruple.map ~f:Curve.to_affine_exn) - params + Array.map ~f:(Tuple_lib.Quadruple.map ~f:Curve.to_affine_exn) params end) module Message = struct @@ -180,7 +179,8 @@ let%test_module "vrf-test" = type t = Curve.t - include 
Sexpable.Of_sexpable (struct + include Sexpable.Of_sexpable + (struct type t = Field.t * Field.t [@@deriving sexp] end) (struct @@ -209,17 +209,17 @@ let%test_module "vrf-test" = (b0, b1, b2) :: bits_to_triples ~default bs | [] -> [] - | [b] -> - [(b, default, default)] - | [b1; b2] -> - [(b1, b2, default)] + | [ b ] -> + [ (b, default, default) ] + | [ b1; b2 ] -> + [ (b1, b2, default) ] let hash_bits bits = List.foldi ~init:Curve.zero (bits_to_triples ~default:false bits) ~f:(fun i acc triple -> Curve.add acc (Snarky.Pedersen.local_function ~negate:Curve.negate params.(i) - triple) ) + triple)) |> Curve.to_affine_exn |> fst let hash_bits_checked bits = @@ -249,14 +249,16 @@ let%test_module "vrf-test" = open Impl let hash_for_proof m g1 g2 g3 = - let x = hash_bits (List.concat_map ~f:Group.to_bits [m; g1; g2; g3]) in + let x = + hash_bits (List.concat_map ~f:Group.to_bits [ m; g1; g2; g3 ]) + in Scalar.of_bits (List.take (Field.unpack x) Scalar.length_in_bits) module Checked = struct let hash_for_proof m g1 g2 g3 = let%bind bs = Checked.map - (Checked.List.map ~f:Group.Checked.to_bits [m; g1; g2; g3]) + (Checked.List.map ~f:Group.Checked.to_bits [ m; g1; g2; g3 ]) ~f:List.concat in let%map xs = @@ -276,10 +278,9 @@ let%test_module "vrf-test" = in Quickcheck.test gen ~trials:50 ~f:(fun (priv, public_key, message) -> let eval = Vrf.Evaluation.create priv message in - let ctx : Vrf.Context.t = {message; public_key} in - if not (Option.is_some (Vrf.Evaluation.verified_output eval ctx)) - then + let ctx : Vrf.Context.t = { message; public_key } in + if not (Option.is_some (Vrf.Evaluation.verified_output eval ctx)) then failwithf !"%{sexp:Vrf.Context.t}, %{sexp:Vrf.Evaluation.t}" - ctx eval () ) + ctx eval ()) end ) diff --git a/src/lib/web_client_pipe/web_client_pipe.ml b/src/lib/web_client_pipe/web_client_pipe.ml index 120aba7954e..41e5ef0f675 100644 --- a/src/lib/web_client_pipe/web_client_pipe.ml +++ b/src/lib/web_client_pipe/web_client_pipe.ml @@ -26,17 +26,18 @@ 
end) (Store : Storage_intf with type location := string and type data := Data.t) (Request : Web_request.Intf.S) : S with type data := Data.t = struct type t = - { filename: string - ; reader: Data.t Linear_pipe.Reader.t - ; writer: Data.t Linear_pipe.Writer.t } + { filename : string + ; reader : Data.t Linear_pipe.Reader.t + ; writer : Data.t Linear_pipe.Writer.t + } - let write_to_storage {filename; _} request data = + let write_to_storage { filename; _ } request data = let%bind () = Store.store filename data in Request.put request filename let create ~filename ~logger = let reader, writer = Linear_pipe.create () in - let t = {filename; reader; writer} in + let t = { filename; reader; writer } in let%map () = match%map Request.create () with | Ok request -> @@ -47,14 +48,14 @@ end) () | Error e -> [%log error] "Error writing Web client pipe data: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] )) + ~metadata:[ ("error", Error_json.error_to_yojson e) ])) | Error e -> [%log error] "Unable to create request: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] in t - let store {reader; writer; _} data = + let store { reader; writer; _ } data = Linear_pipe.force_write_maybe_drop_head ~capacity:1 writer reader data ; Deferred.unit end diff --git a/src/lib/web_request/s3_put_request.ml b/src/lib/web_request/s3_put_request.ml index dca5e3fe844..42e628926ed 100644 --- a/src/lib/web_request/s3_put_request.ml +++ b/src/lib/web_request/s3_put_request.ml @@ -6,7 +6,7 @@ type t = string let create () = Deferred.Or_error.return "s3://o1labs-snarkette-data" let put ?(options = []) t filename = - let subcommand = ["s3"; "cp"] in - let args = subcommand @ [filename; t] @ options in + let subcommand = [ "s3"; "cp" ] in + let args = subcommand @ [ filename; t ] @ options in let result = Process.run ~prog:"aws" ~args () in result |> Deferred.Result.ignore_m diff --git 
a/src/lib/webkit_trace_event/webkit_trace_event.ml b/src/lib/webkit_trace_event/webkit_trace_event.ml index a4b1cd40512..51680915b30 100644 --- a/src/lib/webkit_trace_event/webkit_trace_event.ml +++ b/src/lib/webkit_trace_event/webkit_trace_event.ml @@ -18,18 +18,19 @@ type event_kind = | Trace_end type event = - { name: string - ; categories: string list - ; phase: event_kind - ; timestamp: int - ; pid: int - ; tid: int } + { name : string + ; categories : string list + ; phase : event_kind + ; timestamp : int + ; pid : int + ; tid : int + } type events = event list -let create_event ?(categories = []) ?(pid = 0) ?(tid = 0) ~phase ~timestamp - name = - {name; categories; phase; timestamp; pid; tid} +let create_event ?(categories = []) ?(pid = 0) ?(tid = 0) ~phase ~timestamp name + = + { name; categories; phase; timestamp; pid; tid } module Output = struct module Binary = struct @@ -73,8 +74,7 @@ module Output = struct let emits ~buf (s : string) (pos : int) = let sl = String.length s in let pos = emiti ~buf sl pos in - Bigstring.From_string.blit ~src:s ~src_pos:0 ~len:sl ~dst:buf - ~dst_pos:pos ; + Bigstring.From_string.blit ~src:s ~src_pos:0 ~len:sl ~dst:buf ~dst_pos:pos ; pos + sl let finish wr ~buf final_len = @@ -91,8 +91,7 @@ module Output = struct | Cycle_start -> () | Cycle_end -> - emitk ~buf Cycle_end 0 |> emiti ~buf event.timestamp - |> finish ~buf wr + emitk ~buf Cycle_end 0 |> emiti ~buf event.timestamp |> finish ~buf wr | Pid_is -> emitk ~buf Pid_is 0 |> emiti ~buf event.pid |> finish ~buf wr | Event -> @@ -105,8 +104,7 @@ module Output = struct emitk ~buf Measure_end 0 |> emiti ~buf event.timestamp |> finish ~buf wr | Trace_end -> - emitk ~buf Trace_end 0 |> emiti ~buf event.timestamp - |> finish ~buf wr + emitk ~buf Trace_end 0 |> emiti ~buf event.timestamp |> finish ~buf wr end module JSON = struct @@ -133,7 +131,7 @@ module Output = struct | Trace_end -> `String "e" - let json_of_event {name; categories; phase; timestamp; pid; tid} = + let 
json_of_event { name; categories; phase; timestamp; pid; tid } = let categories = String.concat ~sep:"," categories in match phase with | New_thread | Pid_is -> @@ -143,7 +141,8 @@ module Output = struct ; ("ph", phase_of_kind phase) ; ("pid", `Int pid) ; ("tid", `Int tid) - ; ("args", `Assoc [("name", `String name)]) ] + ; ("args", `Assoc [ ("name", `String name) ]) + ] | Thread_switch -> `Assoc [ ("name", `String name) @@ -152,7 +151,8 @@ module Output = struct ; ("dur", `Int 0) (* Placeholder value *) ; ("ts", `Int timestamp) ; ("pid", `Int pid) - ; ("tid", `Int tid) ] + ; ("tid", `Int tid) + ] | Cycle_start | Cycle_end | Trace_end -> `Assoc [ ("name", `String name) @@ -161,7 +161,8 @@ module Output = struct ; ("id", `Int 0) (* Placeholder value *) ; ("ts", `Int timestamp) ; ("pid", `Int pid) - ; ("tid", `Int tid) ] + ; ("tid", `Int tid) + ] | Event | Measure_start | Measure_end -> `Assoc [ ("name", `String name) @@ -169,7 +170,8 @@ module Output = struct ; ("ph", phase_of_kind phase) ; ("ts", `Int timestamp) ; ("pid", `Int pid) - ; ("tid", `Int tid) ] + ; ("tid", `Int tid) + ] let json_of_events (events : events) = `List (List.map ~f:json_of_event events) diff --git a/src/lib/with_hash/with_hash.ml b/src/lib/with_hash/with_hash.ml index 979fe8aa308..8d4aeef2ced 100644 --- a/src/lib/with_hash/with_hash.ml +++ b/src/lib/with_hash/with_hash.ml @@ -5,21 +5,21 @@ module Stable = struct [@@@no_toplevel_latest_type] module V1 = struct - type ('a, 'h) t = {data: 'a; hash: 'h} + type ('a, 'h) t = { data : 'a; hash : 'h } [@@deriving sexp, equal, compare, hash, yojson] - let to_latest data_latest hash_latest {data; hash} = - {data= data_latest data; hash= hash_latest hash} + let to_latest data_latest hash_latest { data; hash } = + { data = data_latest data; hash = hash_latest hash } end end] -type ('a, 'h) t = ('a, 'h) Stable.Latest.t = {data: 'a; hash: 'h} +type ('a, 'h) t = ('a, 'h) Stable.Latest.t = { data : 'a; hash : 'h } [@@deriving sexp, equal, compare, hash, 
yojson] -let data {data; _} = data +let data { data; _ } = data -let hash {hash; _} = hash +let hash { hash; _ } = hash -let map t ~f = {t with data= f t.data} +let map t ~f = { t with data = f t.data } -let of_data data ~hash_data = {data; hash= hash_data data} +let of_data data ~hash_data = { data; hash = hash_data data } diff --git a/src/lib/work_selector/inputs.ml b/src/lib/work_selector/inputs.ml index 36067a0276f..d5b3800535e 100644 --- a/src/lib/work_selector/inputs.ml +++ b/src/lib/work_selector/inputs.ml @@ -52,7 +52,7 @@ module Test_inputs = struct | None -> fee | Some fee' -> - Currency.Fee.min fee fee' ) + Currency.Fee.min fee fee') end module Staged_ledger = struct diff --git a/src/lib/work_selector/intf.ml b/src/lib/work_selector/intf.ml index 5663a4ca333..34d40098646 100644 --- a/src/lib/work_selector/intf.ml +++ b/src/lib/work_selector/intf.ml @@ -50,8 +50,9 @@ module type Inputs_intf = sig val all_work_pairs : t - -> get_state:( Mina_base.State_hash.t - -> Mina_state.Protocol_state.value Or_error.t) + -> get_state: + ( Mina_base.State_hash.t + -> Mina_state.Protocol_state.value Or_error.t) -> ( Transaction.t , Transaction_witness.t , Ledger_proof.t ) @@ -82,8 +83,8 @@ module type State_intf = sig val init : reassignment_wait:int - -> frontier_broadcast_pipe:transition_frontier option - Pipe_lib.Broadcast_pipe.Reader.t + -> frontier_broadcast_pipe: + transition_frontier option Pipe_lib.Broadcast_pipe.Reader.t -> logger:Logger.t -> t end @@ -169,8 +170,7 @@ module type Selection_method_intf = sig type transition_frontier - module State : - State_intf with type transition_frontier := transition_frontier + module State : State_intf with type transition_frontier := transition_frontier val remove : State.t -> work One_or_two.t -> unit @@ -191,13 +191,14 @@ end module type Make_selection_method_intf = functor (Inputs : Inputs_intf) (Lib : Lib_intf with module Inputs := Inputs) - -> Selection_method_intf - with type staged_ledger := Inputs.Staged_ledger.t 
- and type work := - ( Inputs.Transaction.t - , Inputs.Transaction_witness.t - , Inputs.Ledger_proof.t ) - Snark_work_lib.Work.Single.Spec.t - and type snark_pool := Inputs.Snark_pool.t - and type transition_frontier := Inputs.Transition_frontier.t - and module State := Lib.State + -> + Selection_method_intf + with type staged_ledger := Inputs.Staged_ledger.t + and type work := + ( Inputs.Transaction.t + , Inputs.Transaction_witness.t + , Inputs.Ledger_proof.t ) + Snark_work_lib.Work.Single.Spec.t + and type snark_pool := Inputs.Snark_pool.t + and type transition_frontier := Inputs.Transition_frontier.t + and module State := Lib.State diff --git a/src/lib/work_selector/test.ml b/src/lib/work_selector/test.ml index 56c6209f1cd..8581d306e7c 100644 --- a/src/lib/work_selector/test.ml +++ b/src/lib/work_selector/test.ml @@ -48,7 +48,7 @@ struct in match stuff with None -> return () | _ -> go (i + 1) in - go 0 ) ) + go 0)) let%test_unit "Reassign work after the wait time" = Backtrace.elide := false ; @@ -57,9 +57,7 @@ struct let logger = Logger.null () in let send_work work_state = let rec go all_work = - let stuff = - Selection_method.work ~snark_pool ~fee ~logger work_state - in + let stuff = Selection_method.work ~snark_pool ~fee ~logger work_state in match stuff with | None -> all_work @@ -78,7 +76,7 @@ struct Async.after (Time.Span.of_ms (Float.of_int reassignment_wait)) in let work_sent_again = send_work work_state in - assert (List.length work_sent = List.length work_sent_again) ) ) + assert (List.length work_sent = List.length work_sent_again))) let gen_snark_pool (works : ('a, 'b, 'c) Lib.Work_spec.t One_or_two.t list) fee = @@ -91,7 +89,7 @@ struct return () | work :: rest -> let%bind fee = - Quickcheck.Generator.of_list [cheap_work_fee; expensive_work_fee] + Quickcheck.Generator.of_list [ cheap_work_fee; expensive_work_fee ] in T.Snark_pool.add_snark snark_pool ~work ~fee ; add_works rest @@ -112,9 +110,7 @@ struct let%map pool = gen_snark_pool ( 
T.Staged_ledger.all_work_pairs sl ~get_state:(fun _ -> - Ok - (Lazy.force precomputed_values).protocol_state_with_hash.data - ) + Ok (Lazy.force precomputed_values).protocol_state_with_hash.data) |> Or_error.ok_exn ) (Currency.Fee.of_int 2) in @@ -134,8 +130,7 @@ struct ~message:"Exceeded time expected to exhaust work" ~expect:true (i <= p) ; let work = - Selection_method.work ~snark_pool ~fee:my_fee work_state - ~logger + Selection_method.work ~snark_pool ~fee:my_fee work_state ~logger in match work with | None -> @@ -148,5 +143,5 @@ struct (One_or_two.map job ~f:Lib.Work_spec.statement)) ; go (i + 1) in - go 0 ) ) + go 0)) end diff --git a/src/lib/work_selector/work_lib.ml b/src/lib/work_selector/work_lib.ml index 78646ed0728..55d2d21f825 100644 --- a/src/lib/work_selector/work_lib.ml +++ b/src/lib/work_selector/work_lib.ml @@ -26,27 +26,30 @@ module Make (Inputs : Intf.Inputs_intf) = struct end type t = - { mutable available_jobs: + { mutable available_jobs : ( Inputs.Transaction.t , Inputs.Transaction_witness.t , Inputs.Ledger_proof.t ) Work_spec.t One_or_two.t list - ; jobs_seen: (Seen_key.t, Job_status.t) Hashtbl.t - ; reassignment_wait: int } + ; jobs_seen : (Seen_key.t, Job_status.t) Hashtbl.t + ; reassignment_wait : int + } let init : reassignment_wait:int - -> frontier_broadcast_pipe:Inputs.Transition_frontier.t option - Pipe_lib.Broadcast_pipe.Reader.t + -> frontier_broadcast_pipe: + Inputs.Transition_frontier.t option + Pipe_lib.Broadcast_pipe.Reader.t -> logger:Logger.t -> t = fun ~reassignment_wait ~frontier_broadcast_pipe ~logger -> let t = - { available_jobs= [] - ; jobs_seen= Hashtbl.create (module Seen_key) - ; reassignment_wait } + { available_jobs = [] + ; jobs_seen = Hashtbl.create (module Seen_key) + ; reassignment_wait + } in Pipe_lib.Broadcast_pipe.Reader.iter frontier_broadcast_pipe ~f:(fun frontier_opt -> @@ -56,15 +59,13 @@ module Make (Inputs : Intf.Inputs_intf) = struct t.available_jobs <- [] | Some frontier -> 
Pipe_lib.Broadcast_pipe.Reader.iter - (Inputs.Transition_frontier.best_tip_pipe frontier) - ~f:(fun _ -> + (Inputs.Transition_frontier.best_tip_pipe frontier) ~f:(fun _ -> let best_tip_staged_ledger = Inputs.Transition_frontier.best_tip_staged_ledger frontier in let start_time = Time.now () in ( match - Inputs.Staged_ledger.all_work_pairs - best_tip_staged_ledger + Inputs.Staged_ledger.all_work_pairs best_tip_staged_ledger ~get_state: (Inputs.Transition_frontier.get_protocol_state frontier) @@ -72,7 +73,7 @@ module Make (Inputs : Intf.Inputs_intf) = struct | Error e -> [%log fatal] "Error occured when updating available work: $error" - ~metadata:[("error", Error_json.error_to_yojson e)] + ~metadata:[ ("error", Error_json.error_to_yojson e) ] | Ok new_available_jobs -> let end_time = Time.now () in [%log info] "Updating new available work took $time ms" @@ -80,33 +81,32 @@ module Make (Inputs : Intf.Inputs_intf) = struct [ ( "time" , `Float ( Time.diff end_time start_time - |> Time.Span.to_ms ) ) ] ; + |> Time.Span.to_ms ) ) + ] ; t.available_jobs <- new_available_jobs ) ; - Deferred.unit ) + Deferred.unit) |> Deferred.don't_wait_for ) ; - Deferred.unit ) + Deferred.unit) |> Deferred.don't_wait_for ; t let all_unseen_works t = List.filter t.available_jobs ~f:(fun js -> not - @@ Hashtbl.mem t.jobs_seen (One_or_two.map ~f:Work_spec.statement js) - ) + @@ Hashtbl.mem t.jobs_seen (One_or_two.map ~f:Work_spec.statement js)) let remove_old_assignments t ~logger = let now = Time.now () in Hashtbl.filteri_inplace t.jobs_seen ~f:(fun ~key:work ~data:status -> if - Job_status.is_old status ~now - ~reassignment_wait:t.reassignment_wait + Job_status.is_old status ~now ~reassignment_wait:t.reassignment_wait then ( [%log info] - ~metadata:[("work", Seen_key.to_yojson work)] + ~metadata:[ ("work", Seen_key.to_yojson work) ] "Waited too long to get work for $work. 
Ready to be reassigned" ; Mina_metrics.(Counter.inc_one Snark_work.snark_work_timed_out_rpc) ; false ) - else true ) + else true) let remove t x = Hashtbl.remove t.jobs_seen (One_or_two.map ~f:Work_spec.statement x) @@ -123,7 +123,7 @@ module Make (Inputs : Intf.Inputs_intf) = struct (Inputs.Snark_pool.get_completed_work snark_pool statements) ~f:(fun priced_proof -> let competing_fee = Inputs.Transaction_snark_work.fee priced_proof in - Fee.compare fee competing_fee < 0 ) + Fee.compare fee competing_fee < 0) module For_tests = struct let does_not_have_better_fee = does_not_have_better_fee @@ -134,11 +134,11 @@ module Make (Inputs : Intf.Inputs_intf) = struct ('a, 'b, 'c) Work_spec.t One_or_two.t list = List.filter jobs ~f:(fun job -> does_not_have_better_fee ~snark_pool ~fee - (One_or_two.map job ~f:Work_spec.statement) ) + (One_or_two.map job ~f:Work_spec.statement)) let all_pending_work ~snark_pool statements = List.filter statements ~f:(fun st -> - Option.is_none (Inputs.Snark_pool.get_completed_work snark_pool st) ) + Option.is_none (Inputs.Snark_pool.get_completed_work snark_pool st)) (*Seen/Unseen jobs that are not in the snark pool yet*) let pending_work_statements ~snark_pool ~fee_opt (state : State.t) = diff --git a/src/lib/work_selector/work_selector.ml b/src/lib/work_selector/work_selector.ml index c55cb390b22..b9e61be8169 100644 --- a/src/lib/work_selector/work_selector.ml +++ b/src/lib/work_selector/work_selector.ml @@ -4,15 +4,15 @@ module State = Lib.State module type Selection_method_intf = Intf.Selection_method_intf - with type snark_pool := Network_pool.Snark_pool.t - and type staged_ledger := Staged_ledger.t - and type work := - ( Transaction.t - , Transaction_witness.t - , Ledger_proof.t ) - Snark_work_lib.Work.Single.Spec.t - and type transition_frontier := Transition_frontier.t - and module State := State + with type snark_pool := Network_pool.Snark_pool.t + and type staged_ledger := Staged_ledger.t + and type work := + ( Transaction.t + , 
Transaction_witness.t + , Ledger_proof.t ) + Snark_work_lib.Work.Single.Spec.t + and type transition_frontier := Transition_frontier.t + and module State := State module Selection_methods = struct module Random = Random.Make (Inputs.Implementation_inputs) (Lib) diff --git a/src/lib/work_selector/work_selector.mli b/src/lib/work_selector/work_selector.mli index 537f61689ac..fc31678f1be 100644 --- a/src/lib/work_selector/work_selector.mli +++ b/src/lib/work_selector/work_selector.mli @@ -5,15 +5,15 @@ module State : module type Selection_method_intf = Intf.Selection_method_intf - with type snark_pool := Network_pool.Snark_pool.t - and type staged_ledger := Staged_ledger.t - and type work := - ( Transaction.t - , Transaction_witness.t - , Ledger_proof.t ) - Snark_work_lib.Work.Single.Spec.t - and type transition_frontier := Transition_frontier.t - and module State := State + with type snark_pool := Network_pool.Snark_pool.t + and type staged_ledger := Staged_ledger.t + and type work := + ( Transaction.t + , Transaction_witness.t + , Ledger_proof.t ) + Snark_work_lib.Work.Single.Spec.t + and type transition_frontier := Transition_frontier.t + and module State := State module Selection_methods : sig module Random : Selection_method_intf diff --git a/src/lib/zexe_backend/pasta/basic.ml b/src/lib/zexe_backend/pasta/basic.ml index a4468f8bd51..bf9e806dbdb 100644 --- a/src/lib/zexe_backend/pasta/basic.ml +++ b/src/lib/zexe_backend/pasta/basic.ml @@ -117,11 +117,11 @@ module Fq_poly_comm = Zexe_backend_common.Poly_comm.Make (struct module Backend = struct include Pasta_fq_urs.Poly_comm - let shifted ({shifted; _} : t) = shifted + let shifted ({ shifted; _ } : t) = shifted - let unshifted ({unshifted; _} : t) = unshifted + let unshifted ({ unshifted; _ } : t) = unshifted - let make unshifted shifted : t = {shifted; unshifted} + let make unshifted shifted : t = { shifted; unshifted } end end) @@ -132,10 +132,10 @@ module Fp_poly_comm = Zexe_backend_common.Poly_comm.Make 
(struct module Backend = struct include Pasta_fp_urs.Poly_comm - let shifted ({shifted; _} : t) = shifted + let shifted ({ shifted; _ } : t) = shifted - let unshifted ({unshifted; _} : t) = unshifted + let unshifted ({ unshifted; _ } : t) = unshifted - let make unshifted shifted : t = {shifted; unshifted} + let make unshifted shifted : t = { shifted; unshifted } end end) diff --git a/src/lib/zexe_backend/pasta/pallas_based_plonk.ml b/src/lib/zexe_backend/pasta/pallas_based_plonk.ml index 1c7fa4e65ef..9f5f571f837 100644 --- a/src/lib/zexe_backend/pasta/pallas_based_plonk.ml +++ b/src/lib/zexe_backend/pasta/pallas_based_plonk.ml @@ -36,7 +36,7 @@ module R1CS_constraint_system = let params = Sponge.Params.( map pasta_q ~f:(fun x -> - Field.of_bigint (Bigint256.of_decimal_string x) )) + Field.of_bigint (Bigint256.of_decimal_string x))) end) module Var = Var @@ -48,9 +48,10 @@ let lagrange : int -> Marlin_plonk_bindings.Pasta_fq_urs.Poly_comm.t array = Precomputed.Lagrange_precomputations.( pallas.(index_of_domain_log2 domain_log2)) ~f:(fun unshifted -> - { Poly_comm.unshifted= + { Poly_comm.unshifted = Array.map unshifted ~f:(fun c -> Or_infinity.Finite c) - ; shifted= None } ) ) + ; shifted = None + })) let with_lagrange f (vk : Verification_key.t) = f (lagrange vk.domain.log_size_of_group) vk @@ -95,7 +96,7 @@ module Proof = Plonk_dlog_proof.Make (struct let batch_verify = with_lagranges (fun lgrs vks ts -> - Async.In_thread.run (fun () -> batch_verify lgrs vks ts) ) + Async.In_thread.run (fun () -> batch_verify lgrs vks ts)) let create_aux ~f:create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = @@ -120,15 +121,10 @@ module Proof = Plonk_dlog_proof.Make (struct let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms = create_aux pk primary auxiliary prev_chals prev_comms - ~f:(fun pk - ~primary_input - ~auxiliary_input - ~prev_challenges - ~prev_sgs - -> + ~f:(fun pk ~primary_input ~auxiliary_input ~prev_challenges ~prev_sgs -> 
Async.In_thread.run (fun () -> create pk ~primary_input ~auxiliary_input ~prev_challenges - ~prev_sgs ) ) + ~prev_sgs)) let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = create_aux pk primary auxiliary prev_chals prev_comms ~f:create diff --git a/src/lib/zexe_backend/pasta/precomputed.ml b/src/lib/zexe_backend/pasta/precomputed.ml index a85ed284a9c..e3a1ca42971 100644 --- a/src/lib/zexe_backend/pasta/precomputed.ml +++ b/src/lib/zexe_backend/pasta/precomputed.ml @@ -11,8312 +11,9993 @@ module Lagrange_precomputations = struct "0x68fe06f08453cb5167c77c7420a9c361707aa89b4606f3ad395f757f2d55c33f" , f "0xa2f523775eb6dca1c2fd826093d50826f6d7eb22d789fbee229ccf0bbf097028" - ) |] + ) + |] ; [| ( f "0xc83ae32256e05b58f094d076ea03c6860994e93a688cb6e019e24f634772c21e" , f "0xfd30fb78571213375acb7cac4849efe5f92d9871e712c9e8115630a2ef562b07" - ) |] |] + ) + |] + |] ; [| [| ( f "0xf032c17cb4dd6a9f44a47b770cd0fcf61ddfa505347850fe63334f4c4a17f424" , f "0x688b7771db5056be624873c8a11ab2654f744e1c00e4591489c927d6da28d827" - ) |] + ) + |] ; [| ( f "0xf9cbe33076d0a142b38013f2cb68a593d6a1bbb5a69202a3a72ee419d3431b3f" , f "0xf5ff5decba2f35e39af103599bc06a01c130954d18a34caf2e049adf17f2f32d" - ) |] + ) + |] ; [| ( f "0x194a14c51dbc01fd44bc4e4a5b14582b9d04ac512914eecad61b942ce27d3115" , f "0x3487913110c027680b24d9c67b6cb872eed550e3c33fc5b6e98d7b99cafd6e3e" - ) |] + ) + |] ; [| ( f "0x2d17ed0359e0441513dc0dd047863d2f23df36cf6c7bed6a2d0b186faf13dd13" , f "0x7c15aebf1193c0865979161dbfacf522bf7acf8899d066e970c1110e7d13e139" - ) |] |] + ) + |] + |] ; [| [| ( f "0xde0203a36169c42b9ce95b12e111d86946c9f61ba86f900ec3baf8ca764af73c" , f "0x6028c284264392d0d7a40ba77c1cb3084be5f83db1866dea406f3da97a831602" - ) |] + ) + |] ; [| ( f "0x56706930b3a35da1ecf5d7d1483127a28474ead9c0f5dc5a68511c02c28b962f" , f "0x999d8fdac865ae6e480d7f9d9744e2e9a0df7ae1393e6dcbfb212dc34e1c6518" - ) |] + ) + |] ; [| ( f "0x32fceacc7c55581119a46bd024e5e2a989e8a45929e35b8dc112493ed01d0f17" , f 
"0x57862e2ee36211c6408f9e2fef7376b1de5be964cf4b92f9a6d778dbb8983425" - ) |] + ) + |] ; [| ( f "0x077420e790deb40b95d6c06b5e4480ed017802d35d9323105cd1d1460016a606" , f "0x3aff5a14cfb616b2b14c11c50a294b779cf94151557394d55bdb0c551094db2a" - ) |] + ) + |] ; [| ( f "0x09a059b899fad1864e5e6b6d4ca95c30a5f747f6054c296372444f3a941f6d2a" , f "0x3470da02ef4d58c95ca646e33f083fead62ebc65a646fe10b77b54c711b33604" - ) |] + ) + |] ; [| ( f "0x0d68f4d455e3808c76fab99be304318f89a4414bb4293a7bcf805b4b268a0a31" , f "0x498d939954dd7d64136f7e70edbf7124496d39a383afe844639067a0ffea9222" - ) |] + ) + |] ; [| ( f "0x88e3cafe8bd78d3a27d1b662f844f681b0470129ed5cd4ff5dfaa5144732e100" , f "0xb028956d9ba3eb37c0bea0117c7fe80d1d50b68fbbf526bb13fb28f2820d301d" - ) |] + ) + |] ; [| ( f "0x02aac42d5d810eb9c733f78bd5b29d2f4a07979ab28f430205c99d02555d3d0c" , f "0x1793397feca729c712caefe21302036eac784fceeae6b9ba70198cb2606b1a21" - ) |] |] + ) + |] + |] ; [| [| ( f "0x565e0543a20a0cbc18c93d592aa7b213fe2bc4f6a20011e45c232a6904288e11" , f "0x603a2927af4ccebb2891123768af4d4e9ad9a1be0cbf5b6019a9eeb06119651d" - ) |] + ) + |] ; [| ( f "0x7c7b5d210792ea1fc60c28aa18f9cfc7602e95d785d06a39de2805425dbc8b2c" , f "0xea1bf37545569d41d4964168c2b3cdfb46782b868607253da3f21a5b3d00df18" - ) |] + ) + |] ; [| ( f "0xdc3cf2170ccba8fa235eb756c8874e7aa7997e2f3228273c33e6c1987c6be03c" , f "0xfe3754f6144e6317d7b55791181a06530cabd1a2a486987872db1b986b0e3a21" - ) |] + ) + |] ; [| ( f "0xb87e5906dfed3a07f8eb3a2571ab8b70028869b833d19e715ec6776630415908" , f "0x11b6127d7c10fccfb812ff438eb728c495fc60b832796253cd8fa2803e0ac927" - ) |] + ) + |] ; [| ( f "0x807b2b6323b0f526cde00ba90359b33cadcb62dac3fa6c0bd1bae46460ed0d21" , f "0xf6f2b7cf37fe11e8c74c09c9161d119065c61112723a7aeae5fa4bf20605ba38" - ) |] + ) + |] ; [| ( f "0xa9ea76ae5d557557931113f2542232cf9092eed7f0fbcb34f7037c83fb80f82b" , f "0x3a08a652c22d72c01ae148474916ace234d7e87a3cc5c94f8083eea6da6c4133" - ) |] + ) + |] ; [| ( f 
"0xb339d00883e3387ca8b4cc69f074ce3681050d0a360b00fbe5d7275663847405" , f "0x2991323916a83d5079027624dfe5cbf68da690effca62ffa04aed02ab89d3022" - ) |] + ) + |] ; [| ( f "0xdbf6f88009af709b7ae83b41defdb06493d0d0c32462e8aebd2ce64dae39d603" , f "0x01f5b5249accac85b61b3b5f95136d2f2c1e6ec70d8fba79fab1ab2079130a34" - ) |] + ) + |] ; [| ( f "0x58527bcec72cad23a091391f89c4165d7f67d954ccddaa689e781e55fefd3712" , f "0xa4bec82d8f0b2951bbab5fb48d6b25b4f37ffb52c0b1328e9d7cc5ae836f9a2c" - ) |] + ) + |] ; [| ( f "0xe0c0be3a591cc828753cb543da99849fc9e9f56cd940c53cb9c8163ea068f436" , f "0x3a70fa4dac34c79bbf64e37a1acd6c9bfec39167cd57e605ec1ce904dfec1810" - ) |] + ) + |] ; [| ( f "0x5d7a1b037f1c725bd3e972ac833f34ad3c65ea942c710184bf7cc530c0abb606" , f "0x51ba2dff32aee89f512255e1df4b838c032669b71cef08e0d197c27fa6852305" - ) |] + ) + |] ; [| ( f "0x40028978f21bebf2e826ef2e72f6b4266f17d9c97825aea52a0e61635dd2d032" , f "0xb435e681667167e7796388ace993c8e3bae1bc66994bb55b15c4499c86626c0e" - ) |] + ) + |] ; [| ( f "0x6737a9b1d9c9b1fe8489677ed102526af05831b66f37b2f734badc8b9d8c260b" , f "0x01d26dd30a31091fa09a332c3d50f642035d1f21d88272c14be57632fc8d7621" - ) |] + ) + |] ; [| ( f "0x07b674c06230543ada5f2cbb9d1d903d5d12abc87f5e8a8e1a7014466bdf1b36" , f "0x2c1133f924547d12c52f29411d221068aa077915e781db87f330a44ef9c7e617" - ) |] + ) + |] ; [| ( f "0x105d993f341f11129e675aa58748c9a6e76469d18cd20dc22a92b20ea1ab7127" , f "0x621cc35db309159334e11cfa1aecfbd2ffc4cbe61fc89e4785f483a4fd9dac2b" - ) |] + ) + |] ; [| ( f "0xae07bade7cc53e17c444a989e1d2e85116e1c5b47842fd42d28b2cebe2857b05" , f "0x93908d043f8ee0f8968e6ec5735ea0eeccb576b4aa4faae910dd8970f03a0c11" - ) |] |] + ) + |] + |] ; [| [| ( f "0x17903827d4ec14fe84a97df7fc62f62ac16d55c69d33c11403f7ceb1370f7630" , f "0x1512e8b546448cd6f3b56e95ceede6e064624fc02c4ddbd3d34aca34a3c3e23b" - ) |] + ) + |] ; [| ( f "0xbe283bda0efd7c835a90e7cc525ac943d34bd3d56207968f4714a0cd28ebcb2a" , f "0xa31ef7048f2cfdde62518213d79bf3d3b6166800d1fe689306e25341111b2c12" - ) |] + 
) + |] ; [| ( f "0x723e2224e526882537b7997c9ccfea7a3addca2f2025a2eb9b4eab3461de4518" , f "0x9d2f79b0004f96917703d671b9add1510f956729dbda2adcd992142f382d9235" - ) |] + ) + |] ; [| ( f "0x14f629d55c194af8e6b82061fa6cd68ab6de201aa5261bd88e769561e088d307" , f "0x2ca14e3c19877c1e01d519eb84957ce640bc956507abe9c2f4f24b019760b11f" - ) |] + ) + |] ; [| ( f "0x9ad96881d7bf4f3f064818b2064a6f337a5c063beaed18215ea436021d3c1c2a" , f "0x6349683ca78afabb400efb001a083b10885431621134409fd83f21ca81291c1b" - ) |] + ) + |] ; [| ( f "0x84c87e8a61121ba5bfb0ce4bd2e44fa120a09319eadc2b65d88ad0db68eb5f3b" , f "0x30c0bb85553f52cfa445fb5d3ad3990d56613d7e9fe084bda7b19bd89a798d00" - ) |] + ) + |] ; [| ( f "0xae13d78e22131d4cdd9e87cde8ca6935613ff7e5f977fb7c6d5af208da4aa214" , f "0x14b4f03c93bf95a557a7e2300a010151b966ec92a0f9e4bdc82603796647c127" - ) |] + ) + |] ; [| ( f "0x8b187c243ee78691bc70a9c3abd1985a45fa1d08667b3b37f335adf505829716" , f "0x93785a6f12a7b3009407956e06ee6490c05d93720ca82039bbc4679805135039" - ) |] + ) + |] ; [| ( f "0x625b98dc41705bc875183b1857196579c0e69dd2baf2272a94dcfb916576de21" , f "0x24aaeecd617426b1de9af4ef854c5e9b8890568d8c7af49d95392afab6842d29" - ) |] + ) + |] ; [| ( f "0xc16893aa9946f12843b7582007fad798eab5e486adc2d418aef4f2fbbf2c0a2f" , f "0x39765d4a0d9f589ba72310301e4766d2c365e7a9227ef326473e36d2a2116c36" - ) |] + ) + |] ; [| ( f "0x9510e236bc2788355cc1824094869726a79b9c33777de29bb49e00ef6b1bd63b" , f "0xbbf6c1b26d4c5c2481078302fe869e417523bbabbfbb940860aa0f360a21d807" - ) |] + ) + |] ; [| ( f "0xe1d161022890f368debb65bbbeb207431a0a4ca3c174f376682b80a44ef00f2a" , f "0x18cd6059c6fd52694a0ed18f91fe6a01cf937b2a52cc8f9d0d969d74d20e841d" - ) |] + ) + |] ; [| ( f "0xcf5a9b9b334f796742d4c144dfe8f008f0083a15a332fbebb6aac500dd133f19" , f "0x00a33d9c48828867a4831b9a976db4dbe41d79c778e581db27c2a3538f6b0d33" - ) |] + ) + |] ; [| ( f "0xd05d99db14fb7443b21189d63afe6461710cc6a767f2ab155efaaba262265609" , f "0xeeda853af9492e5e84e7078a1acbb41b27927479324df7e58c95097554bb972d" - ) 
|] + ) + |] ; [| ( f "0x26e233e3c7f01eb3dd5e91cfdc5bb93e40b1f73dba24dfadd7a7649ccf8f5926" , f "0xcdf00af8bb644cdeb5be0baf3c8249177851b7c2bb42898044fc804082a0313a" - ) |] + ) + |] ; [| ( f "0x93150fbe9d5bebe62e374da3fad000b1c95fa104e6643deb19966749b2903c26" , f "0x5bf3d29eba9d974bbbb64020e9db36f0aa918e70156983e3a959892498589624" - ) |] + ) + |] ; [| ( f "0x625c01ba09bf6854b0b624e4a47557a62003f4cd0273a5373e6a81cd2f158719" , f "0x8fd81e97153d29cdcc15883bf711ad2ead3805005abed02b2efe848ded9c9d29" - ) |] + ) + |] ; [| ( f "0x19e8a1e4b1319cbbad0793571221353e8d4b0c9f13b1010b7b891d2508558729" , f "0x72ca2b2caf85a2110e9d723253fefd93ce7166f04555b1bd08a6e1dcc2e00e05" - ) |] + ) + |] ; [| ( f "0x18278cab687af4dde97dc2a6e8a9b148e93be5aae6fe9e77b674b7de0ca41f30" , f "0xf6bdd241b159f9fc95d0435522f99729b6a29564db732237252c91830f11b53f" - ) |] + ) + |] ; [| ( f "0x4dfc5cc394b8fbb4877227add2d41389dbd1a9d4bba337430fcfece88781e939" , f "0x004b26c81de10e940f4d62866e313eb871816b2f57d19c40e0e5bd29f0ff6336" - ) |] + ) + |] ; [| ( f "0xbe80725310450c73f2ec73e42bb8db51ee68db04c39d56dc33ee06da1833f735" , f "0x6e6f5d58467894b52211dffbe3e064aae10066632bfbc169f92f506677b9c520" - ) |] + ) + |] ; [| ( f "0x3dace08140c15e6c60491ed6b6b3c4130bb5f2c3a3cc7b9d910c415f3e9b003e" , f "0x80330d96c260ab554f1093caeca4d275122861fb55bb80e2820a0084c3e8342e" - ) |] + ) + |] ; [| ( f "0xdc9d39035f290c1ba46bae7d4ad95f8ee593843d9dcb286cace9fb2f2d7cba11" , f "0xa60faa99fb415247a310cbd11624bae9a08fed55ec301a1d86404f04f688d321" - ) |] + ) + |] ; [| ( f "0x0551923d39f052f3ed612a55f68989c8dbba5e5c719b194f7db972336c58913c" , f "0xe63194e88dfdbccd03b026c3be9f7f0fc4e03a1398b6b068c2589ade25a6d615" - ) |] + ) + |] ; [| ( f "0x8d8e58cde746fb9a1e7193c80740c52bfc21323b70fc8ce65700e819a3687c05" , f "0xa5f7b0587577f534853beb570a3347c4fe263896e323884e1712a4c4d8a0af0b" - ) |] + ) + |] ; [| ( f "0x82881af366b38e06ecd62eb5d0c7ef3b1168932416f346fafd4a1f84d395e013" , f "0xcd77b0d39f5a46beacec9b5b42fd5347479b22a30eb5d682181bc798a28a6b0d" 
- ) |] + ) + |] ; [| ( f "0x597a507cb8b399d67bd2defa68aa0e9847ccb7599f2fbdaa392a92034cc98433" , f "0x4e7ed5be430e7c4ad96c18030656e73d8ea1befe1d3ae7ad2b40843d72f47804" - ) |] + ) + |] ; [| ( f "0x482a7266540c6f110bbb70624a873387fcfb093846191d6529597073914d2130" , f "0x686577012d8542374ddb844a0a3686b3faf5b511d3c1c18de1b50afd12c7d13e" - ) |] + ) + |] ; [| ( f "0xfdb1df2d2b6fbefab8c36473ea6bfb6ba749b846183c06705fc3c0cccd525a32" , f "0xd2c09130d8d40f92dc50c25b2b84efea526857b7d100735508f963da6a4c7a14" - ) |] + ) + |] ; [| ( f "0x23f7fd5ea99735eb718a945b0c710471399c9e97ada2a007a7f086534db6ae21" , f "0x17a4035ca9cefddf5912a3b42c7778f62aa750d56a7a88ead7b40cea20705c36" - ) |] + ) + |] ; [| ( f "0xeef047f5b1432490242046e69648f70dbd0e54888049ebc409c2a6d155fc8f13" , f "0x8ee9c9846b6cfb9f85d8d0bb470e11bd2235afa6ef2e76c2d24035de3bf66627" - ) |] + ) + |] ; [| ( f "0x8b66d51f5aa35037e162206b8223b0070a37f41a99a27faeea6831b6aa564c0a" , f "0x59bf12b984cc884130dc18b9aeaf6243f80d365e5a4f6987e901b3406b8ed53a" - ) |] |] + ) + |] + |] ; [| [| ( f "0x839ca3227e61f2380cc537d4d3278ae56d5675795895371e94fa9b0a84bc893c" , f "0x99f90c69428274d0661a6c117a95514378a6133cc43d99117929d1b2ee0e7a03" - ) |] + ) + |] ; [| ( f "0xe38173444547758de51d0318a769ca69c4fcb857d4643fe0104fd273ac731b31" , f "0x08e36707d1fa19504b8166531a906d152910e994c4dcee100de6af62a5155f3d" - ) |] + ) + |] ; [| ( f "0x34a648ea8d7186bed9f6d10a4055f65cc50c9d9f8e0e894f2cde380c16aafa17" , f "0xddcecbc50729d95b667d2dadb1801fa435d3358c636f79514bfb526522eaf92f" - ) |] + ) + |] ; [| ( f "0x8bddde66d739b1d1d8853f2b45630b212c1d6d5d669a89f1697bc6bdfcbdfe24" , f "0xf0163c0d88221013714f31b1cdbd5b058ac2e6d2430b6af495902a7a33e9cf09" - ) |] + ) + |] ; [| ( f "0x52fb450c5fb432199be4e2c1e6cfca60634dd7933276f3a85fe5bc2ed7da3601" , f "0x58894d8e3713d88d7f07434736c6d37a847d7b31cc154ef0b66b93f66c9d142f" - ) |] + ) + |] ; [| ( f "0x2c83b13d1de6b2482432d2aaf81105adff96cf90c7ebea976d32c0a26da70408" , f 
"0x7d0bdc7fbef92988d8482b5bde514fa6793ef1600320ffffe7ed04ba82bd7a0c" - ) |] + ) + |] ; [| ( f "0x86024a8d8d7beb57550f179f3f20c72c2dd92da378be7de13351b62bbbefa638" , f "0xe601d2da88891dbc22b5b6440e96a434ccbc0f0abac8a0abf2d69ea7fa325514" - ) |] + ) + |] ; [| ( f "0x7107bccf7e81698a1dd0190a14f185b7b9238098f41af306d7ea74d0a5cfd404" , f "0x3127c0a8fa092622fd5de56a79313e58e01e9875dd7bfa80f91da101cea9740e" - ) |] + ) + |] ; [| ( f "0xc017303e66fe477ed49e4f4104416d3ee57aa14a427b48a4fae5116734a4481f" , f "0xeee411c1e77a55b3a93acaaa49016408b7a75ff9f2bc156bf1a56800a54e930f" - ) |] + ) + |] ; [| ( f "0x134f22ce563aa46f88b94645f0b5fdb5545d05a73490357fdb7f6d8ee0f9cd1a" , f "0x6f0a70889e899466ad2c43ac07c334461f391dafcef0a6561205fe0a98c61105" - ) |] + ) + |] ; [| ( f "0x466f64d26d269753c9d33e89402dbba92db28f917407ef2e2760bb4144aa3a26" , f "0x44c616efd4d10a0db0d8954d3da370d3e3e0c5b5a97f1859bc7e099c1bc5af14" - ) |] + ) + |] ; [| ( f "0x4d0ad2f63a2e10c1791e27c7fd6c280af2586409540bdc5b64189bbdcc91fb00" , f "0x8fe68aacd2968f64806dd8035f0decb33e72a409cde915501dc24929a226eb02" - ) |] + ) + |] ; [| ( f "0x3f7137242e834d8f8529c73bff224cdcfc391d306eb36c1615a98db48a65f62d" , f "0x1d7fd1b44d885a5935c0135e61e155522baf6565ad001786a34fe87f95101237" - ) |] + ) + |] ; [| ( f "0x1d477d977b0827a93f04d7865204985e179f9e322d94b44557b9e4f0d9422c22" , f "0x0cca8af309694d4a9f4b39a44134b7a623bc4dcb90f6aaa92b1fa4be9e331829" - ) |] + ) + |] ; [| ( f "0x5af25cbc15580cedb3c48d292121729861bccb9e1191c903128a16cb96a32436" , f "0x341005e7f64d10e8d64939ed1bd50e0bf094138c381174d1ff570e736d69d73f" - ) |] + ) + |] ; [| ( f "0xb47c73871c45f0adbb19fddf64206bbd5da3a8efbae97e2705fe0a20c4b7fd15" , f "0xb5e67d6e03f488caafd17397fb351c451bc0b44fae50ca13b2d7c8821edf7916" - ) |] + ) + |] ; [| ( f "0x6f053953344f79d5962a77303213d5405e76cb4cd64cf0f7fcd8156f9a858c16" , f "0xf59e66c7fd3245850f433052643fcebd1e8760edd7de48ff3b4fdc81dadedb12" - ) |] + ) + |] ; [| ( f "0x2d041241e1f6e1a6e5d7b39a470551b5c826a413b4e267093aada6a291fcbc29" 
, f "0x1ed4d14ec9c66e227db42ac277880d389862f27ca2faf0cae4a7f7038d8a830b" - ) |] + ) + |] ; [| ( f "0x212cd4c12f15986088741fbcadb02b261d2feb4aa074ea00deb4514b65597010" , f "0x773fd0493eba9b8c34ffd06f225a9fe44a976b1449685b5a932e9a4692cf0720" - ) |] + ) + |] ; [| ( f "0x56ad7bbd3c5087537369a32979ec898b573a89ceff1cdcaf11dd4fc5a7000e32" , f "0xfd4b8efa3c3417e612d21475647affc8088fe518ab4e47cdb918ef2e9f6d5118" - ) |] + ) + |] ; [| ( f "0xd8129ca68f0b9a1f0acbd3f229840c394b6495aea13b1850728acf901abb5233" , f "0xf1fa0a97143b5cd8939df1653fd82dbdd9846ada3f72c24c39c84a7d0e450609" - ) |] + ) + |] ; [| ( f "0xcb7f42a390a1263d1e0672e9887f436b653630e0c544ee26f23dd670b2b18020" , f "0x9146baf4817974bb0918e9bd3496417fadb7d73e49a7a01e522e0bf401bad934" - ) |] + ) + |] ; [| ( f "0xdc7c0e4d42da58de8282c9431eb3b045d237dba1918febea58f91367bdb4721b" , f "0xd62a3c1d38ae2f450ea0f827d9166940e34eb64bbfe2b37369d4de359a00363b" - ) |] + ) + |] ; [| ( f "0xbd33266864922871046607dfcab966b73dfdab617e22aaf36112fc04f6d7d43a" , f "0x7fe44e3d5af6a59163175b36f4234132e78fccaba576749172f44d486aa7872c" - ) |] + ) + |] ; [| ( f "0x6adfe6ddf47478116eabb04b3400b470fb93ca294896d8643cdfd06b8a9d0a3e" , f "0xe63f21fd3a7055b2521632091f95a22ac5b2450e35969b2c452e9ea462ed1d31" - ) |] + ) + |] ; [| ( f "0x2b98d9297e5cc14d55cdf202d4ba78c142067d0634795ecd8fde0b6844334c3e" , f "0x979de37ca3adc4274a9828bfc25a5babfd76045106bcde2b594adcb9c7bb922a" - ) |] + ) + |] ; [| ( f "0xacfd47445ae77d59fcc28fbef066f3c6307a46b98296e5a1cb4d8c521091420f" , f "0xfe2d126b54a2d602d8489f7175e7097c2c1d3e4ae732a68dd868db6779841308" - ) |] + ) + |] ; [| ( f "0x1bb9c909eba0d402efb7d50391d7d6da33f55af4b9740a3d7a7440807af2b23a" , f "0xbcbefa85c4c8eac1ae32af2453ad9123ccb7510b920de632721cd1886912ee11" - ) |] + ) + |] ; [| ( f "0x394bd1c6e531a5bd7b0800fc03286bfa9926a2439e964b07e279238107dbcb20" , f "0xb47b93f957598a09e188593eaf9690f984304db1239b8e42ebb845dced65a415" - ) |] + ) + |] ; [| ( f 
"0xc5edb523517ab5efaf184e3c777b9e2262b5b23a85c86bdb491241477367cf23" , f "0x2b154e1f2f52251dbc39b0784b3cfa41a6b811a657a058eb1db54e3b57cfa203" - ) |] + ) + |] ; [| ( f "0x495f58a3b02c8f11205ce4f344c0bd32c975d239831f1ab33092a365ab4b840a" , f "0xb532c5d53b1447ccd38f95e8e09a702d0f3013561dccba43528f0ae4a69e4e15" - ) |] + ) + |] ; [| ( f "0x7f5b11337ed57c6cbfd1af378e1bfb622258c4e41e554e13963489cb1524070d" , f "0x7aad4a66c068936abc2a24cc2c73c63009ef20c2d678932343fb7774b5e50438" - ) |] + ) + |] ; [| ( f "0x9caa36c654655917cede9b6400efafc8cda815281efba8ca64bba581184d7b1e" , f "0xaff921c23b4bcb12b24474a35d532ff31324d833aeb958507ed3419b3ec57c17" - ) |] + ) + |] ; [| ( f "0xf29febf72b278e3f2fb39dfcb5a2f5a5d06680c9b0716298427cbd162133433f" , f "0x92206894e898fe0cb21631babfa7f9951b2faa569028ddf8e4bc35db6b7c820c" - ) |] + ) + |] ; [| ( f "0x4a4e4d768b3d76907be5e1c6de4e20118bc62db435b15ca5805d5195368bca28" , f "0xf1318a519083f0d4f4fa8568f538a0a11536f40c0b32e0687ae5bb7e0fe56623" - ) |] + ) + |] ; [| ( f "0xcc3da02d538193d2e7813b46663c329c849f29ba1d7bf0873eaafd4a8c91ba28" , f "0x824f281126827e6bf3b53f853cf2f195f8d1aa377d2bafcd527e87f845e9c521" - ) |] + ) + |] ; [| ( f "0xeab7f480ba4f5f78762b04dd218db518d57c665cb4dbdf44f2369db509a2cf3f" , f "0xe08623faea470850fdb87dcba18483efcf224752163f772e9892bf59c563aa1d" - ) |] + ) + |] ; [| ( f "0xe7b141613fa8bf9af7898e2e11f92f4ebc99a0b589f401b4aac51b5256d0521c" , f "0xec7a56a00ccd3d7725fd9b72d47f806fedc13581221b2dba7138f74f9f137539" - ) |] + ) + |] ; [| ( f "0x4c40b46e94cf579a21b958fb916e18c6f619aa6fe5c9f8e498078221916bca39" , f "0x09f4154b92318a4dcfd5438437c5aef33e3e10884584fc97e48a5892e76df300" - ) |] + ) + |] ; [| ( f "0x7244ea190ffff38194435a29dd726f8a20ae74dd0cf13bcb6cb098c0ba766236" , f "0x5ec99d4d1defd67d0e97f1dee3106d214b5796b6f4b063b9c4c182cc88246805" - ) |] + ) + |] ; [| ( f "0x33d6dba491ba54db9af9f6fef295dedcbb52e8cc8d17ef7e6213a28987696e3e" , f "0xcaa12d0174c8b6d9e9d1cb530b90d8805548b496ffd22ac415f5c0a688cc1b3d" - ) |] + ) + |] ; [| 
( f "0xc9e468b06e34e83b75789184844a4fe412156ab9828b5c3d6d4205c54aa9fd06" , f "0x10b41717561ea7da3a27a33617828929027625e2ac045910f81141a54adbb11e" - ) |] + ) + |] ; [| ( f "0xb43b219f255c447e49e8cc8af284827db93c1be5071fdfd9f1a8a81ee2cb0d34" , f "0x406790a2e500dfae9e10a68772d63f45d23b5219f6c55c501d497335cfaba02d" - ) |] + ) + |] ; [| ( f "0x91966fdcb5f36472c6634615398b9b839f19b56353551ac7f9303571a662f41a" , f "0xd8bd324a0d889350cdf7fe9b6d74fdf7e422bc568ed2d0472fbe0ae1303b9e20" - ) |] + ) + |] ; [| ( f "0x9baa1d52a4ee951594e122a927f12e6862ceb27b2bc15d69f3cee8064a715a0a" , f "0xe6bbf4cba5a7f490bb8fc48e9c99b877ad4685b1b5216aad93c5a226f0493508" - ) |] + ) + |] ; [| ( f "0x16c675a351d2ac45011ec5f15b394191df895eddfe00a66710eb248caf8a9d31" , f "0x85bddc607fe223908c4c987e63b9aaf087b376c7f58236a945e2b7e3bcb23112" - ) |] + ) + |] ; [| ( f "0x5f3e9c3180fcd2ec4d9a119aa405e9998ca789fa18268a01c4b58e9583ea3928" , f "0x0a915e1aae046134150b48eb9f0071e52e0170706843ba4495698bd30e13741d" - ) |] + ) + |] ; [| ( f "0x5f098a835613f3d34efb448dcec42ba09a70264aa323a7f7ee1b4f06514c9124" , f "0xf145ca1147b2bcf1bca789dab8414cb461228e26e313f9c6b877479bec372115" - ) |] + ) + |] ; [| ( f "0x7cb90836f6254b569a07fab9932b85c6a5e4885bb1357d54196c739acc581b2e" , f "0x2cd534cf6933746c9c2c2ad5f3dd82e6cd9189c53d04cae359f7331e88be7d26" - ) |] + ) + |] ; [| ( f "0x1d7e5fef16bdeee9b28548a3fd6eb379bc256cb9eafacb6763e2fc9a9e469719" , f "0xcd6afe11d24e4a178f7206c31c071cf4a7fcee5ee2f77c79ac4158c2f7a33815" - ) |] + ) + |] ; [| ( f "0xcda6393e81e3a173d4b8a68e6251201ff0edd6b9ecff25f919e1c1d0f750be32" , f "0x1556a3e55ec3ffd756d8c6c468dc1cb25e7e4e76dfd4bdc77999063d0c811928" - ) |] + ) + |] ; [| ( f "0x0debec8453522a6da5e4b5638d63140c90e54e2cd4a70857ceda0b98a8d5292c" , f "0xb3c8b03d8f4ffd4df088c59cbb3f7bc3bface1f61cd6b0c83e0c5215687a0914" - ) |] + ) + |] ; [| ( f "0x2f2952a153c437c5235a18a7660faf09a27d4f3ef3313b07bd852a89db03de02" , f "0x0e8e4487e7e6e97ff831dd7b1843790a0d4964b83ac285262ea1449abbd9313a" - ) |] + ) + |] 
; [| ( f "0x5d69cafc57a63d3dd2bf17cfb72daea624cba92a09d7ca2c7d74f07f189be337" , f "0x83cc67a7c680670eb05bd41e0ca23d62e55bbab410ce789e7c45cb7c100fe82c" - ) |] + ) + |] ; [| ( f "0x147d4a14771c8d15ba2405d35d5499f9974fe8097adf884b054858cba93f593b" , f "0x0995377d4640b7771a5b429cd77bd92ef02561c38b23fc5dc7a75d3f097e0a0f" - ) |] + ) + |] ; [| ( f "0x1dd51a6712319f9e7a17bf2f961fa0db316d6750be96cb4850bcac86f4386305" , f "0xe35e8f4753fee006818631ed3ee6cc89efd76fd1dc0947f82adf824960c68e0c" - ) |] + ) + |] ; [| ( f "0x06fbccba4306b17219bf163172768970907df1b00380c61147fd56a2a4533e12" , f "0xceaa4b95bd469acccb2c9ef757ab4727947b5149e696739fdb9907d8c976f123" - ) |] + ) + |] ; [| ( f "0xf82244a24a6fd4e62445cda74d21c27b87e7fd4701e390f00995d02b19cf3732" , f "0x3bcbdf9d54c00612d3d17dafaf70147d5d2b8e3f71f4ddc412992ac519d28904" - ) |] + ) + |] ; [| ( f "0x4aeda891a092f02573cab02316213899e121c606f875f8353a0bc8deb99e0219" , f "0x88b6dde4cee83094c1f73b8347113d291bfd6188b7aba29bb3bf0c22a46fec38" - ) |] + ) + |] ; [| ( f "0xa92be18d4f028871dd347150a8f148e9f009a310de55eb3c72e1859b0b073c2b" , f "0xcf0f05fb82e0ae517b5ce7de49ceb8b267f62b66514f1a848aa2ec3ff792293f" - ) |] + ) + |] ; [| ( f "0x4db2b315b3c17e2ab977c80bbd1cb5388e9dfdaf915ed8a0cdb7210c9a4b7d22" , f "0x8c9b807e848222c235420cf3d0dd6a538821d05ae29bf654b659b6954015e739" - ) |] + ) + |] ; [| ( f "0xe71a53f2293d55d74a9a764387136ca59a4818fc0e750810c050a0873b05c820" , f "0xfeb459e095968743bb2c3a586c6e4f0fd63335b9169968c21edbb7779883462f" - ) |] + ) + |] ; [| ( f "0x2430eaf337f317e7f4494935850e36ce3fef1197b3d634059fc48215f33d5321" , f "0x8a8e501ae2d9f4f5d6e6f02d9e673bea418938bbb4bca341aeeb8125088d4f06" - ) |] + ) + |] ; [| ( f "0xd545bcbe0534d111d50de9df2d33a59a2ba8dfedb9d3ca00de5818436e8d8f31" , f "0x0614c9e609240aad75bc4f14febd2bdb80e46c323dcf878efa462ff47580d41c" - ) |] |] + ) + |] + |] ; [| [| ( f "0xee05c01dfa0f712509daa79dfd46012c110a62fb454ed78c59cb0705e6481838" , f "0x0ccb4e7a0d188a44c149c38bacd750e9a7dd69131c553062e3dc189dca55c035" 
- ) |] + ) + |] ; [| ( f "0x17661beb3cf47c49ad70987e72dceeb678d0a0ad535bf4e6ae169ce9dae6600b" , f "0x0b5a4b03ee73f4ae7e6fbc2a5f1aeb08c89ff268ca956ea60520665f3d6cab15" - ) |] + ) + |] ; [| ( f "0x09ce4375b6a901551e0afa00eeadddcf6e56033e9a8065445f8e30c602065f39" , f "0x68149babd3b43ba85c87c14620cad1925321a2109ae9c5f1bcd577295632b409" - ) |] + ) + |] ; [| ( f "0x929193a52608f0812558dd93fcd6daaf23dbdb6c67a59bd54e4a9467488f2b1c" , f "0xff1d97cf1598a6dd3a153580db05476b8be93a29a77894013ec69dd21a4d9b10" - ) |] + ) + |] ; [| ( f "0x1603708ac7dfa8041746a8d3bcdbf861707da7d204141285ff27042af8df6817" , f "0xe399a6ae4c06d2c5ff9d9746cb792c048ceb3a4a461b21cae4c13ce776e8f318" - ) |] + ) + |] ; [| ( f "0x17cefc23a75dc455cb0d1c14892ffacafef67d12e3aca8f7a1c24e8a5073f520" , f "0x76e2bb792b73c84a06fa956dd8660723edc93b8c21ecf1df9b7ece473a478004" - ) |] + ) + |] ; [| ( f "0xfc726d9bd627d5c48ed3fe5ba5d1d7f840273f1b56b288c6c6dd7ac036daea04" , f "0x61e3e59ec3ad0317e6c5efb8f5d424baa216d54aaa7d043fa15be94aa255431f" - ) |] + ) + |] ; [| ( f "0x2478f5ac231c284c82d39ad16bf191768bf7025732bac9bfbe60d31bc13f4717" , f "0xbc4a834899e651f7ce6489c969cd3a7467aa08d923c52d4c8b82d2951b2fa70d" - ) |] + ) + |] ; [| ( f "0x6b1c37393c018d784cc8f370e570b9a1b5e9f429e2494102bb87eda915e8aa28" , f "0x12903d2a15a6ce1549814a2c591e34ff8810dd232db55df1db97fcdb54ea012d" - ) |] + ) + |] ; [| ( f "0x91952492771326697382d572003748d0b306b594805d92b84c8ab1493a30041b" , f "0x67653ec9b41d456caeac66d73981723b478c001c051559c7e5f556a8e2a0e62c" - ) |] + ) + |] ; [| ( f "0x976dc0a027d1da57630dc4605e2fa1a1498f38840d3b32f86c966e983e2d6c36" , f "0xd49cf2d9c835e5194b7ec9593102a9fa0b8403184c77f725105af8bdbfc17e37" - ) |] + ) + |] ; [| ( f "0x910830d3f6ff69c99af0a72e28ec52a10ff90a73e137d3dfe23b2b0b8654ad10" , f "0x753518ff556418cd4dbfde96fd092f0e957b0dbda8bb761e779dfd4d26c3782f" - ) |] + ) + |] ; [| ( f "0xad6d2889714b9722240e85836a1bb9d7a0f9e6f2e780a3871f4590a139e0db3b" , f 
"0xab7c5ccb8eaac1d8be493dd983c3a8caa9fddab31e320e8f01d287f269e4280e" - ) |] + ) + |] ; [| ( f "0x373990ec9464b0879b157a03e66f87c262f8368e158c05549e0108670e419a23" , f "0x01c875d286bf78b310131005ff17ea0d69c1445e42523efeb0f0f64fc3851022" - ) |] + ) + |] ; [| ( f "0x8767d664fd54b766ed514123673488ce5928ae1e90beff48e165ebb1d2ead302" , f "0x1bc916e8d7ba8e1855c8dab0f3528b987078bbc1f8ea35a5d0ad76ebc13b311d" - ) |] + ) + |] ; [| ( f "0xe0fb20ee59c97f9b513c4475d17f5215caf0be5d9a9e6ee5517db79ca5a0c01a" , f "0xde75f37e44350fccb264b3af3dcc2702c580b7ca8ea1f157f12b146dce05ac34" - ) |] + ) + |] ; [| ( f "0x3e16660017e355e0f97a8c7950da2e1443507a64ce74f046e40ee71bf5ba7d08" , f "0x7e02dfd8756112f73576b8229ab35171ce80ab34befce17a8972b959f69e8507" - ) |] + ) + |] ; [| ( f "0x9fafdba52ee3b9c2572a6656d677714e2e2352b8079e4b8b18c7a2793c8b3e2a" , f "0x8b134fc6dc91ccae3e052d6d5cc8b4d0b350150b7d38eba6647c4dea602e3210" - ) |] + ) + |] ; [| ( f "0x4adeb8482b9fa2fc7f91ef3669970e3188c62470fd01a0de650f85fd85298336" , f "0xd22225c2d92134c4c542610c955b2ad7c9e2f9c59790f7bae2a9fd3146246931" - ) |] + ) + |] ; [| ( f "0x0f410d4bfe7d10ebd53f5116fed5afce3ed186b60a1d94f53d6a3dee1db8480a" , f "0x5f2030a031237d4713163a1228f5299b4ba5fa52084271a0147c0e56b7f9702e" - ) |] + ) + |] ; [| ( f "0x200bf26c4f98c84912f8d6e3bbf42eec0e1d0e90313ec8dd418c2fb901ed7b38" , f "0xdb0069cc5a0b5b1885a27fc30b48bf167b766b907f673c26f3c613879a77af26" - ) |] + ) + |] ; [| ( f "0x0b8bcc9e314ad7e652169a45b2518086d89245bfee1607a611d10dc1c07b6c28" , f "0x89e853d09181868f063680f3b181bbd4a3a5e380154694a4a275e99d17227d3a" - ) |] + ) + |] ; [| ( f "0x9f6cdcc702e8dd20cd87aa806b153f38842b8da13a7c28fafbc2fcc360da3331" , f "0xa74c3d0ad201fe422ffe3c228f7cfc383582334a7ac8aa7e5bc9a6e949d3bf13" - ) |] + ) + |] ; [| ( f "0xa74198334044faaaa2c10ec6f1edddca4304ffbdaef2919a0336a0167a7c433b" , f "0x55c93981bcbed22c52ae1e5cebc94c384f910e1ed9531a187f0b120522210f02" - ) |] + ) + |] ; [| ( f "0xa63c05db89a8a093c7c0b78d0ae0cb43d5e9df98bab9b2f630e1847641c18505" 
, f "0x06e9f359f46177a97a0902672629165d699f2a697ceff5279f47c9d794c9d712" - ) |] + ) + |] ; [| ( f "0xbbb26c65d69b3e3318cd5f762cc27e38aa81e8c523b043adbf74b15d90633a30" , f "0x5afafe982912ef2692605457792a89879a042ade8655560c9b015dcc1df02500" - ) |] + ) + |] ; [| ( f "0x5918e1775fd59b7ed60b1cb964203a45ff9dc827256f5bbb0e5ac314cbbb212b" , f "0x46095298ca5747706c278a9cb169cf9e40bfa3bff6901eb7a6803f716fef0210" - ) |] + ) + |] ; [| ( f "0x26a5dca332f6607ec3f909b022b6f83d4bcf891b6f119909b1d174fe4c06ce27" , f "0xf5b7b895a345073cd58cdd21ca4a801f8f995ed4212ea397146822d14babbc1f" - ) |] + ) + |] ; [| ( f "0xbfd31e628e386e571e98e052f7ff0ae3b8f8825180bfbee311d17ec6f9f1902a" , f "0x11484cf9bbb5604b262a3c651ad9d89a2386c0434ff411c6b7c735bbc438de18" - ) |] + ) + |] ; [| ( f "0x00cb2fa7b84e8ad64a6d2fe1d4aba9d09bbc1b9059d4e6cc39b43600ef5ea421" , f "0x6de23e51a7a00baedf11ecd575e3461bdc15ac56d900b04e3a9e45c05863c90b" - ) |] + ) + |] ; [| ( f "0xcb970ba357aa0deb91342f1c26b982c2474382b1324a2592288fd862c5e44231" , f "0x00cf2ef293fc848150c58a45dba941aa56c082cfb2ff9a70bd6c44ac71669234" - ) |] + ) + |] ; [| ( f "0xadfbe021e7e229f8c629d548bf6122908d4f8fb7b40b2c8d97815e0b0bf49320" , f "0xe9c0d2d4f58d2a2f2aa1bfda313084fcb379ba59b90f79cdb7a28ff32955be24" - ) |] + ) + |] ; [| ( f "0x06fea073311a857d39b4b200ab43956494af18c23dde230701a803adb067aa04" , f "0xefe1ab64d570ad1e57c19f0c56eade05101a66ff0ed39225a3cd0d5efefbbd0d" - ) |] + ) + |] ; [| ( f "0x001d8e217cedce834809a27181d6cf9152846588da6be731fbfab5e00474f722" , f "0xd6083d47a9833cd32cec1e6bb54f633face88ad00e8cb21e7dfbdb0849cbf63e" - ) |] + ) + |] ; [| ( f "0x2aa63077f7ffa4fa5d243626d65e77e33cc7dc52e6ca3a9efbd96cf2385d0c02" , f "0x1b760aab355f0b658d6b70ac53921296749fedcf4816f0335284b3009fb30e38" - ) |] + ) + |] ; [| ( f "0x19970512bdba4b9dca53d133d1d03063fe6ff92a7cbc37dca336c2c5c0968113" , f "0x5c56b8ad431ef48bd9d9a0ceab6ae9e3e432ce4a8cf7eaa65e1130663e70b53c" - ) |] + ) + |] ; [| ( f 
"0xb15ad7c5637a2ba9b3a6e18227c56fb2dc13a5ad7a133cb2eb0c93eb4f357403" , f "0x916f4912335a91cc277719db18d5f9aeea5698dd798d3a99bbfdc93cffd3102d" - ) |] + ) + |] ; [| ( f "0xc2956e6763e2d06454bdb5e7e60ddae59526ec493396820b73829daff159b115" , f "0xe779a99abaff29447bb68783aa45e8ea95069e6a540f214a9642f03c4cae2006" - ) |] + ) + |] ; [| ( f "0x4227a470ce635bf4e96c5c3514518db9777e563ac01e2982bedc6eefb00ffc01" , f "0x99ecbab6e6efd2ef5936d34c94b27fb2e825753dec75d000c8d5fb79a3ff4816" - ) |] + ) + |] ; [| ( f "0xf00bf6e2d7756dcedd5a515166832984b7c623c5d32ef5aaea127ed8af87a138" , f "0x6a1c9472a74b5fde61407d0498147a01488c3c22c3ec503aed1dd96d21125d1e" - ) |] + ) + |] ; [| ( f "0x00344742df68dd614da40f3278646776e57ffd00c0cfe8dbbed1a60d2ba47a31" , f "0xb2f1abcc1c9bab933ea2cf7515316d6cfda91f606ac2b6c5a3c14ecd3e7ccc0a" - ) |] + ) + |] ; [| ( f "0x765dfd01d39856c7663a24289b113d852df290b3ae0b560529635572bd4e6521" , f "0x0b0b41ce3756018aee7ade3ac109373d3ad50361d7826bf4f85ac72b0e52083e" - ) |] + ) + |] ; [| ( f "0xf4608ceb9a5282200cc179e988957364fbdf0bb5f01a4e2149d437c662459411" , f "0x56bc72e215a4d6664b5df94157246de1d3028b483e71e772b18ef00f04c5342c" - ) |] + ) + |] ; [| ( f "0xf7835bc83300ff003bf3906a8045478b9eff369892c5f7b8ffb4c051c8ffb501" , f "0x7f865ccedd80ce2406d16551fb5135c1b2456a3bc445c59334c7cd35d68f6b18" - ) |] + ) + |] ; [| ( f "0x649da92eb8718c2bedee97796ea5510ef08784a8e1c85efa5be01bef25e4f311" , f "0xe15db9001c3d52e343ca16dd28437cbb5d6e4b023ea716c67bffe604086abe33" - ) |] + ) + |] ; [| ( f "0x1969c4ffdccfeaf4b4071674f4158fd99cf25b7544fd1d08dbf64fd9e341692a" , f "0xe54e5818dc788490a273322bd1c839c385f686c5de3dc2cb1f797476f23d6d30" - ) |] + ) + |] ; [| ( f "0x979e80e2cf4549c37224c983115788017e4834c261091d21e8145e423bfad329" , f "0x64ee9c28bc875ab9222ee3365233dc2e97d2328626eeaa004c92c3d81e95fd33" - ) |] + ) + |] ; [| ( f "0xae8185b9aaa3cf69d7de97268c9b780264749b8c2e38935088ddf7fad5ae751b" , f "0x95fe50731cdb25cfdc6a4e4f9ecfeb5e0cbf82c8f531dc48a5a5b0ffc1eb1b18" - ) |] + ) + |] ; [| 
( f "0xffeeaa3d1a96ad7daeb85f09d5e24f87c215c6a21a0c6e347cfa4383ed406e11" , f "0xff6f8352e3b79d69760fd1686fddeb7fbb27cb7923fa66f6acd9fc44abcdab21" - ) |] + ) + |] ; [| ( f "0x5fd7993817e6223cd6f553a0167cd3511234b84b55cb44e74128b30192e95a31" , f "0x925ac1cb584db0443bf293f34920a794c9dc9b6d17993c177c4c0903b7a30321" - ) |] + ) + |] ; [| ( f "0xdd48f899077d7386e7a00c0c03234c19a1bc633d5acedd6500fabd6e37a4e707" , f "0xf805c617953d925decd0d37bedfb9d3a9d39f8fca64518c103b7156b5e9f141e" - ) |] + ) + |] ; [| ( f "0x15438261281a22e7b3ba28d3e8d2411eb04ae294c7e8df7ad127e4b8e1fc4209" , f "0x1dad72e8e9a9a27ddbd7e3462240ab0cb17682128ccaeed3030c4f7d4dedba01" - ) |] + ) + |] ; [| ( f "0xd15842b5506cfd9d417116cb0ee61a7d650fdc55167bfcbc509ce45bc769813f" , f "0x0d7c21f07b1163617719162b1feb9400bdd05edf8e264d69a4363ab9d008e102" - ) |] + ) + |] ; [| ( f "0x06b2a5245cb5936301a43283940186359f6646d2ad6c5a099444a2f44e524e33" , f "0xef4db07882d839b96cd81e3f36c4ac16be888c0fd4a741775d04a32f80c0a13e" - ) |] + ) + |] ; [| ( f "0x29f5e4c119a916eff372f90b064b61afa7be167715b59df5dfcf8806aac07735" , f "0xaf8ab323bdd5102b13ab2ea2da263404b2a82857a776eacef90bf011af12e212" - ) |] + ) + |] ; [| ( f "0xf3c71fb1d5a7624aa39c98a21e2df67e2042348f14ce8af994627b72a638fb05" , f "0xfb66a61fd40eedc257563a6d2e869fbfb6b703f7f68320711a33292668925b3a" - ) |] + ) + |] ; [| ( f "0x135f7f4c068da2369f69d709b4378d17eaa3b20cd1fd9de72f5c51fe695a1b2c" , f "0x93f34cc12967856810366201a4861a1154e3ef821de64567d0a7d76d4ec96e0a" - ) |] + ) + |] ; [| ( f "0x626f69d49220962ca343dd2ecc4f2e3f3c8a6fe767071a31b2f52bc47c92523d" , f "0xd1b5a459c909feb606aeedeeaffa5ecea8526937f0342934b8f6ab1f7f0c2c2a" - ) |] + ) + |] ; [| ( f "0x0db2f0a342ab071c6e30ae30005f0636457dce8ac37cf85627a5d4e9f63b103a" , f "0xe9ec0613f2b5bba875e61a5fe0852d943ce0ae1e0977ecb989dd360d11cd5c31" - ) |] + ) + |] ; [| ( f "0x02d3f685e8c9194412752340ed98c633eb4f98bcbdc412b93369d2a89193ef3f" , f "0x328f08e6bf8372968a133609554d6bc487790f4528247bd16d887f724e065f06" - ) |] + ) + |] 
; [| ( f "0x5d47e5c690de315a946e717817b37abebf5569ea214f0e0659d749b53328f41a" , f "0xb4a34f7aedb3cdb0a000e6016d7c9b4e36b22cdcc79d79225ad31af638514119" - ) |] + ) + |] ; [| ( f "0xd2f77dfe179139f7b63e5d592bd4ac83599d618633f0c7179c1eaae268c94e1a" , f "0xd77e342f283bf12435d1099334a883e459cb8ae73816ea7424269eb5f6929f29" - ) |] + ) + |] ; [| ( f "0xf8c13ad6589dd2f319338fb3fa0d2dead81169aaed09db29ebaa1ca2182ce426" , f "0x4f85a43e6ceb08d711229965829cd3b1822d47bb8027f9a2b4836335b1d82e3a" - ) |] + ) + |] ; [| ( f "0x60459719b81cc94d4eaa6fc6632e0f8aec30052142f3afaee37e5f446aa8102a" , f "0x346e498636df13d77427cbf2996b44662d7ff562f73a76d9a60332528f6bb03e" - ) |] + ) + |] ; [| ( f "0x2e9975c9cc7363376da27ec2bf6630fdcf2f6f79a23c39550d90e2c7e561110b" , f "0x7949d8dfa9667e0f1091154f4d30f5b04f5781dc6c3768a403c111a7b14cc43f" - ) |] + ) + |] ; [| ( f "0xa1432c8c22c486922337820af628a68809aaeade294b86889a42b3f57213ec1f" , f "0xe1f8b5255c5949a599844cd5cb91379cd24dbc92df141657a8eec8b7c0b0f53e" - ) |] + ) + |] ; [| ( f "0xfa39444c855b3d737ee8c57dcf632312d7341a6572bb99b919bad6a0e7bae224" , f "0x515c91aceb119ab2f7a49889d276f3a9d122a3e08f13f57efeb3d037da61b01d" - ) |] + ) + |] ; [| ( f "0x7a406005b82efad623fb7574ec0c1f482d0168c6e13bd6dcfc583bb109de3c2b" , f "0x658a5e45d2d09ea22af61badc344d0e1be995f0c5401d191da524fa00ac23d24" - ) |] + ) + |] ; [| ( f "0x18e5079092fa960724d240d235309ee3169aed661c2423986235e62c9455f300" , f "0xd3352ace20ad374eba616748d4fddc9da9bca06e9b2cd4f9c7bec20c46b14c3f" - ) |] + ) + |] ; [| ( f "0xf9072ce2c7353381e74ada5569d2f471dc3e1ed58e43253b942a6d83fe433301" , f "0x73c2c3e9d9b84a7e6de503bf95d75095fb02a6378221b65da729c30578a90a34" - ) |] + ) + |] ; [| ( f "0x70f84f00e8873f8bbb9e057e1e17450afe15941ca5c2eab3c4be2f0a38ad792d" , f "0x281efd00285d1d51f0fdbd08463d4544bbfb771eb01d4fe82d8c43fae0f1e92b" - ) |] + ) + |] ; [| ( f "0x174f34917f98a6a85700f45c934315c963d76865878ada3a3f7fd60a9f874103" , f "0xfb2da2d99203a9c066e61e910392a828cc278a90f063663c338915216cad0236" - ) |] + ) 
+ |] ; [| ( f "0xb938d90a12a2cbb4fe3d9a275483f9b86c9d1659170f1b296f3cfa7fb2e63b38" , f "0xf1a03f9ecc8e244f9e32655f8c6953a0795b54c56391f0a3f7d3174ed994a123" - ) |] + ) + |] ; [| ( f "0xdc36764111c35809331e66e8a851831e2c37bd6d3ca983a5e05294d1794aa23a" , f "0x67177593924544963843cf3d2155a736b2d3a5121079152f7d4cb4f84506c302" - ) |] + ) + |] ; [| ( f "0xf48a07332620fd4f633ba84c7fd69165b89d563e499529f62ff4202bab563e32" , f "0xb087af71e1f0474a3924ded9d39697aaf60a225c3f31f9c3cefc891df99ae722" - ) |] + ) + |] ; [| ( f "0x310b069b67ebb5d252ce52c60efa4c0b8c5ca27d3b062bfdfc16dfecac674f1f" , f "0x427814e0dae49f008090ea204074067cb0b084f4c5abba8c12d3b19cb893ff09" - ) |] + ) + |] ; [| ( f "0xbe8d49d97684eb4480bec9b0107311c726edcaa4e0a0baa2b9845d6641469f33" , f "0xd73e431caafc60faf7928af07a814c5fc811291ec73eb4ac3743621c11787e2d" - ) |] + ) + |] ; [| ( f "0x07f26e0fd8286be6b3d8d0bd527c7b9aa680ad6286c1c7e0397c42960182ef32" , f "0xbcbc06dd229b7f09126d4d56f680e72f6b779d37591efdc573e46a09a8f8c12a" - ) |] + ) + |] ; [| ( f "0x1335969fdd684450f0343e789aabb3596da2a8489c085676b699a7198e8f0c1b" , f "0x416222be65af6e2408b78d62bcbc70d9779fa6ac1f68ceb1041ca55df5b0253b" - ) |] + ) + |] ; [| ( f "0x357c36cc9dcfdc982eea2c5dbd1709ce70ccba3028003df8c3a5fc55734ac61b" , f "0x4fdf0b6b78c8850a79a5e28ddb150296bcb02e43e75d31639a23d4c8c053291a" - ) |] + ) + |] ; [| ( f "0x692f82bfd4bca7e844e96e233df50587124026eb0dab91255592cf9178b84c13" , f "0x14a303eaf07c1a91b0c53a35c3344eba8c1913728f636f35a644e89001fc3924" - ) |] + ) + |] ; [| ( f "0x3aeccde10ab33584287aac377acbfded64e8682299e266534e558b2ae4bcb224" , f "0x6a185004835f1cf82c843614910ad7d928705a726a7fb41485f9cbba433b7a33" - ) |] + ) + |] ; [| ( f "0xa2a5ce85ef51ed4ce6a5ba8701f0b8e6d2708eef29e1871e2ad0ea765b31412b" , f "0x98299972db0eb5291b71b16ee041d313d46bfcba1caf595f615a597cfe2b1b34" - ) |] + ) + |] ; [| ( f "0x0a4b83bd0321717eda688f81b206fa2a9f6a4ce664f95b16a278da13e2f84103" , f "0x7d68ee326da9709f6fd34b91fba6db8f23d03d970ecd5ce7879a73f4c83eee35" - ) |] 
+ ) + |] ; [| ( f "0xacf530a0f0d418d06e4bcd6fe6ce83f6865e81321865199f7aa057b684969008" , f "0x9f86e8f5c0eb5bcd97315e26e72e2fa8d903e5b64342bbbe31213c01ee811a11" - ) |] + ) + |] ; [| ( f "0xc704cbc39a2af81b34247b748a149621e6e8020559d563fc9fefdf90490ed22f" , f "0xd599d02769d7c7261a824817f78821eed9799e348729fc890222c2019c5ed80c" - ) |] + ) + |] ; [| ( f "0xf43d2651ae2a8814b095133bc7927f6d54a20946b279821977467c749873ce13" , f "0x0834fd902a9ccd7593e81f8d39cff9666691c28e6721555e9406ceea66a48c3f" - ) |] + ) + |] ; [| ( f "0x09abc45aa11dd5d5267d9b6d862f4cbefd24e5d942173cabc3b54349a3b5721f" , f "0x14435a073e7edc67fa5db7bc2830597a91efc3dafdb206a46661a32cf066ff05" - ) |] + ) + |] ; [| ( f "0xf9d85c7b63b777e66298489a8fe8d218a0731497e0630df34fb33e88ed8c800a" , f "0xc9dd05ec4efdcfdece68a61befd1970932c97fd3fddd8cb1ec8fdf2da90bac35" - ) |] + ) + |] ; [| ( f "0x80f0fce6b6ae5e7e519821f7ff5aae120a2014ac69c807fb7b836f6db43dc235" , f "0x72f86663026c8db42e01891c03024e0c1e1ab0d7cb73f6eb08d9b3cc7169a832" - ) |] + ) + |] ; [| ( f "0x8c61129a3dcb211fa940c26af3fa7f0ef1f482a69b087fa2f1151f0e12e23f01" , f "0x997b4abaf5e18e254062c62eb48b9d4c036f777e46142a7189f80a158bae5516" - ) |] + ) + |] ; [| ( f "0xb21feb7d3a2b0e30ca040cddea164d329fa58a2ef7f3dd5df55a15f78d659a17" , f "0xbb0b19cd97d70aa639c0c38890ce1811516819add68fc473735f263aa7530103" - ) |] + ) + |] ; [| ( f "0x93c1d3d67a6698160957528b0c88acdce717c92fb47d20a29abc791b1c8af61e" , f "0xd98f1af61473af1dfd476f0e7f75c0701a31dde3d2aec4ccdeaa222c27d1e439" - ) |] + ) + |] ; [| ( f "0x36c87c7457dea5808d75bb5269e6b01bb4934271b8c833b93fbfa7fb585d6428" , f "0x9aa5631bca0f71cd732431c0e7a6226c4c262b11f0f94c7b97a0f0b74a767405" - ) |] + ) + |] ; [| ( f "0xae86dbf27a87a07975188e20ab152f1c7d752a285b95ab26b7b91fc5d36ff431" , f "0x06e1c03ea26fc25160a495939ed2c11647a17705b3eac84f1bd2137cd3f8db1a" - ) |] + ) + |] ; [| ( f "0x9e3f4279c2467ee3e56bd2686ba0d0c6a45390fbdfcb1258a552227ef2914923" , f "0x8a1e78b562be984524efc2a08b35377b030012866d0b52725b53a27e36bd4d07" - 
) |] + ) + |] ; [| ( f "0x0bdc85305028a5e7179026a54fd2701dc1fdf90437c31cc703fb2935046fc733" , f "0x7b71c75cdb3d744ad6c4d35e98c96c1a516479936dc9007c567a7cbc3cb3cd2f" - ) |] + ) + |] ; [| ( f "0x2a95ed48b725d8c3ec84543b086464d798f23a36afb813655d051ae38c5fa338" , f "0xb836ed1b30e8e0aabf811a1ce5bc16ba902e15a2c0c7b499d27dd0d86d931d03" - ) |] + ) + |] ; [| ( f "0xefd0fef9103d9b32fc10148cae082107904f9ed912a908d20354a6c18b67bc3b" , f "0x47f0fb3cf94e17decef89d990e4492612eb78eb54b00eb262473f881f96b6630" - ) |] + ) + |] ; [| ( f "0xeac262d924cb9e6c0584c6fc15cda95566faa512f318d784f4e40fd9cf097531" , f "0x689098680f62f7dcfe2fb6904c76f3f63b9f131d7dbf7bee3a9466d12414382d" - ) |] + ) + |] ; [| ( f "0xfd838492da797aa5a52d52a811d3d6e77fce344de255639745683e1fe105d914" , f "0x956fe028b79d272fe08a6e2576a1d9cdf3f8ed37db6575347b8b9418f1f6b521" - ) |] + ) + |] ; [| ( f "0xc98d891538fd87c3ccccb233e7ed0b52b2e4fdc5ef5a7e75d45270ebec86f23d" , f "0x73aae2f0fee11b5cba2c18c2876f26febb01d2182f48bc6c7e10806861a41f0c" - ) |] + ) + |] ; [| ( f "0xa810fb9e1d8e827a5ac1c7d0193d1a5de5ce4351856a069bb952e93c572d3920" , f "0x6685b0e3ed157ccbf720de8212ad020bac2394c494b9014462997dce42027a39" - ) |] + ) + |] ; [| ( f "0xa78837d06b6b526a43f62d2c5f08991f96ffc5182941b3bfdbdcac61c2436235" , f "0xee611b6c2d1cabaedd09d5b58343de25fee8b83c9d406904363ffd7bdcf7402d" - ) |] + ) + |] ; [| ( f "0x2a27dc68dfe787abf3c586fee00aa6727f30ba30deacb7553e49e13bb05b9900" , f "0xc9a39a7cc57b2ef17bd997b94185b3074d110f72172e7dd9abc3d96e6e882707" - ) |] + ) + |] ; [| ( f "0x8c8d3f759b42e5aa800306c87de481d265efb2648a361b6cb47577855343b136" , f "0xb685ce5b1f526730ec65c9536cdd6188f5fe0854569733a8799773b4de1f5936" - ) |] + ) + |] ; [| ( f "0x933cafa7008653feaa6fbf03fe6d50f51571807634ac48b03ca18a86c4f64b24" , f "0x81af4f9ca5bb43e74c8c1a70c9df340be59f2d3b2b810f92a0d8907e3b416d0b" - ) |] + ) + |] ; [| ( f "0xd10e365767dafa74c505e939f1859a92951190ad509ae2ab2e10eba563e3140f" , f 
"0x4cff443ca04c4c8bdf92915a94427657ce6a84d09f8fa6767e943a1d9f72c008" - ) |] + ) + |] ; [| ( f "0xadba1be9e261d207f6d330267fe213d4c19510491f3db178ceb73e13152a5908" , f "0xe08e4dafaa38b8066e00dad33058bda2cd5d6473e3de1258eedb578587d8520c" - ) |] + ) + |] ; [| ( f "0x13840d5e98884b9aa5ecaf09eaa45973e21f46b03da2d59ab210f070bd9bca3c" , f "0xab2aa20f6f38b8cb6028bc7087ef3cf45fb9f519c0809581328e77a169b3b322" - ) |] + ) + |] ; [| ( f "0x384df38333a19678e9b83053daef40c9c059fd45716a934f6a913ce164340d3f" , f "0x7adc58c7e180c047fa300c4a0d9e2014235f8017ea1f4dd528faa2a21daa993b" - ) |] + ) + |] ; [| ( f "0x71ccde166deee13b80759028e03f5049f6f2e0a78d9dd22bf3a4a9acaa46d81b" , f "0xbb8556c6fb88dcff8cc92d1713d2bccd8125f119c161e6e55444bf9140035105" - ) |] + ) + |] ; [| ( f "0x188218b68afd06944afc2fab9cdd3ee14747e10fa0ed2f695cf69f0a68ff131f" , f "0x651eb188ee0e26c2b380eaf96bc2080e45f29d5b74592759f8da838d88163610" - ) |] + ) + |] ; [| ( f "0x42de00b2d2d3b0f265d72392f608da5bb9764220c532e0117e5d1be57dda8435" , f "0x736025440c84f36b647a965f517ea5d50106d07acca908415ad7bced8272522b" - ) |] + ) + |] ; [| ( f "0x04f91b2db0e9b1a7b4e41e665e71f996a3015def296d764f1f76b9afbfc2893f" , f "0xab5189a336c8b7111ca691dce615fa4e1c88eb7150823288a6f890f1657d3021" - ) |] + ) + |] ; [| ( f "0x91eee3263042ce3b19a5105d30a96deb3cafe35e4f4a3da043ebd311edef173f" , f "0xdd6617a50dc40a1bfd7945b08007d3f07d789ba882fb5a8125c28d67e9dbea25" - ) |] + ) + |] ; [| ( f "0xcf566e5c0baf4eafde9472360e5342ecefa6ea5377683ad0e454748b4b88fe32" , f "0xd0cfe7bac2d36c4e92e7f14506e2712f67b7206a5bb76ceb26c2dd6816d75406" - ) |] + ) + |] ; [| ( f "0xa88571a359174d849561c57106db93fce0960a20ff336c40e5f1af15e9486424" , f "0xa525f5fdb9f45fdc081f89cffd03397d9c3ce30694edf80232470513ab08e03e" - ) |] + ) + |] ; [| ( f "0xae4cc387765632967785593394abf28608d0ccb3e5a9478728029a705c872727" , f "0x7f52ca94a6b4618e8203f14642fc1bfb4ec3b63990cc68215639cfc1e9c8b912" - ) |] + ) + |] ; [| ( f "0xdc59a9d6722396ca9124cdface51e11fa89f680a95bf0af88962298a1dd63722" 
, f "0xb70c662a77405142f9cb2d3cf932ca8fe34f14c996cc549040433499b899dd2e" - ) |] + ) + |] ; [| ( f "0xb3742db02f7bb091043c35f6eb65cb48fb727fe7aaab73c52fa905650068e02f" , f "0x2eed0ec7732fdc2e6e5787ee430bff02fe0827a9a7605dcc103ca218a49f883c" - ) |] + ) + |] ; [| ( f "0xb404335d78be2612652176d23495009bc17621e5fc959e596551788842034f3c" , f "0xe9528f7d95dff236a16a88e2f6a4ae01c0b5961ce3c18e620065000000c2de2e" - ) |] + ) + |] ; [| ( f "0xcd906168c96e68c4c993ee972de0a0569df1df2bec4d12e8bd2535cd0e205138" , f "0x40a1a20c59c64187b3ed04267087a7731778a4aea572f6abde3dfb6b00289211" - ) |] + ) + |] ; [| ( f "0xcfcc9bb5e667ea76e7c349b580f79a270b81efa36d76ea020edb6e69ffa1cd1f" , f "0xd3ca0ee4ac5e6ab272e96382db24b1905abfeabe25dc4fbeab547d3b277b941d" - ) |] + ) + |] ; [| ( f "0x3c8767329555c7b2e78463cab733aaeb396ae4ece55b170838c87eccf9d54325" , f "0xc4372aede2f224e9876aab5a2d075206b3cb4a3f44aece558cfc438383a5d637" - ) |] + ) + |] ; [| ( f "0xb65a3b1a8e35cd0d3ea18c1450ce2b0eec34335e19d416fc645ba45a0eec720d" , f "0x4a27908f1460b5ec640e9853a06912fdceb78a0848ce925f7dd3ac5268118d17" - ) |] + ) + |] ; [| ( f "0x3928495e72592c4720622772be8bc19e9956a3324bdf232fa0ecf51ed66c0e06" , f "0x92ecb0ff9dafe439f57789c752d524a973bddac96cee9f53a8215d02eef5a41a" - ) |] + ) + |] ; [| ( f "0x10304de87e15fcaf4106bdca75701a519752147afad783bfe2587255de161827" , f "0x32f8d231288a9d7be532a8c3632644f5c7fb3c319b0ba4c3a0dcee74faf50a02" - ) |] |] + ) + |] + |] ; [| [| ( f "0xf940731f166b7862e5e4fbe208637edc37b10e0fd566b6e80ecc1e72d571c63e" , f "0x90cd57943522ea6ff069f8df40100455149c1ae8d8b3c0ab22ec6bbb76a6f83c" - ) |] + ) + |] ; [| ( f "0x4a45518e85787241c9cccdebda68a004dc7b36ef3aa9df2a296713b5dc704c04" , f "0x54346716d760e76a59141b4d720cbd4b06bc2c252a96de7abec56cbab4558d2f" - ) |] + ) + |] ; [| ( f "0x1d493dd6e2f85d500fbbf5adf04dde8db96220bdbe8302ff45d237ae3b8b963c" , f "0x7aa108606a1122a99ff1ff60016df9d72a88a4633e5287d39bcdc6600af29232" - ) |] + ) + |] ; [| ( f 
"0x21b412aedecbba02d0f0a9cb5ba44e3a4b72cc2d4a8bae7010b48215d495c21d" , f "0xddf9ad796fb31d44fe36c997f53702e492709138b3780e524018bf0d077b7713" - ) |] + ) + |] ; [| ( f "0xc681543d1e4796636eb0af8dffcdaa4ffa41998bb0df744ab1efad2aef7be030" , f "0xf20917427eb3dcf09a7cdfc48433ed99f47f85c0c4b15c083f312ec8a306dd2f" - ) |] + ) + |] ; [| ( f "0xf310f554d6827cd4b5fd183a335e80762ea2dbb9374a40fc65e5938d5ef91a32" , f "0x870a40349e91722dbed3470e0eb241a7d73ca61bde237d78b1303ede18a6de30" - ) |] + ) + |] ; [| ( f "0x24624fd32dd1672be896c39eb3e18ddf4fc70a73a8c9f9b7565812b47584e010" , f "0x31680333c7f23dc20446d6f3524eb7d013521ae678182036611f931a5fb4f71f" - ) |] + ) + |] ; [| ( f "0x2810370360ed66543140b227eac178aef1feb174f8daadb8306768a53235de17" , f "0x9f40ed0d93bb07eaa094a4347c04bc7b8cf51668a1de03deca47c43171f2cf37" - ) |] + ) + |] ; [| ( f "0x8be2e5c883165f26056b0b4a91ccf003027563edc6a97645bd16ec3380881a3e" , f "0x9096a64b97b531a193c24fb259098871d95ce7eb0b072db4bb1046631de8b42e" - ) |] + ) + |] ; [| ( f "0xe988901792b75c515b983a7d6bb3aa13946f519eb97acc3955daa763f1153925" , f "0xf79bd3a3e58175e475fa339fdb72e6f7f04c8e3e51de7af11185dabe86bf1207" - ) |] + ) + |] ; [| ( f "0x0613e0de39bb9b4ba4083bfb96740fe0694305407a7aec1a1f9012ef7bd87532" , f "0x35594213b10920b8ce89b6c92bcb8d3dfc2882366d2b2fdaa60a3ae115466a19" - ) |] + ) + |] ; [| ( f "0x8a32709ea1cc1a6142d525b62a52dde2a8e6a8b77fa55c3425ba4ede565d0d0d" , f "0x67802af027c4ca223e83d80c29e88a74e7a9048401506228c588f48ca8187d21" - ) |] + ) + |] ; [| ( f "0x8c0e8402690e4a6d5f2ea1b99cd8c5143247acadc7ec95876ab05b700d5be734" , f "0xf7d5be814d03dc34953a70383644e5246ed6295b04c3ae38f77f131ac03bd322" - ) |] + ) + |] ; [| ( f "0x322308e052da974b2e6dc7ec633bdf43bfb5dc1f3523212f21083faece09083b" , f "0xdfd9ba5ba03d7fcf4277b2ec88ad43f472b5f113f3a1ede0ea9375a8d966d40d" - ) |] + ) + |] ; [| ( f "0x33106264ecaf8c70478c5b6f89d7aa39dbc3dca78a81e69588d2c799f71c772c" , f "0x2ed2f3697099221223390dc55922173c0fe9ee3796d04b36d18daafe3b481a0f" - ) |] + ) + |] ; [| 
( f "0x8bceee001db18f323341e1a7254da0d10c892b85e99e02aa0cd866f855743d34" , f "0x017017f0fb86738e592279d458c648a927a5e84a5628560709b40690d32c3b3e" - ) |] + ) + |] ; [| ( f "0xf1c0acbbf5c083e8420fe13201e7a7dfacadc9e5a271ea822a3a7aced8a02916" , f "0xd5266de3fc3eac1b6f23b27e053605eff511711e7b27a2bffd974c959a461831" - ) |] + ) + |] ; [| ( f "0xbbd4eddb311ba7e9fda272d736f2763fb83537c65bfd03e72b16b86862516832" , f "0xaf219f804ac080241ef5918e9daab0badbf636d7446cf597aea08881d173a70e" - ) |] + ) + |] ; [| ( f "0xfdbf22da64f8e1ee341bd73367aacf3e9e20f97cd223b9c59745195e0d704916" , f "0xb46ed0e787ea9ee1f7bc9fd50175eaf7cab73797db3ddd767d93dead90e32017" - ) |] + ) + |] ; [| ( f "0x16e65cc23134127cfc68e44ef100d8b8a47b0e020825b97fce81c74bbcece70f" , f "0x82bd03cf0d909fb44d424c26ff21ed9bb9c7180139088de1357f1f2e4ffcb819" - ) |] + ) + |] ; [| ( f "0x9ca7c5538529335eaecbc7a55da4b128f289c0047b6a74b8b1a6ba7713254e09" , f "0x4c1487ccc4adc012d5fe291e2537912a5c24c029404a7792d865aa8d64660b0c" - ) |] + ) + |] ; [| ( f "0xb4c3957e2e296f6e1111851337466e152bc024a0cc7a050899caed6ab458c620" , f "0x00b38755a9011dede57c8ce39e4c774320b9243532334fe2f6061698c1e63a10" - ) |] + ) + |] ; [| ( f "0xad7bdba87679b3129d17bed2e95a1b8dc60e2a57752208ca13db27fae2ace016" , f "0x77866527b1427390659d70871b7258f8e178265f83f630dc0ab9fa70d6a59613" - ) |] + ) + |] ; [| ( f "0x947d766f4a176d023c190120d6bafdae28f917aa6e430b939ea9e19ed3025f20" , f "0x6a85eafdd0c4b9b976d3a7ddf58b4774e4d26ab44e336ed0d168f9bd3430030a" - ) |] + ) + |] ; [| ( f "0xdb77a0be5fb73b6c968dc7d0a279ccdace976b5e0cc3c50609b6b9dcd884e91a" , f "0x6dc786fe2b3ee3ee15d4dae84bcdfdacc68e783e6cc174840cbca1a4858aa637" - ) |] + ) + |] ; [| ( f "0xea4be0c3b97f87609ae6f1f2bfd84cf7874b81e8e41c3b998e8e69b16a165234" , f "0xf5947a229d5fbb3c557af6914e0cd07c35bf9f85e9b7fa78e8f7215392b6040b" - ) |] + ) + |] ; [| ( f "0x763265f25c25feab4e963cc5a043a4d50b71a9733355a919145bad4a42cfee05" , f "0xfe1346074ab52ccb39ed32d389ca2c65ccecd5eb4195437df2a448889fd5e00f" - ) |] + ) + |] 
; [| ( f "0xb2e59b4673aa41e3e2f56bb87cf6ea44bbc3a19d4af844827e25ae175bc3df2c" , f "0x92b47d9b9eec32f13ea16a813319f720c7e50ded52283771367931175a1fb507" - ) |] + ) + |] ; [| ( f "0x5909cd7b0735cd5fb74a0cff026560b4748e2ab3ced55485e53f1c6d517fc03d" , f "0x7444f899ef75863d894da83210ecdfec2aa4b5ca5dbb7cb6c0cd53ba1a572620" - ) |] + ) + |] ; [| ( f "0x264dd2d4b91cc899ff4f5734d04b7e3f70590f28b05c03ecd4994609dd146314" , f "0xd5babf4d0024dc7cbd3e0f2af4790271725d2095fa0355cb0e14d108ab4b5332" - ) |] + ) + |] ; [| ( f "0x73e92ca3a87d8a3c656b2b422611d5aa14d3c7b121385e529b4995eb33758222" , f "0x07eb91ac17b3ebb3d6fbe5c9b6939e7423fe969fbb7da8121bd7c5b50fca1304" - ) |] + ) + |] ; [| ( f "0xf7c1daf33c4c5a799b61459de8127e7dd74c827ac09897cc03cc3b5a7076943b" , f "0x9800277edfd009c8e979c67b2bd318b39f6f97accdc44fa25e4150f1cf101438" - ) |] + ) + |] ; [| ( f "0x3dd70e7750a20c95afda478337eb15c9fabfbc14fee46c32af15039d97a8771e" , f "0xa036ed81074ae1070a666224a8f3965240cc3a34bb5bb9559cdffb119e523f12" - ) |] + ) + |] ; [| ( f "0xca84eb4d2d72dfa00f85e90172b387b10d4e03547a8afa3d8e0bd5f6d9a48c3b" , f "0x51b85cb633c7a9d07112a2b3435452c3790c38ac6d10fc184ec565ddbc626103" - ) |] + ) + |] ; [| ( f "0x9b135c1f7890130b61c3cd244ae351f6c8cef534f68e554d1bab80008828041c" , f "0xbf56049a61bf3df1f8c50a2d69acf18097a0088d2901bde4326c26a5b7e58536" - ) |] + ) + |] ; [| ( f "0x9da8d160bc1490656d6b150de27615da361713aed1d2822f0a23c4199634663f" , f "0x9ad3190531a3734588d41be2fe350a0a0d79ac96e9156c1094f19ed1b3378306" - ) |] + ) + |] ; [| ( f "0xfefaaa326a65413e65d1372b721e9150474dc1334a763146e30cd54009d46e16" , f "0xad401c753924a607ca6d0682400ffad0c1e51d42e747a179fccecf0762659320" - ) |] + ) + |] ; [| ( f "0x8ffa680b8942f7ba68842a402c6138906397e585eb297316bf17652bfce96623" , f "0x14151a2beeb6a7b2f8f1d948fe340f691797a77fd07d688d8ebfc0a89e19a707" - ) |] + ) + |] ; [| ( f "0xf42a7139a900eb189240a97e9c3719b834facb8c6480eae8d4eae29f7a47cd2c" , f "0xb2d1c2eb6fdabc639421ee011e5f706a5fe07971553f38f321971cf2a7c38024" - ) |] + ) 
+ |] ; [| ( f "0x8390949512be355a9566f42abf26baec36616454f1541745cad1ae2851170c1d" , f "0x3f5d00713cf8e2a9a802497455c4d0e5fe510041f65bf1c2dc8427500f9df001" - ) |] + ) + |] ; [| ( f "0xb0ecaed5e67e67e7cfd08696c9728c92ab7a53980b0c0f8534ad6e9ed2d45938" , f "0xf7b0dc30965f52adda277f3dc960fa2b83d433f23051dcb098fd09727f62c81e" - ) |] + ) + |] ; [| ( f "0x9fa0becbbf6f62cfbb5bee9ac60fcaf61c122b55b21a67ff7bf85de218d27330" , f "0x9c780f5c77bdeacd2acbccf6874830029bbfa68700abb65f75b7f2f966498739" - ) |] + ) + |] ; [| ( f "0xd0cb9eb836c2cecd225b7f705ddf964ce83dfd20f7ff269cd08d6733221da938" , f "0x882f79f7dea8034fb86da0eda01e16a35573c5825133f88a44a903550fb84133" - ) |] + ) + |] ; [| ( f "0xa603b5a825cd4b050ee6b9025a6c5a83d772daeba5c29e50266bba0c6f5c1f33" , f "0x68dd79b6d42bbc96179d8e11b3e816961eaa61e1c282271c552ded45bddecd2c" - ) |] + ) + |] ; [| ( f "0x0aa5e8cee4a223893a35a3bfe98dd6b3d8903bfc4000a619cef424b43e70bb26" , f "0xec82230e9f1b700de152340a5989bbf77eb154f031f135611e7e08074ec7263b" - ) |] + ) + |] ; [| ( f "0x22984efc853c5656da0cd7c5c8bb702e6e7435804d74686c596176c7d3644f32" , f "0x9851a47555723912f41b2d28541651f2f1467fd923b98635bb077117d1b9481d" - ) |] + ) + |] ; [| ( f "0x7c6e058154cd76156e232755b91d49c1634c25ab50e188de8a27a76e3009f616" , f "0x3515c4404d2de754d78ec1c1269695f05c85b14b952be585d144563503693338" - ) |] + ) + |] ; [| ( f "0x7c8dfb6784d9bf00a520e6733109cf94ca999be2661fbb341310c7840065d638" , f "0xa90dd8b8a3cb87ddcd36ae08fbeaec605f22e74c4bb29c64b02109025f94a832" - ) |] + ) + |] ; [| ( f "0x6f89ca251c36012b961171e322160d160a2b1dd65d275356320197a6125b201a" , f "0x1bb60803b357e0bb12bc96d775622431d0c895de7bd32edd84bb164bd340ee02" - ) |] + ) + |] ; [| ( f "0x7ba7ecef537d5bedf84337ef49fa8920df2b50f4f9b6e45e5e2de0f3b4eedd26" , f "0xfc923ac7388f7c3ac7ff0d74c9319f91823a1e55526921d7ed0e4bde67bba531" - ) |] + ) + |] ; [| ( f "0x4addda8a5b00a3f2635ce563d338303ebbeeb79060dce22e81b6daa0bbb4522c" , f "0x15c1ed7cc3ddcbd26f019fb59b683af21752708c7c6116b743144bc598ca7a16" - ) |] 
+ ) + |] ; [| ( f "0xf1279c23915b9eb4d92cf8e8c4b8644481d83e973ebd6a6366ce7b4fd111722c" , f "0x4378282d7065d0edd2bf1445f961aeb2ef5e6c76e7896cb761d5d8e813b86138" - ) |] + ) + |] ; [| ( f "0x1abd21435c0b43d6aa59afe50d89b6f6ee6c4981aca73f3046cc365177ed9a11" , f "0x0f4bd3070ac988833649f223f34fe26c125be3f771ad20e1f5c87d499b3cb110" - ) |] + ) + |] ; [| ( f "0xe970b991f3e09ff3a785b802b2486be05180e6f76aabc636997ff5f1f0f0e219" , f "0x1b7a942c8c76535898e983187b9e32bd4af5d26033919e231e72ba4b43261b31" - ) |] + ) + |] ; [| ( f "0x2f634134c46376186440f56a564331f8c6ea3a45dd37f12f2840de826646a916" , f "0x789e66422faee59040273d7e8853d7d1ce644de21e9ba5b955ad528c2fc74b29" - ) |] + ) + |] ; [| ( f "0x736e26348c36683dc4ea6a2fa438d6d23ef779d4f9c7df883a16c9d438cff712" , f "0xef51720137739c8be8cff911f635e84a65faf9828474ef5b6de5e2bb7fe58918" - ) |] + ) + |] ; [| ( f "0xe5399a5b00664c369367205f62fb7628bc58acc5c18498914fd6d8f34173033f" , f "0x239db9ee7d6f0ab25e855767d9bb63f6f933a9fb741555c1d84f1d2b4ce29f00" - ) |] + ) + |] ; [| ( f "0xc324dda4193d856ab208f91d0f63bdb0b1ee605bc2db335294b885c0aa11101e" , f "0x4134c54276934291df75a92533d9620bb10484cffd8517989e8625b520a03e25" - ) |] + ) + |] ; [| ( f "0x66d52cb511af2521fdcac1278f78d54e5354f7f96e8834f4ddf5921a8550a039" , f "0xc4408bbea2570c728c05570951b6ab480c63eedc54d126c41f6bc2e94672c404" - ) |] + ) + |] ; [| ( f "0x6a8b9e41402a049d01fc953fdab241ce2f23a257c9978d8b1927d2e8f08f0e3e" , f "0xcddb19fcbe06c3670ffe290834ba90643f3a7b4f2c66b774c98f05de8d272c2b" - ) |] + ) + |] ; [| ( f "0xa1a19bbcd36020cf517a7e93cc1720e88349059ef669bda7d9ee9b324f8d1f08" , f "0x980a8abe93a942be759b7006d75c2459cbb99c869e34de1f71ddaf279fc94711" - ) |] + ) + |] ; [| ( f "0xb325354034abad43fbe43ca02d679efec4a33c9e169529a99cd6b57af6a3943a" , f "0x74278c309f4406b9bff7a0d2091cc791755571f97ce4d70ec3836e35a9196422" - ) |] + ) + |] ; [| ( f "0xfad915644e522ff76eeff36d09b0948ea47d2aef08159928b40eec332ed80738" , f "0x811888749e727d32826c44725f3f1b936affc9cc208b23cd7a0e0290c7a1c810" - 
) |] + ) + |] ; [| ( f "0x6a47718321b96a709f3cb468eff0a18789885d7b9da9394a7dfb093910f3a21a" , f "0x17589d4d4fef87019afb6ba80406688f9eb47ff7efaae34d02f93dd88d3f5b0e" - ) |] + ) + |] ; [| ( f "0xc734d5f12132b0da1ea6944ed28d8e42b49a33567e5dd4fa02321ffb0f3d9906" , f "0xe1fb2b74045517029a65c979d495164a2b825f480ad0d7757dcf14cf3199b93b" - ) |] + ) + |] ; [| ( f "0xf3909304b83098b4e40fd429184766ea3e9ae71a35595f283ba1a27cffefb919" , f "0x5a16b73f79f0b087e476bfaada5392c19ae4dbfb3f82986f385172e39a82670a" - ) |] + ) + |] ; [| ( f "0xd8e9b0f30031bc2b4ff7dcd0df7394ec1dbe1c3e2e821d14035dd8cbc29f251a" , f "0x88e61138f761cf544dc3fc9542376b1e16ac241f70c3c5d5ad7a07fcc3afd034" - ) |] + ) + |] ; [| ( f "0x86a56a2590e6a09156dc7a5d6599a3706e86545a5a9b15630706c857e438e034" , f "0x767f0bd621e7e7ae677c21babcff96f771c86d0b63eef045f957550d0b696e0f" - ) |] + ) + |] ; [| ( f "0xeb4da089d0aded5a67b35a9afb3c9eef06f169dc53a580ec3e7f3a3678805501" , f "0xbac4e2dc21953e81494b667733cd025d3a0c7b1c3009d75335d972ec2a74b908" - ) |] + ) + |] ; [| ( f "0xd2f5d3b661ec1f83ba595c190f21af8fab9532c24a5986a82f7bd6f339c0c23a" , f "0xcecc70412aa2b0014c8b667e91546fd043fe210aa71cf50c80b79132e5e77305" - ) |] + ) + |] ; [| ( f "0xad955fdfb2be90bd39a2c1b31d7ee8335e2db1c416b21695b43322b245a0c224" , f "0xe89d31612c9aee292417ffe328ce26d1c758d1d4496cd37d6b6d17dcd9c27817" - ) |] + ) + |] ; [| ( f "0x69f6d2356a7300721133a100402f8ca895d5e8652660b18b654544dd43007b3e" , f "0xaa2bae5a309707fceb2d52aa78fb70e6c4675ffd19171880f02514603eb98933" - ) |] + ) + |] ; [| ( f "0x3ee0918198e12f8d782c64e402e303340b82246943958b771418ddf138c71927" , f "0xb83498b420db1d38c854aac84f0ae483f8d13ed3ba53de566d8e1d4bdd70f60d" - ) |] + ) + |] ; [| ( f "0x1b58308424a50cd5429e86f3d6240c49895773fbffa0d469de95b0e1a5871118" , f "0x2779c41012d8066ff8adf5bcc89ea056822f11ceca9f62322787716af08bf607" - ) |] + ) + |] ; [| ( f "0xe2042979f7c2b5ba0e77e79fe41b02304c12ecb3b39a2275c0985f79962e231b" , f 
"0x87adb2152b11dcde9f60e62c54a2dc5570791b6bc1ce64dc0ddee3ca00726a2a" - ) |] + ) + |] ; [| ( f "0x37934057801c5ae23d93f5e57bccc02d686938d4d70afed6918f6c455c842a0c" , f "0x405091aa81452091766e9195e66fbb8d95ede9856fd702ad426511649b883e1e" - ) |] + ) + |] ; [| ( f "0xa25cee6896afaf12112106e7f9a8ece847c271cb38740a49ea106ec3664a0d05" , f "0xe90529888f42920bc61e1b0b0b53a3c9832f4e2c91b752e61f9967c6c2020703" - ) |] + ) + |] ; [| ( f "0x4cf0b0d30ffda251d8a3b12300745ef8d3ecdfc378539d2af658ab483fa4241c" , f "0x25bd3fa319dce38eee8387a76a15e1d03a34cd4c7dd53c352577463136f17a3b" - ) |] + ) + |] ; [| ( f "0xc5a9ddb2c79f375768aa132868ca1815537daa19f7f96ad802c3a4a0ae50e333" , f "0x201906f27b5dc2ef3ccea86cc585035ad8e5459c2cd63ddc9c2465701ef5a83b" - ) |] + ) + |] ; [| ( f "0x4dacf6ea871c2f281d27c5064eacd74180e4c85f40bea658aa5140dd37ffb52a" , f "0x4fb430bfa42a8911301c49c83ef7e0e3427644b40b6514e7e60f67d35b629832" - ) |] + ) + |] ; [| ( f "0xa92e76d24e8d9ef86432fa94685b49c9540f84fed33496079ecb4b9f6c77063f" , f "0x219694a82a5eeb32ccbfe2f581ae4f49fc0cefd18496c6ae00c77cf256fe4d2d" - ) |] + ) + |] ; [| ( f "0xacbfcba5834aa30475c9fc8172b815ee9134b7e2833dbf6152e6bbc98d1fe221" , f "0xa9ba73c7393b99fa876545e4625d14acf34772e4fd09639aa100849fd165ac01" - ) |] + ) + |] ; [| ( f "0x9a3dd58a094523a3da8c33281855b33c199b83f1df4a44b5ad8cdfb1e8c8ca0a" , f "0x788eaa57ccbdee7dc9e0a26d6bb00abaddccb242e65277c98fae86992184b136" - ) |] + ) + |] ; [| ( f "0xb1c25958f11c2fd1c99c508924b70834b47a57b9840193aaef09045c89330a3f" , f "0x596c23fce3d5d1920d930b5569c886271449eec6f3f4fccc29e3c64199eb9e1a" - ) |] + ) + |] ; [| ( f "0xe0840670ca9f7474b19ab9e20eaa489d8d9164198d192d2e164d28304efc0926" , f "0x1f23848c011b6d839ed40f7d9e0e26447241d1045a883b102d7b7b2be0bbed36" - ) |] + ) + |] ; [| ( f "0xed7ec434b7290661984540f3db0f0b3612f54de5c60fcd1f9d7adde66bece13b" , f "0x8eb248469e444af90129fe2d18b2b1ea83cbba29c6831f658bb8304933abcb1d" - ) |] + ) + |] ; [| ( f "0x26832d9430ba751e1a8965152545fe50ef432a700a7035367c15f274f252b826" 
, f "0x8bfc77550f84c666b950869e35a073444175709b09608899352ba7e725586610" - ) |] + ) + |] ; [| ( f "0x61bee009ac79e053bf93e73caaab64679e7aae7f577c4b62ade8211f04173139" , f "0x18e2ff5368c1b1dc7faf59cde7fcb5983eee4f6bd25e7a738c98e0a7b6453e24" - ) |] + ) + |] ; [| ( f "0xde486d3d1c19d1427912d37ae47df5a2bb061d3eaf01f4695c99b4f2a16e2901" , f "0x0fc421715406a4bcfe6d242f35a395611e724c3b850dee673bbbdefcbc7b2938" - ) |] + ) + |] ; [| ( f "0xbae2120e3412b26f9ee88829d3940abbac30585436a78f041730aa68f8093b0c" , f "0x00a9ee940e88b7acd533930c22508aae9029d70e66d90b5a254da0955c194f2d" - ) |] + ) + |] ; [| ( f "0xbee2896fe877053a27e5cb97fb8003c13e082b16f22a68951cccfd1da7409e18" , f "0x684a7ff9718053b0b59b24fba84d6bcae0c748a8c992b9492d8901e835419a06" - ) |] + ) + |] ; [| ( f "0xc222546be38af2a9b98d3fe494bb4202a29ba621c26c76df74ecdb20bce3e12e" , f "0x8b3b26b79ce7d40707b597b967c1d0f3744c25b6b581c19fb3b83d2c23813014" - ) |] + ) + |] ; [| ( f "0x35673b7563a2088f24ac9101d759d3eab2c4a2594357e03bf5e550fff9591c09" , f "0xac739971961fa8b54b7ca13852303f8a0cbb0111335bdd970c570e23060e541e" - ) |] + ) + |] ; [| ( f "0xde17867473bf8cc6fa9e4e005fb5ac4ee8cd4409c496ca574a10c09511e52a21" , f "0x977122865a22c246eaa364479f03ecb7cb4e549a914d4252126de220ccfac60b" - ) |] + ) + |] ; [| ( f "0x6306cb735f62173c5c28b8f57426fef1e0e3933a0fc2a4cbcd82f92ac4d3d405" , f "0x238102fa88f07afbff82741de392c593549f7444e3b180b3acd4c7b0bb42df3e" - ) |] + ) + |] ; [| ( f "0xbf1768354fa9468a75105b8517042be120ee0500fce2119d374c4efce03ca936" , f "0x5ac951e936ee6df2d290796aa1785a7c1ee27c8f9b7cad78c0597070b3d0283a" - ) |] + ) + |] ; [| ( f "0x2eb1570ae8014caf021809f482eda817bac715ade7cfb6f5d26e7213d9b85f11" , f "0x7c28f055fd88448fe162d9172a034c72015f222dcefab1052c54e0a1b1ed6809" - ) |] + ) + |] ; [| ( f "0x74d770c520d9587fe7eab0f58e509775ed1a925d8453b01e9f4d153aea4ad433" , f "0xc5c8b512f36ce4dd497e4a8706e7f31579c4b04799455ea65c86a475f3c39f22" - ) |] + ) + |] ; [| ( f 
"0x5518c7a85e76147669bc025634fbe1a33274c7fd71d85ad79fcadc9c72c4513d" , f "0x8d485f0eaa21953c25c90d71897cea2a6e9a52feb28e6dfa189967068bb18219" - ) |] + ) + |] ; [| ( f "0xbe2d7567c3fee30b4e422d6e4c71525b58eb066e0cb702008b421c849373403c" , f "0xc70358f1f72c870647925fbd881a2d9449cea1356264943375d68def7317af10" - ) |] + ) + |] ; [| ( f "0x2d7d3b05edae28c179a677c9297aa1a5bc3c0a0eb3866dd84090e8e51b58ae37" , f "0xab85786faeb1de1bd08f174739dcc85e5ae82e9908c7e73954b122de64a02a19" - ) |] + ) + |] ; [| ( f "0x4ce4ba2bd77fad6fb20a1c924382defe9369d95166ad9e5329d06945dad19b1f" , f "0xc824d8825bea9bcbabde6526ba25ecfab46795db4b5a366212cfaf934d7c853d" - ) |] + ) + |] ; [| ( f "0x8820e7e9eeef44347897ad4a54a059531d6321873b330d080531a736bf950601" , f "0xac5d968e87ddad2e71e58d3e5804967a18016ea7e00787eaea5b5436a2e23c01" - ) |] + ) + |] ; [| ( f "0xdd220f66cd859693e00eb9fb81a4f2ac7fc3b39817bac5afb77935702e2c420f" , f "0xfc7212456f4d008dda1d9303f85d9e3f9f333a6caa17b787d714b794f14f6131" - ) |] + ) + |] ; [| ( f "0x9bf4bafd49f22a9955fbfebab560b98dca79629a7220ce8dd1d540c58dfc803b" , f "0x088303945eb65234da09582f0ceb573077162d82ffd96bc300271e2a50fa8d33" - ) |] + ) + |] ; [| ( f "0x9fe981fd4bebb1df44fa23fa70cc853b16f576bdcb88d34952b9303d500d0f36" , f "0x9a28c6e9582abd6c880b7c8936156bd784993acd9dc023591a1b6fe235313803" - ) |] + ) + |] ; [| ( f "0x00671c67819686ab1e64a2761b76ef332b506ce3adef39637c94aa1c38772c12" , f "0xfd675aa0b0456c316aa7c5232949ad9a660fc62a1cfa25bff93038255c5c5709" - ) |] + ) + |] ; [| ( f "0xde3b2e85c43e9fa98f1950371dac39d39b3c460983548c57cc0fe0bbce16e818" , f "0xac7079dcf65f3693017a8da3497217681fef07198027cce031dc0620137ed312" - ) |] + ) + |] ; [| ( f "0xd14dd713ca3c6f210132b6287bba7329c998c388fa8e7315c43b00d9c52ce93d" , f "0xf19a7b17dfb7faa2e8ebf132c6dc36cb4d87b6f1e9b824c773ceb372b5c19c08" - ) |] + ) + |] ; [| ( f "0x28c1a8fef95d9bec52132ebdcd920230e222c5a14dd47cc3ba1597b69b199134" , f "0xc6a4c172833080ffec3a402c6c72cda9c93b02ad9b2b64d5908ef5a403c42516" - ) |] + ) + |] ; [| 
( f "0xb649424636e93340f3d69d86cdbdf324e21f303def5f29a7548f70621bd3da27" , f "0x6d9a7ad2fd0a89e167631a50cd69644cb949ab3fced1d8083e079e2d3cab3e29" - ) |] + ) + |] ; [| ( f "0xa31f07e136689782eafe9d222a0d9186fa3799f15ea350ba853db8d6caed4b06" , f "0x71e6a39f39036255aade109a08a41e4cb29939d40799a73619534052b0aebc1e" - ) |] + ) + |] ; [| ( f "0x387862619b210a142edfbffab12f434e387a7e7b6c260a1fcc28b2ae29a9a71c" , f "0x7726a40e87f6ed5b3f5aaefb9f1c953545befc0dd136d778960119ce665b8f2d" - ) |] + ) + |] ; [| ( f "0x47f202d7c6fe137576d0b49c2013c7824068ebb5bc8fc3f853ecbb2a470e1804" , f "0x236b22be1f4f9885db66a1eaa63e4448a7092bffb7eb496a2beb5f4d2cbec323" - ) |] + ) + |] ; [| ( f "0x761fde204d57d1daa56e0cded60ff085bd66a82af22930f5aad7ea43cad4d31d" , f "0x4ec1db3a8d5450de8ba1ec67fe83f57ad975b6553f3d43dc74b5a62d99405736" - ) |] + ) + |] ; [| ( f "0x1428600a45d01eabdf41411a81d54e75b15ac773d0b199f24fdcfb89b8c9870d" , f "0xc4f47f9d9bd8c02a87f3ed162181dc74a9f28ac398ee1cd40e1b6b7bb6b4d017" - ) |] + ) + |] ; [| ( f "0x211695d4a6f7e72eaafc4f896ff0666656dd656482bb0267900c7629275bda02" , f "0xe2d2959129d8443d10293b55d5482489868dc8d12c4ad42afad2eb165e444d15" - ) |] + ) + |] ; [| ( f "0xf52c19a3efaae96589c3f0cb5622b772354613b53d782ed92fbef9a5d641d63d" , f "0x72a97210031a42339503dad53e0d3eb496bed72988d2a903bbfc35bf4415be19" - ) |] + ) + |] ; [| ( f "0xd14bfe4b91e926bdfacdc4a924e222461e9ae4cd432406a96e941dc29d4a1300" , f "0x24a1051aa362ff71deb59e748582dc3f0eb04044c221a297dc4fbff55f9ac412" - ) |] + ) + |] ; [| ( f "0x9e5f702b0defd2c03ef681f1c789bb85cb48ed648362f7b0eee3f0d3b0d3eb18" , f "0xe2cd8a8201c7602592fec2983b4e0a5874a4fb7b68622fd5a085c5b493b45b38" - ) |] + ) + |] ; [| ( f "0xba0cf291c29a351916b530122701d00163150068eed5931dfa336873e966b106" , f "0xe7005a67399a8272a6935aab8e9bccefacd787f0071b4251799cfbd9511b0b25" - ) |] + ) + |] ; [| ( f "0x4deb5a6d0c22b43857450dcacd5d342c7bbb8a8504cc99dc023f0a0f997ad935" , f "0x140fafbcb2862928fe367ce7619e562f39082614494b67130a3d1502e3f12406" - ) |] + ) + |] 
; [| ( f "0x28e343a07d8097fe26fb0e472ba5341b298dbeb64179a5eff164ae87db59540b" , f "0x7f77613b4c1ac5cf6b4e61ce636e61f523336d2f358b4333cb49e6a6defcca2a" - ) |] + ) + |] ; [| ( f "0xa53d76586549743afc648f5b03a6e30a915b4842a2c00f04dcdda78042e43a3e" , f "0x3fcd82cdcf3c7e86f75a55e9484a17bde1d4967c6ad8e929115a6f59edd79a10" - ) |] + ) + |] ; [| ( f "0x9ff4bc1cbf9dc1cfdc8826e3fc3e15bba1aa8d08501c52f62a439f61d2d87924" , f "0x38c4e57869cf3780a1d69d4956c3798070ae5882b0bb7752d9ed35dc0fce8a33" - ) |] + ) + |] ; [| ( f "0x351c8812a3f0e7561d3b6ada3f416e087f15ddf4cc5353aba790ede58e2a6138" , f "0x9fc057a0c3069579d7f5bf4f6eafa656c21e7224b94a05caf580687d197f5214" - ) |] + ) + |] ; [| ( f "0x8937fc5bfa14d07c47a32f7777d0e476413323714b10f017a23a94a7b0ecf012" , f "0x182872bb9821f01b16cbaa37763d84eb64ea6915f2eb6801ced5da6d3af30b11" - ) |] + ) + |] ; [| ( f "0x43d20ae09b23384ae8954e181d29404a9ab26d48373b65ea7fc6ca6e7ee20e07" , f "0xee84733dfda0d3d88f98e99f8e3a5ff3ea77d69ff090aa00f9a04d88a9892618" - ) |] |] + ) + |] + |] ; [| [| ( f "0xc1fd8fef715281072214873acf190fd727879d4bf9aa456db198fc0313679b02" , f "0x792427a827b0e7641e8f93fd983ae602e8fce3dd711100170335db26f3e5c73e" - ) |] + ) + |] ; [| ( f "0x0a1a10d04bc4aaf6af2d771d16e948003436f985ba012e1a8706dfe40f653e2b" , f "0xda95f195481c341c4da6f20557a266913c80a044e8d8722262e56554878a4f2c" - ) |] + ) + |] ; [| ( f "0x2e3022794d7a820cfe9a62514c1ec8efd040e761a8c5c4f75bbba5ef2cb3c124" , f "0x10dbdb367172f4313e8293cc633402cfbf40c48226cbf2a771fdf0384f92750a" - ) |] + ) + |] ; [| ( f "0x308c58eb39735488b5f49425e46cc2b6102c65f981e97d4763d0731e65a5490c" , f "0x845f18df19729cd6d9e6c509313131f26daa0f459ba66570182f26a48f600c02" - ) |] + ) + |] ; [| ( f "0x6a44da3022bb56108e253e5adb9b513abe876f59d9bf9f0cd587ee339894ed09" , f "0x7fa0f3d6b7004af50a89c77e6ce911c4fffe11632ede9987449702a70feb4831" - ) |] + ) + |] ; [| ( f "0x454a6a2ac0bc2e39988163c2ca69f192cf01db54b1bb3cf06153084c38351c19" , f "0x44d920f6b93a8fa0de52fdd01d9350162868da33df52311723dff8b81462e906" 
- ) |] + ) + |] ; [| ( f "0x08366725bfccac7226630b7da5687f022f9cf56407bdcece0f3a4fe60d2f2433" , f "0xb92b80fc8af69d1509976c23587b19d51b9b7e21d940378ef7037aa0a4e9d01a" - ) |] + ) + |] ; [| ( f "0xba797af1806618471601dc466b392e6ae2bf27691b71e71168aeb55dab2cd033" , f "0xb8f372baf46f8eff0271d76a17a642003e80a05a4870bb062d66a11b8d9b4809" - ) |] + ) + |] ; [| ( f "0x20605e0d5d17a90752f474bf8b82252ea58c9b7ab3bc32fb423fdf6801eaa21f" , f "0x73399afa4208d8ba4dd331737fb3ad4d3fb79a269ae1e2169501f76083821433" - ) |] + ) + |] ; [| ( f "0x2c25a9ce7a2dd41037bb0bd5e8cd2cbd39b803238173d5c5d9188c17ec199838" , f "0xdc48fb5164d2911475d1580f872f0604f2f630d36ca2fd3fd18e52124e82172d" - ) |] + ) + |] ; [| ( f "0x0ee010b12849280bce447108298373e98c49e7294f4c0812fae3e5cc3eb74032" , f "0x48eb4ec347e148c909019844140bddf02aaf0386d1d8bb4e9b0f6841b0acd307" - ) |] + ) + |] ; [| ( f "0xe334592a8fe2fd6a5808f5cfcbb62abdd9dbcc2f161b67b34e1af445258d2f2f" , f "0xb470fcfff2f51e8299ec56ec198c1a53d7fa5d9aa0e5209fb0b4e837449aff39" - ) |] + ) + |] ; [| ( f "0x0f11ba07af2ff9206283945cd21528bf5b8fc9c27775cd8484fc0d6065bc1218" , f "0xd9741a678c7ff59d79ace79d34fee9c09732d243d34df655a24c6c3db328891f" - ) |] + ) + |] ; [| ( f "0x6b237d9adf2edc86b2d844845115078d479383cf62ebc48acbdec418c5ca591b" , f "0xe379d32ac2d0eb4d10f82ecfc5816800d530bd44a5e80f44085f3a755db0b928" - ) |] + ) + |] ; [| ( f "0xc9572ab7e835add7e6eeb8e3d371bf7761844ef8456f7796f008a01b3ba1f732" , f "0x91ceba3107a05dee5ea74271036b587c00e0a8a77074a96a62dadf85dca02b2a" - ) |] + ) + |] ; [| ( f "0x2d950fe90a470b7aeb2c1ca759c928c93a0de02c7b0a6588a1dfcaf4feac7527" , f "0xb80b82e3712758786f9fc7f14d7ffc9cc1d6831b8406e5f8b7ff68112d153924" - ) |] + ) + |] ; [| ( f "0x4e31af770d1a997718af3b13cd8e383566f462229a6b6c930b9a617fd5df7224" , f "0xa73dfbbded3326c47f2eaed9ae0e0df3cf36f3e6244d49d83f8e97b9295dfa19" - ) |] + ) + |] ; [| ( f "0x5c99e493c7392faf9109669a06fdf30fbf6851dcbfc39b7fe238e423f2b0d334" , f 
"0x02a2190646a02bd6a1373dc37fc81b464a63aab636571542e72abd9d9550aa0e" - ) |] + ) + |] ; [| ( f "0x4892a0f803678ad0c6935bcacfb5a948fc7f8205e9998fa4d422066e4c88462e" , f "0x35691185ce763047923ee3a9d081b7aa9277a48d4c3cd549eea78df802b86319" - ) |] + ) + |] ; [| ( f "0xa8ad6616691120d640426501d7fd42921ece13362d9bd623c04fe67834598339" , f "0x4bcf1ad71e33026bd2f642a44d602fcf0b87bfeafb93c53441da7b0561b76c28" - ) |] + ) + |] ; [| ( f "0x4d56373a09a3aaf52427066957ba3d95c84c12138fe7926cf84bdb0cefded728" , f "0x83819fb230f88ab1153dcb42d6f805674d21534c4449f27c63474eea1b7aa708" - ) |] + ) + |] ; [| ( f "0x7bce27fb9cc275a02e353298df4b8583d565b3345c8ad8ab255b8cc049736f0d" , f "0xfa159cdacc067ec2e23b2f4e540f21180acf093fa3f31f1b0de4dcbb44125439" - ) |] + ) + |] ; [| ( f "0x4a5474d21484ad76e60b65abd6e3dffbf9834ca02485f27db38370538ecf5f11" , f "0x793c78c34d41fffd044dac61f22e9278e59e711d0737d06e8b9c9176507bf73d" - ) |] + ) + |] ; [| ( f "0xc54a3a4663106464cfda0a426f30f50e88bacb1e135fdb0ea028bd9036a88537" , f "0x67fc7c34a9eb3f35675d3d9ed808c9e6711c83a0b79e5e718f41d898abe80b27" - ) |] + ) + |] ; [| ( f "0x5834b138ef08a1ac20182bad9a3cc1acffe70ffa4b1b884c6306a46786d16b2a" , f "0xd39585c02eeb2ee34a853378371c4be7807ec67841f8d198d06ca867f14db42b" - ) |] + ) + |] ; [| ( f "0x66165e34345ecba95a21774b274fbda556b05d3509a9e596f92b92ae21bfe009" , f "0x4ba9b805025fdd23f275430f3cc1e91a0cb026e4d96fdb9108555272c2207001" - ) |] + ) + |] ; [| ( f "0x7a95680717ec434a6e7dc4f2a6452c5d21bce461875b1f3da39f8b7155f0fe1d" , f "0x53c4e7bedff43799f57d4aa8bbb6bc9dc0fd31aa50020598bb026b8c6f0cea14" - ) |] + ) + |] ; [| ( f "0xbb1afb13802df22f53e09cbaa8fb1fc3d923fc5f1f1002df29366837b6b74621" , f "0xfe40732f7e7eff7fabc887c60653a536396b1102412e9ec5afeb435f35c66110" - ) |] + ) + |] ; [| ( f "0x0c9b809038c9f0861366fae143c2dc219a74b6f8366337c58dd2257c2916af0e" , f "0x41a149873ff69741b3fdd6a82cf8c135aa7a31e4a3024d8126c6a30d32d28935" - ) |] + ) + |] ; [| ( f "0x98517de98fd70ae9aeffbcb536c82114a63231db21e483168cf1d76682732703" 
, f "0x51e07dcd8f29354f535fea96432ee5fb9045ad2fb6a3cb4719c0c6edacaa681a" - ) |] + ) + |] ; [| ( f "0xca69f2dafc624b4c39e589d1154273de4c5a934fbdb0ab7fe78c7813074f7930" , f "0x84dc21f967ed6c766f35942856737e015684b165278bbc057ed1a134a20e2536" - ) |] + ) + |] ; [| ( f "0xe12fbe4cdaf8669b437d2fdeef6e87bb866ea5e2b9d314225b6759d5a68be819" , f "0xedf7b9a79bb98ecc695deb0fc77fa50454a1f11253f400ab8488e0fc5e52eb16" - ) |] + ) + |] ; [| ( f "0x15727d715e5c1d1ffa398fd35246c1374c4a1c4ad4d0c1c0988d0c8f8726d83c" , f "0x75db1cd7990a6d168d6661c325c722c580f55b484e5ee2d3676aa9418a332513" - ) |] + ) + |] ; [| ( f "0xb30949d59bc2702bf2888e8c5a8d26a6744677431e13badfd5bbe606d3777134" , f "0x7b41c15d89c4de4f42f45a97f146993fe826bf8177f7be66d2a9fa45eb6e9404" - ) |] + ) + |] ; [| ( f "0xda7c5cb70f1ce4186636d20de96245b09853cafad6c54acd7c91368331667136" , f "0x340f80db843ad081d8bf73d3e7274bc8d4914f1f1822b3d079a8af298a159d27" - ) |] + ) + |] ; [| ( f "0x51d6886172db3f08fda1156284b05877ff39f4cba3536d69ed182336913ea816" , f "0x77a8d16137738bcdaa59eed2dfae394da711e77e2cddf8b50e7dc20f178b5d37" - ) |] + ) + |] ; [| ( f "0xe45e37ed8668767e187e7221c13d5294c99c2fa5ccf62e21c1fab06509d4470e" , f "0xbc6bb22384cec1155c3a70def5e7d7f28c361c09308041974c994aea89bdaa3f" - ) |] + ) + |] ; [| ( f "0x74e7e51b4a55dd0b5ddeea3b95915715dd11aed9017dde093c82e172b76fdd06" , f "0x151c5b2312fb1bafffd44a776ba4bb5fec5ac7f568d205fbcae7738b9160880b" - ) |] + ) + |] ; [| ( f "0xc6128070caf79810db8614368dc0c2259d5d4201ae3d524304077c454809aa31" , f "0x755429f01826c3141937bb07062f568f3158e53dea8fdf45a26dd051b9edff0f" - ) |] + ) + |] ; [| ( f "0x02fb8134a79d34bbbd3698458e84acc91dd757db5bde60f9adb4c3290b40763d" , f "0x49dc8668990cbdcb8c50c98bf1eb785607458198432ed41b0b6d541e2450392b" - ) |] + ) + |] ; [| ( f "0x702b27b6531672b5481b937b69ec9e228561c228fc8997742cdf481517a14027" , f "0xd25963152f6c96cf1347e81f0511847b972e727d7ba617700229d094349a371d" - ) |] + ) + |] ; [| ( f 
"0x67a681ab16a1a515117ff3c4530e0dab6d6da57f5e175de7185934b1b5be1b1e" , f "0xc46a8a42c6d98d1f4a009c19c25ea329225ab902cea465ad1ac9684e81c07f24" - ) |] + ) + |] ; [| ( f "0xef4ccf42725e3c2039a10b865a07ac4660c745a1b695a99d3a29f8b3896c5e3d" , f "0x857319a1d776d4e3bd06be59a3e2131292bf30e1adc4bf840786dd9286ca3610" - ) |] + ) + |] ; [| ( f "0xeceb54f84acca2618bc93d888de2cdf6d1ddbccb32582720ea1e73cb1f94a72d" , f "0x815ade3a09edfcb0ef0f47ae526723af039ded46e804fee3fe3a697dd68a6919" - ) |] + ) + |] ; [| ( f "0xb59b4ae5b754adbb62b3aa349179ca6827d07ed66c13d4801c132c69c8603c2c" , f "0xf470a30be360ad845f5b8a5e8ef8a5ed9e145ac7e4856e9da1348cd4cdcba33c" - ) |] + ) + |] ; [| ( f "0xef9a93c98f41caf5389c4224f39b739c0e50624cdfb19d489070e81ce46aac13" , f "0xba658d06d53d577c439e3d4d3da71abbbe2aa230e1476fa4dc9cc6c9f4753b23" - ) |] + ) + |] ; [| ( f "0xa31e30cc38a6f61b552dc316a6814a7154e2d95c6bc00054d5270ad06ce7f204" , f "0x31a47105ad380b191fb57ed403a3ab6a495da6f3cce39aeb6995ddbbadafc22d" - ) |] + ) + |] ; [| ( f "0x511de0475defe37434a7bcceb3481c1f8c341855ce3ea540c29dc92ef3bc272b" , f "0xc0c515b2a5dd3c0f558da3337f4a9464f775316f6dcebf13538d2d3d0ca5da07" - ) |] + ) + |] ; [| ( f "0xaf533e9044ddb4206920d9f7a9b068897932b3b0f4fe36def3f09eaf59896017" , f "0x1f4f99bc00d5a23272b32cf6a0207259f0cfcf39743a2a88dc46eacc02640104" - ) |] + ) + |] ; [| ( f "0x88d71bc00d41e0a07395bd5a0603e71cf36cbff6790a540df238b72a03701d31" , f "0xed59e29d3ce2f927d459e215ef486dedcbe38bfa949773b32439e24b00898335" - ) |] + ) + |] ; [| ( f "0x2c6eba7c5a73200ff7f7e8a2ad508e050b5d89f75ee54ffea1b8afc834765c3f" , f "0x0ddc65d70dccd1451d4934ad236354dd63029b074ba7b6d68d2ac5051ac6dd3d" - ) |] + ) + |] ; [| ( f "0x58f4c59f0f8740fba7ba2b136694276951493591e341e5ab5ce71985d721a135" , f "0x1bcf64dc6983beff154bc62eafee72c405030006645591accd5e1ccc45969b2e" - ) |] + ) + |] ; [| ( f "0x1a45418c562aadf710c832b059cac019ecd04362e99055bbcd8462346536f427" , f "0x3fb4481366826b23009f7e3bf2413c78dd302d3f5d7f8657dac6a281dabe471d" - ) |] + ) + |] ; [| 
( f "0x511452ab55088879b9571e94413b853f065036551868cc626995da3d13128b3c" , f "0xdaf3f4013a4ca43f542622a191b70dbf7d9d69637d10ba35f51002e760cb0930" - ) |] + ) + |] ; [| ( f "0xbd8aff3e0e5f558b846e06beca481e8d06eb5ab3ea7fd0fb2d8ebeae5c52fb3a" , f "0x04b111a7dcecc1dd6bc22a447e5b3f6671f25a6e89d4784d45ee2ab382985837" - ) |] + ) + |] ; [| ( f "0xea307f0db5ac75c7e0b60747b6c697a9029ad170b554512529b383aa70e41f19" , f "0xa24440b0df1001377a55728258fcd3b58aba9199e67c476d2423a7518118a20d" - ) |] + ) + |] ; [| ( f "0x14f0c1a3524394f18c00578ccbd0e56d7930098aa1fc464ef552bc0201f18236" , f "0x0993bdfdc7eca43f8bd443dbab433eb83f327b8461dc14238652923af6bb4233" - ) |] + ) + |] ; [| ( f "0x39396c94688acca8328e7192dc0fee4ba390ba53b57cd666c224ff32cc30ac27" , f "0xe3560d8ea893771beffb291e92fb50fe2eb949debb1a5cd583bda65906c7f408" - ) |] + ) + |] ; [| ( f "0x1f6cb9a4289da0fd5c47800bcdf43ebb24caf646059c5778a4c7b7406f77d00e" , f "0xeb3daac296da479f0b7385927fca46f43803e67486900461177ab93ddefff72f" - ) |] + ) + |] ; [| ( f "0x79fdba63ad593fa5b5637afcdb6327f6591265b93e38384b7665045e59b9d83b" , f "0x5529d156014ed5737fe01e1af5bd882b3effdb2632ccb74b2cc2284472ef203d" - ) |] + ) + |] ; [| ( f "0xf4cd3928418e6097f3799ab7d50013d8a642b00d42f59eb660f3762d55b69426" , f "0x8817cab66af80d77d04412ccab18d899f7e0f2d356e81650491a0d3888719d29" - ) |] + ) + |] ; [| ( f "0xdd5da592bb12e80eb3b13dad7a41e75522791128cc0b14a9a2cc9d05e151ab3c" , f "0x55e2ff426da81beb3c3d0797c2293e1116abb1c39717db5da4de2ae86fd85b3d" - ) |] + ) + |] ; [| ( f "0x9b0a646e3bfa5d70126dad748d53e2a7e8fb9627587f4fd68c45ad37cde0a131" , f "0x63f4e45b56ae376ba79e01afca126ffa4649d8d1a76646cd7b1be25ccf1d3426" - ) |] + ) + |] ; [| ( f "0x5bd48fc6a7ad9ca8a1bf1703c4275420ee252164a5a0ca010a8842aa743ca93e" , f "0xceed2aaf1f6092d0a8ed3a2fecfc65ed8d33ec155d39603e4faeca9f33161518" - ) |] + ) + |] ; [| ( f "0x76cca5140c88ddc74fe56fd3425fbb7f671b5af5fe70652803943ffb8e5d3c2e" , f "0x0836f6eee82bf022535c89a43d51d8fb28f26757eeea0ed5b2fdc9b121641d24" - ) |] + ) + |] 
; [| ( f "0x864f21ae9b2a561abd4eb5d201a110a673e960894f193fffbcbe13f6ddd9bd07" , f "0xac8567a2299e557135a880a95db5702474a2372fc64b829294e4cd6c0fe31403" - ) |] + ) + |] ; [| ( f "0xb20baf4661b2ce9580458e919ed9c6bf365f9958e2abc035efc1bdb6607d3813" , f "0xd697a83dd9885fbc6b4b2aa06d3ba7523e71f4f4140e03e36eb0a1d7215c0f3c" - ) |] + ) + |] ; [| ( f "0xef004b01127f4c3c6c9e1cbf7b85960842899a3b6411ad65829d8fdfd7278d11" , f "0x5ef4e288d9d8741d302741315339cd2d69b4b4e0538b3a075e0da42c838b4b0e" - ) |] + ) + |] ; [| ( f "0x0cd0c1c11197e7d42c3d656f5135889a2475cad7a6bc9b00e93772181e71d63b" , f "0x1d1de2b4c98db0b9966ea06affeb3f4325a686b3218708eea945266c6f4bff28" - ) |] + ) + |] ; [| ( f "0x33339950fddb2a6f7105ff500ca8483cf1508900500b748a11131f9b9fddbb11" , f "0x1d21f511dd09726702a4b47bfb97a8be43f71b810cb96122985156e218fa5e3a" - ) |] + ) + |] ; [| ( f "0xa2bfc46f1cdb2180ca94db5ecdff76651652931e2ee94e2c8f7dd62d0797d129" , f "0x52ed3d4906d2aff1216d8a1e4a00e0221ceeb7b507136b57087625088514d829" - ) |] + ) + |] ; [| ( f "0x1652e1f41f2c19f533811bb24fdc53012747f97be9ebfd8863d628c7e0d61407" , f "0x860cbe3b0aeebe487a4e660478e472df61b4e5a858823233766bc5197bf99c09" - ) |] + ) + |] ; [| ( f "0x0b0dd8f8fa6ed836531de59ecca594d00bd4bd7708f08bd830c00a9d4841d53d" , f "0x3fa6426b568d10daaf07b3d541a73511e4daf31e7f54a9c46e99097089d5cf2b" - ) |] + ) + |] ; [| ( f "0xe2c7ecadaa8753eb8e9c1820fdb27e671fed6d88150130e2288a26aa8d25d609" , f "0xddd9a3f42f7b0e44b2a2d80647aed7a03ac41f3509cbef7a4f66f9b732e55518" - ) |] + ) + |] ; [| ( f "0x2320395a48d3ff92c9c1b18e29b10878eaf6b3c8c27d263265165be2959af91b" , f "0x2d7e99cc4f6d47a6f1bcc9c931fee5d17b3b08523492a3090ce865777275961e" - ) |] + ) + |] ; [| ( f "0x47a6f646c800f586e3f5c5167471719cdfe79398bbbee7eeedb51ae54c978d38" , f "0x56a0ac3dfb03ef54b0d9cfd8a856276ef390cf2ce5d03ef5cd22af4c35073114" - ) |] + ) + |] ; [| ( f "0xb26e3f62eaee6c8e5469f8674e10b1e967d746dc232bdd6b9b93f3bd1201332b" , f "0x5b03340d91c433f53880d1effa77b3fe35be92f8f824889d4786caab982d880b" - ) |] + ) 
+ |] ; [| ( f "0xc4cc9413ec9fa53295781a2791af268a4418e6019640f41e036ec69dec08c50d" , f "0x81bb76fa26fd1dd0b94cff3584c0412e30e0078bd1314f727b1d4f9ba042cc3a" - ) |] + ) + |] ; [| ( f "0xbced47f88c546259e527d15ade14083742a56e81ae04c74a3d0979630cbebe26" , f "0x5a247ddda94bb8035eb39187e4d804d0ea947615bcfc3363f6f74d32c2839b1f" - ) |] + ) + |] ; [| ( f "0x558adcf7ab1d0f3131157bdae0edccf4e18e4660bcd517c4475bf9ea3c42e21f" , f "0x7134ed7920ef295cd676a807e7512e287c0a435f3995724b6ba5c36ced43290d" - ) |] + ) + |] ; [| ( f "0xfa2237c5059039a75fdce6c66d30fd4b032afca9a600bb22ca13487d92949409" , f "0x27e7cec6c1801cd0ee4c994854ad0fa374518904a7e8f9baa751747609745335" - ) |] + ) + |] ; [| ( f "0x3d8f37f25ee08b6cbec49cec060d9651a1397ad00dc518a59429d074d4c72f0f" , f "0xa6fd05e8dd9eaf999414b1efda65fafda3ab0b27675a0b0f7ba94660f7b67718" - ) |] + ) + |] ; [| ( f "0x55cb20a857f2af39618319b29eecb407c85461bce4928b01a15058a68487060d" , f "0x1e6e2eab018111958610e7b2e71a6f6fc6bf43beca7d3da750998932690edf27" - ) |] + ) + |] ; [| ( f "0x7bee1545b59e2bf93b5e3b03dddddf5ccd7700a49e6288ea191b4b855c89ca36" , f "0x1514c6704baf64e94210e61959b12dbc6050d339cde11f28a10118514d693310" - ) |] + ) + |] ; [| ( f "0x2878cae9ee8de3a13d005ff4c259922d14c33bb6e24b97d6136a7c7616b6c21c" , f "0x63c4f9844107ee57f900039c2b333c88b0dcb7d1989c5c7936e83b446bd7d83c" - ) |] + ) + |] ; [| ( f "0x67888fcfe1e1e44e552d0f5c89cbe4781bc034a4527c3346fbe07ffe15f12f2a" , f "0x4626d409b1021526406b093a97082ccfd43bb19a29790ce4953892bfa444f129" - ) |] + ) + |] ; [| ( f "0x3c9aa481596a1d9b94586c3460b02f112abf868fe051c30e2c452a6a7d3fd00a" , f "0xcd2247223dbc1f4d572f9d4e2d58d5dec0f88aa56f93eb9e3756990a61453c02" - ) |] + ) + |] ; [| ( f "0x36fb1a30d36a4c39da8c7da86d097ac392f5e9baa7b39400260376afc43f1c0d" , f "0xdcc328ed20b82bbf9a5427c0d6beeb71a74db5f8de4ef3e36134c58f6193c41c" - ) |] + ) + |] ; [| ( f "0xae6773ea50c81f85753e0b2ac6268e631a8985f39d3ad2477184958cf1034407" , f "0x3b6953db6156366b8047374b852bb5d6a8839ed27db0bd845dde34b125c1c538" - ) |] 
+ ) + |] ; [| ( f "0x3b71fb45c6aeef1cb2934cb3a1c20cd088a51a890a6cbf09d91ff25a8ce1c328" , f "0x13f636ddb4e91d34161a8a6eff63635774703cae99b06d66b2baa8dc6ed48929" - ) |] + ) + |] ; [| ( f "0xc48faed9a6265ff217291f0b8ad08e284dd65e2d7801a567e42ed27842113a11" , f "0xcc52c2f065a5a6f061a20e1b9c78f18fd95a98a1af99a7322489ce67ae460a3f" - ) |] + ) + |] ; [| ( f "0xd967bd14289882be3a5d63fa1f6a2e2beaa4228f691904d89f25b161b1865022" , f "0x8521370efe3b711db343ba287f0aba262d5809597d95ab4b08ff5a306fdd1d02" - ) |] + ) + |] ; [| ( f "0x8ebcd4afe72806d97aff3f96225643780e89f6564e03e6acb2c123ab4127a401" , f "0x16ecc82f2e07ebf1f47e53eb73fa3a16356046ebf068b329f47e1bd7a3bb852f" - ) |] + ) + |] ; [| ( f "0x69efc7c8f27ded72c7f0070cf4953b4f70a387f8ee40c144765328d46603e822" , f "0x486ded9359a09d91c1e889812dc092703f07983e3021dd05434bac49af908d05" - ) |] + ) + |] ; [| ( f "0x235dcbbda235f982e644da1f16c3d1c9b443a3af4f130d2b767fcf4cc4f4cc20" , f "0x182d48ea3405ccc5d994479cf017ebfbdca63be5fdc334874929645a5c57a836" - ) |] + ) + |] ; [| ( f "0x7dd4da5d055588a9b4a13df7ba75fca187bf14c8708189a80e53a91a0c36c938" , f "0x2a7f2a1c13874383b757d087a96d7fc9672f4c7eaa579098fdd7da30a35a2309" - ) |] + ) + |] ; [| ( f "0xfc8828b5d9cdfc6b846413f4ee09c5174f50fa4587167cc825b6a5ad9035e929" , f "0x45c2eb7f1b93b8f98a4c00504b193c3c4ed6cb966e19aee9c22211638b327403" - ) |] + ) + |] ; [| ( f "0x4a64d698155aeb7213bc81a2e1f0d147a57210a00c3e4ad530798d4b5ee59431" , f "0xe9c608ba8f01b02ebfc0dd66d5bc09b5625d9caf1ae530c391721b46a5acf03b" - ) |] + ) + |] ; [| ( f "0x40f57d201e523ce112eb2469205378240a8fd05de9b9b3ee61a7e28a6688cd04" , f "0x07f486e729d367791d43c3e9002dcf2aa3ce35712e11765a13347f3abffe0d2b" - ) |] + ) + |] ; [| ( f "0xfbe8264ebfc4ebc88d248edf0318683dd0c71e458d395467c4d2a99a4a2cf22e" , f "0x7040e8480975a2465840cea77a6545230ade8a2cf732b3536c17732cc2c2c612" - ) |] + ) + |] ; [| ( f "0x18a1146de8edf123e53caebbb1f512ac309cdcdd096178b26cf929726db2373a" , f "0xa73230439080b0b420e5956c8742568a4aa5128bf0dba26bbb105a35486deb18" - 
) |] + ) + |] ; [| ( f "0x44e688d799346bee1ff44d512fae80dc7235cd327906a396b0b52304ea94931e" , f "0xe4018393ef563014c1e12fcb23c8f27073590e526af3e1eb60d45230208d5e27" - ) |] + ) + |] ; [| ( f "0xe594fbcc40b7a358b306ad3aca072b97036ac4eb4c175bd60900830ba324a22c" , f "0x6198bd6cbd80549b4ae7bbcf2ec82163f67755874b9174276adc211b53382806" - ) |] + ) + |] ; [| ( f "0x43f4caf4e5d8cb8db490cc6f58d1c0feb0a729fc00fea51ddccbc36b836f5517" , f "0x055a7688ec38044d7506aba142f868e68ecdf7eef079b82a6a63ff2fe32cbe28" - ) |] + ) + |] ; [| ( f "0xba65afc85e6e7e87a076476cd6a92c517b398bd6271a40e3c14d574a0628a527" , f "0x702c07b84b32204d05de13d4e0b71d6692a1cacbce5fa31e2ba95b942fd08809" - ) |] + ) + |] ; [| ( f "0xc04a236337617f79f8a106c78f13a8701c53f7f438aa9769cb9acc703e02a007" , f "0x0bc47ec4da9dd0ef089946c4c5544e4d8a1e60901a2bdddcee7494bf5e40c42d" - ) |] + ) + |] ; [| ( f "0x6cef26ed95757a18d200cf6efec5c5b00de71635dbacec0284711ab995108619" , f "0x86690594352fe1ecee358cf4baa164af020c96d5aeda47eb7b20e987440cd11e" - ) |] + ) + |] ; [| ( f "0x0deea1faf1ae24c5b2f29748649d9483f6f6d3f2341f1a0b31e88c20a0ca8920" , f "0x9e06971829c52a99a509e7e63ac2f67c63ba21eb3d876559d025ca20f2b19727" - ) |] + ) + |] ; [| ( f "0x27370edf34f7c707e4a5fd2cececd414e7edc1cc806fe68d2560cb61162ad306" , f "0x13e8fe06e552ec7df84c904ed0237b2b19f7bcd678aca4f4f7587e75f9ae501a" - ) |] + ) + |] ; [| ( f "0xc77f6d5cebf7298789eaa6af9e528de4f7b0aee2cb756992bfadfcfb8ce55c15" , f "0xbdf6436b9d4c15d9fbf87a46aeb61016e345cab90ace7c2e09a9d328ec6bd607" - ) |] + ) + |] ; [| ( f "0x4fdfd57b41b8e25d6def87747e6642fbac271433381b97cbc80303c8a29c0312" , f "0x9ee34333e01390fae247dce0987b6a3cd9c72079c7ff6163a9e3b7bf16fd7130" - ) |] + ) + |] ; [| ( f "0x5203357a0d4cda19a8d6a57e1e557da4c0ef24573a5ccda0cf3f11fa67f62524" , f "0x7a81d08cd00b27d33c2c518adf5b421fe2fc3d4ee36a4b8efae8282e8142c03c" - ) |] + ) + |] ; [| ( f "0x43c5b7d67a2b81dc7767eca05de9eae2c8bf6c8c4081163a12580099c298f315" , f 
"0x86314e307638d7827058c15aebc3ce79930742b06e6626b0fb4ea67f67dfaa13" - ) |] + ) + |] ; [| ( f "0x6e7558eac553d3ca100113ce5761ddfbf68fd2e629ad15149a35b714e87e6105" , f "0x4942e81a0fca03c8b97f4437b6f11c80ef542198e79ecf93d1024ee1ac629012" - ) |] + ) + |] ; [| ( f "0x897e460370fb0ddbd1669c65b9ad434492e95b69e48c4ce4ac8573b53fc0a807" , f "0x3fead2c4f4a6ef4428bab3ab1557d652d0acd239ec1fdbb300de0c7f1f616027" - ) |] + ) + |] ; [| ( f "0x27da9831ead1f6b83e2faad5c99e5af8f770b4081cf9027b9315820c9c6be013" , f "0xb679615a8a0f77c302fa4bf99aa47bf8dbe22027b425dd6881bbfe35c3a69030" - ) |] + ) + |] ; [| ( f "0x2b814a517e6bc7513e6f6be347a15eb9dc868af9bacb4c8a16b6a8d0ffdad017" , f "0x89952275d15dfd54d40d736209e8756e8c3780d06c7aed76bd0371f17256ce07" - ) |] + ) + |] ; [| ( f "0xfcca769baa60b38bc476e78c8503e0635e4bb83947843512dbe3587516b9d23f" , f "0x730e9cd2e184661a8259ecfe563237ad61b18179e94b897ca03219ea3439a93c" - ) |] + ) + |] ; [| ( f "0x093ee7fa4f06257674d9a9aa289f514b221915d7fe0e4287393487ecc2799635" , f "0xcdd4c39ebd4d6bfd85868181a154e1c0c43c91c39f61265efe3902ccded8551b" - ) |] + ) + |] ; [| ( f "0xb651a1ab6215df22e74c2258a1776a8afdc3c57548377f789daa4a2c95246301" , f "0x2db8fc5a9ced96f818f04ed7435f2b45a7d721b58a2b55920fc5de64b7549b2d" - ) |] + ) + |] ; [| ( f "0x8bcb0dda13572f1d4432eebae537785a2533bfb03f7a841d586de6a5808c2736" , f "0x7631cd35ffa7184e9710b06bc713576e51e15d6f9f3c8bc06f5f871b41e3ea32" - ) |] + ) + |] ; [| ( f "0x9854f555f5e87c10ad5ce05f3fde223fa4eb78f03f435eeaa25dd666a6ef6701" , f "0xc157ef56fc77ba11cfe7cd804569756bf1e82e2d6bb88a1885a129f8021f9007" - ) |] + ) + |] ; [| ( f "0x8a41a0f531d2d58f0dcd4123d5062044639e435ae2128e086e0bf236b8db9c34" , f "0x6cc22e5faaf596cdae60f7e1d9bf9981623eebcf491aafffb3527fc22b12be36" - ) |] + ) + |] ; [| ( f "0x06ccaa33b082207a1692423aadfbcb7183173faca968d4b0c73ae2d863f95334" , f "0xdb7ec004687668aa53a6945d74673713fd7632e3bb91ed26a8638fa4aaecdc2d" - ) |] + ) + |] ; [| ( f "0xeb83b640bd95dd52d3f232912d50ca1790e52125fc63455ca507ba752e1b533a" 
, f "0xf51aff3b1057df63ccc2f004f2160e505c5d592b7acf07fc0e9832be3a27260b" - ) |] + ) + |] ; [| ( f "0x29eea4c0a6a0023d26afd77a1e6301662dd5a02722c78915ae6b762b1bb30901" , f "0xefafcb305aebff7a95d82bab65575dfc8f3b21fdf168857c70cefab6322c5618" - ) |] + ) + |] ; [| ( f "0x6ab3f53c8048d194dcfb335bfc1642755042d5ffd55e7cdf470cbe0da2abe63d" , f "0xb58e7a88fe901b0b8da690d32991f50cf5a8b5de62a93b88f68e49255aa75a01" - ) |] + ) + |] ; [| ( f "0xfdbd207b12c524f41adbd4baf6a2a821e0bb22016f407aad02cb6b9c82ffcb17" , f "0x314861f8d6ae781e30800d64300c39dba166c4a2469ef7725073c7c09267082c" - ) |] |] + ) + |] + |] ; [| [| ( f "0xfffc5733a035fa9547068104fdbd55200d2a09faa22524830662ad0697267517" , f "0x119dd29ec4b427e4a06906b32c2d8502e790d9a954036403887f1987978ff815" - ) |] + ) + |] ; [| ( f "0xcae76c5acdfa829abf8aff6e6afd1a3ab50065d9d5696bed586d3ed592639310" , f "0xcde104a89402a422e2d57b6c2aee7baf2d40a948ad689582472e759c7b945b34" - ) |] + ) + |] ; [| ( f "0xcaf566765ff021dbe4848f964e9d524ccd16ad6ca765d5d6e3706ed09c1f681b" , f "0x5c17f626ef7f2063b5379d235f4e9acad1f576e221e32de06828a8212d78bb2a" - ) |] + ) + |] ; [| ( f "0x7b45c9de6404922565f44f426e6d73399ef7e2484c7a2bad0e68c137c32a5420" , f "0x3b57dc199e3df80e5e6de27f69df5edf01b3f68b25d73afe0cc5c6a77a400124" - ) |] + ) + |] ; [| ( f "0x460e7c450fac3e09be4cc86ab8ac758da8d97c9d28f6e858f2078da3118a2d0d" , f "0x8ece630c30c8dcb071d7b1f8184febb9d0bc520da961e0190433ca9cd6e1f131" - ) |] + ) + |] ; [| ( f "0xc374005a67934f114392a7eb0f0afb3a41b2540fc6817bcfde5004b0a69c7b33" , f "0x7ff1d4f6e0e3f1ce04b495787dbb78c9aa2f4ef04b6ebf167fbd762fd559ac05" - ) |] + ) + |] ; [| ( f "0x10c5babf6569aa3a8ca3cc25a2fd9f04f46e3afedf6c3e10173cce42b85f172a" , f "0x6c741a6d41a6b39f86643f26eb90e7471619b55943f7281aa60654ac51481102" - ) |] + ) + |] ; [| ( f "0x1ade812b5a8e5de0721e14d3319de97deadc3d6f7ed79ea2aef0b7ff19671a0e" , f "0xd75c4c6d04048065aa490a0ae9ebfd5b101c4b0dd0acf6a12a99c7ee1467572a" - ) |] + ) + |] ; [| ( f 
"0x5999bd575e6d3bbb0e2eb2e4730e5b798d24387fc37e54940a752896eedd7a09" , f "0x382820b1084d4f2e7cd1ff422f8df1a01abdfb12ecc963afba6211a209866313" - ) |] + ) + |] ; [| ( f "0x0a1a37f0b99ddc7659bb87be179fe8c4d1cb49b30f4ef642cc065a1c8f0f7f03" , f "0x1866a1df3a92d7669fbad683b9228115247c89cb8181509a1beb38961fff2715" - ) |] + ) + |] ; [| ( f "0x498a95ed06c0e016c9e208bea54c15f87d95f495aa9307b9538a9640543dce1c" , f "0x18aa7bbb2bf3d4a4921514394a92d65dc9bafb049e68964315a7b38095a89c2d" - ) |] + ) + |] ; [| ( f "0xae5cc7e74dd62105a93d6d2e6e52ef0123502cd3fd722c728094cece8f217506" , f "0x87f7167f83c1badea9855cf72fc070e5854517eebfe9a0b5000864a7479a8228" - ) |] + ) + |] ; [| ( f "0x814bc1fee927524e01eb1e933858e9bc9f1550c4b534267962dc00d248031f25" , f "0xcd2276b8974a1644fc9ba1b07bdd21426b0faa40c466cec3dd1b8159eb38a705" - ) |] + ) + |] ; [| ( f "0xf377aff9fc0966ef46973b175e8f2b64697538e4bbb6f0c0328416a4e84cf902" , f "0xb4f1d6d1145bf53154b56c664645b1a553d12d7549ac3c627004e160276bf302" - ) |] + ) + |] ; [| ( f "0x2245262299f5bde9e6196003d88dbc36c4ffe11e191e704474037ef23965f61a" , f "0xd5cf3590860344fed4f600e37fd7f652b802857430ad5a2c419afe9a26f1bd11" - ) |] + ) + |] ; [| ( f "0x8d4e883d7be515857b04190cde9c9542fa807415d1ef1ee8766d64e2c5794d32" , f "0x0d43c246fa311753ffbe41a10e7ed094610df2029beaca59df9f1643ab53a22b" - ) |] + ) + |] ; [| ( f "0x944352e8db4b4ccae0e0e47758c3610f183f33745365b55b65fcb726ef1fa616" , f "0x7c8f012cfa015cd371edfa4b0112c9ee40f219aa738ee11f938ef2069ccc9b1a" - ) |] + ) + |] ; [| ( f "0x180c800a10e621d25d77f758e9580671bc0999fe0510ab907650c115f807a419" , f "0xfa041a21f9b96e7693cf56aa8871dfa77f247745211fab566025428df9558622" - ) |] + ) + |] ; [| ( f "0x6936c52181b010666c95567590b2c0bf5c5a2303d79ae52cdd0e19c7226d4000" , f "0x736ebc76c4e5e9f0b9f7079071ab02de4b8aed1be239d4531c3c2af3332b2f2f" - ) |] + ) + |] ; [| ( f "0xdf0b628eb286f5d6e55d686081201c342d9e0476084669830969be362f537f38" , f "0x7dda181dc7b2614559fa3bed729ff092e3376508ceb60d67bb74e263ecff591c" - ) |] + ) + |] ; [| 
( f "0xb22ea7d60343bf658c52c8308a7e3f6f5777a3dc0716a59a73cb6660e5ac4f1d" , f "0x8c167e414db210dac61660e50e2959c6f1cc90845fd5ee9ebade2fa00432d238" - ) |] + ) + |] ; [| ( f "0xe69feed80f76107904256fc39e6c1fb5234ea190b562aba82fd3a63a809b7201" , f "0x4299b74702c8e84034f30db18b561892ebb4dedf51b3c26712519b5e49467014" - ) |] + ) + |] ; [| ( f "0x270463c03e69676b95fc3274c413afd89d788e57fd84837165ef126170f0991a" , f "0x776f59cb80c64d30787c8afd8d46ad28441c5d557f052242444626bbddde3436" - ) |] + ) + |] ; [| ( f "0x605a93b5aeff12c747e3158f75dc1d41998ba579ab338d98087d460b39205f35" , f "0xd873bb4228cae455a96b63f60165ba3942b536815528f4ac4ceefe8fdf62f408" - ) |] + ) + |] ; [| ( f "0xebb6eabda65fb4a0c399f709e8b8a88100baf67c0a7b3312baa428f35ea27f3e" , f "0x5199734212013370635a46818c289939825d1dad185e1753fd1a51dec279011c" - ) |] + ) + |] ; [| ( f "0x5add90de7ccf35e493b703c9c514f5dcd54dc93749624c1c6c20185495619e24" , f "0xbaa0f16bef9ce24203c081bac47fc43aa1bbc036a4d8decb1c1b0d34c4437629" - ) |] + ) + |] ; [| ( f "0xfb6d25325d851e9972900b66fbf27abe90a3ae5a9a99890d601552cfae9d2c33" , f "0x62f5a960ba6b03abab9bf797d5a508cf4652f0960c234dab9ba5d14b330f410e" - ) |] + ) + |] ; [| ( f "0xa0af0fd59c61c35fd6b23cd62b4ce4680dba6e65fa74a100403f47a984a0b308" , f "0x4c7bdb95de66c32f59e621538b937a34faaa5e9f12dd180a545e8c45b3613427" - ) |] + ) + |] ; [| ( f "0x379afe8fe9a3674e161b1baad08891910ecb88ed33b7af42273fa09fe59ebf17" , f "0x41ea96ea4fc428a88651eabe78c81344939bdaa31fae16733b0abf0b1c1ea02c" - ) |] + ) + |] ; [| ( f "0x68f7657719a652f667e60e6b840c12ce31bc49eec37848a0674fb8fda8cd6d2f" , f "0xe7f5c8f095687cae8de42e7cac1ba4101f042c1e212e7ff714f07ffea7168a30" - ) |] + ) + |] ; [| ( f "0x8224134b6b536395df0d1b5f9bb2468c9ebb6358715dcf4ace7a5699fbe96c1e" , f "0x011565e2279abe8bff62e6a5cc951842a2b1be026f4dcbc9903dd4e250c18321" - ) |] + ) + |] ; [| ( f "0x436959586d5e0f6ce9cf6ba4586cc577ace44790dafaf5cae3aa92c8f4b40736" , f "0x3915ecf20bcb45ef6b64990918315a57e697e4d2f51e47a605dd429e83d1153c" - ) |] + ) + |] 
; [| ( f "0xaa8a120c45689a349c4f1a1558709b8f04abd8c2bd865c3f314dd5fc3e562c3d" , f "0x173ecfa6c244d3afebe640a3187086f240bedc95fa980441086ce02736692c29" - ) |] + ) + |] ; [| ( f "0x1ab8cad604f0af32f4d7009a73fa0e46bdc62374ebec7b7c72a19e105f23481d" , f "0xcda9b517ac6efeb885a0c72dfe74fc11a7f0eacc551319b77e0be2c4d2fe8813" - ) |] + ) + |] ; [| ( f "0x5b5ff6b7dc533660bdc6214733205cc69415b7bb3eea67fc358a3a4f7679190b" , f "0xb4927abe272a0bfdcba5e12a8d764cef40d03b88387631f6d5944320c5171308" - ) |] + ) + |] ; [| ( f "0xdf3bbe9e49049dc483819e43bb30d8f3959422a5436ccbc6a2570130b6d6d13d" , f "0x039cdf09990c7ea78bb44c67b0ef6e1d5e8e224c66beb28b4acc2a1f97106e32" - ) |] + ) + |] ; [| ( f "0x6475c4f932ae88d6d2a7a4f8a3c1ebe05724c97e7225932ae03efcb79b57683b" , f "0xc6a63b546bacf0736e3a45b62ad7f28613a8570403697cd210967c07ce471609" - ) |] + ) + |] ; [| ( f "0x5c5e7275c272959c5736cd1f5e168bb9c58ce0e609594af6987f8ddbf392c330" , f "0x26fc3065a69f038c38fe1c277c07ebe3e1310f47bc18bc8c14247f9b55d67c1b" - ) |] + ) + |] ; [| ( f "0x8bf334d0cad93061c096591711d74f932e967210072dcab33b9f4950633cea3d" , f "0x149fd5929042f21fa993f67146a77a03fbccc98796439dd5f5155734e2ec1b2a" - ) |] + ) + |] ; [| ( f "0xa93ec7419bcba33b00503d19ae491dda63bb4f08d434c9b5ade4801d834d753b" , f "0x7c0d90496cac29d34a933def1272e589347c746c11d9c42c1b3ab4c3564c3724" - ) |] + ) + |] ; [| ( f "0xe7e30f6bee0d9e2a5be0670f7e9e9c05712c493b793b0c5112f5a9f4580f3425" , f "0x15b447e5ac54e8af1b7aa319410e048081b01873e18b2dd54086c8bd911c2421" - ) |] + ) + |] ; [| ( f "0x7043c66013094d3fac8d5999d57ab290a36927e928d6eb86155f976f93fa9d36" , f "0xc5881bab276036821ccbc8a2b5242343c3cd2f04681054ce397677af4233b11f" - ) |] + ) + |] ; [| ( f "0x9d6feab1a0de9bcb5e311fbce21cf551d14e8868ff1a49b87de58bee24422a00" , f "0xcae56548774f5ac39cc56b226aee24bebb5b26bd20f02602e61b531fda89dd2c" - ) |] + ) + |] ; [| ( f "0x4e1afb0e193d842416769108a5f39aa339edbfb59a4ec5f11bfa665e35b0621f" , f "0xb54b452aaa2279efc2aada2ce3b0cfbfe09d6431d929668ff63d6900d2eedf34" - ) |] + ) 
+ |] ; [| ( f "0x11e9cf066015fc3d3bce2e7b241210496017b7e19bcc745d7f47c8738a492513" , f "0xdcdb0c0b1ce93936f58f0490f9f11d454d3aacfafa49aa15cc9126ec48b26e23" - ) |] + ) + |] ; [| ( f "0x923adcfb24ec934090c78c082ebba0f7f60c41b3d08e17e3c08b1086b775b80f" , f "0x513596bc790f81f49fad0eaacc9a19b14e833fffab00a7207bc023f16de3a830" - ) |] + ) + |] ; [| ( f "0x6a0f2f534a32e8de3e5ec9f1817764d50a4a94122a922283e812d8b51c895033" , f "0xda39d2ee3a4bb3c727b17f1939b160ba2465a13aa224a2b1e34a97f0a92bee3d" - ) |] + ) + |] ; [| ( f "0x02233726e9660dca36728561fd9eea740dd4697703cef5f4a0c15b179ba4530c" , f "0x6332c86ee1d5ea5a53409b6cd8cebd3c946f629487f90e453cafe8d36eadf433" - ) |] + ) + |] ; [| ( f "0xe69011f49f3527434d5fc37138e00af417dd624f760591dbee7434e52435cd25" , f "0xe6885cd381f45acc816007d1ebb94dca5f42ae068dcca62d5b7534bbdf8ab935" - ) |] + ) + |] ; [| ( f "0x5d002f3d6da4a60415d5634a1fd114214e14c46ece995ff06a3414169bb32837" , f "0x21f9746b7f567ba6c5406df6778d5ed10cea2d5f96a2a53d7a74bebba619fa38" - ) |] + ) + |] ; [| ( f "0x14710569e7f12898f7102a4167305336ad98655ceaacf6e1262e99cd7aa9bb2c" , f "0x5d204f09ed3e47cc25fc168bbbef42d7e59f85f82b601b58c8e010790858621b" - ) |] + ) + |] ; [| ( f "0x520645566414bd12ef63425b689767d1c6840c16626c958008f89de9646f993b" , f "0x0540384d2bf2adda8411eaa961ca038757d3e8177cb636a9588576177bae760d" - ) |] + ) + |] ; [| ( f "0x8419c2ee0083bc3d467209ffe23d7d5b166713aa6265465499e6bbe76674be18" , f "0xfa5a6a50c434662407c38e486a5aa74148c487b52610c8bfa7cbb3d091130130" - ) |] + ) + |] ; [| ( f "0x8a36ecabdd496df7630f867868ffd8813098e61395b7ce859bd18ddc2afd7131" , f "0x4c71672f994545f735face6bdd236c24e2b755c934dccde37419a7c2b2c49619" - ) |] + ) + |] ; [| ( f "0x7a6ee1f13619b415baa0b1f12c53c31825a54247e47115fa478e235ae2fe9803" , f "0x0c07a302b55bb9f143aa153c83d66aca2e56182bb780d707b2fc8bb56b00e111" - ) |] + ) + |] ; [| ( f "0xa7d37d1d0b26d6c80705a6abc8f66999723c99542b468e3dd339f195f3893d37" , f "0xa197e392a72dccd44c1f95f48a412dfe8f72eed4c3ec00cab2d12e3a1c6dc00c" - ) |] 
+ ) + |] ; [| ( f "0x2682cc203a50b8f54b6d7dec11df0ebe6f2a32d860f6231562888790b79b9d11" , f "0xf6448baf03f981b296f23ee663f234f5235ef2ff779e0881088896848520652b" - ) |] + ) + |] ; [| ( f "0x30282027d663cb6b09b4dbd53f313a4fc25fdb162c8c64336a331482b9df6700" , f "0x8742f58948f6ded82c9c26e47b88dfe677dc820ddc6c22088917db017b78cb2b" - ) |] + ) + |] ; [| ( f "0xc6a29969ac762aae4fdd0d343ba71996133c19ba67611ffd9aeb0300b5265427" , f "0x727294c26fc0e1f395d256d11c206b498608419fb0e31bec883d1f210a3f812a" - ) |] + ) + |] ; [| ( f "0xefb6dc581b29ff757c5d6ed7b06acd8e2c420bd08230009fbc34334b8e268228" , f "0xac4a3d419ae906da993a3248031e95057955ad328c5900228ab36e4965077139" - ) |] + ) + |] ; [| ( f "0x8f28cc89d9b8ac9ce35d60b356b116ac43c5113250126d0c9264345b0d2d5833" , f "0x458c9159f1b092aec4f7cc1201868343bfb5cc9663dd02e6c90b937ea6578b06" - ) |] + ) + |] ; [| ( f "0xd0fd6c252e4212e3ed01297167a1a35d433e11e5e704c7c6b0db6e12ab835d14" , f "0xea0233df38440ab0974b5fb6168ba54a357694f224430768101ae4b1e4d3d905" - ) |] + ) + |] ; [| ( f "0x84e1b31ff95217d808632018949e63d9ea7f8c93c56033379177ea0194ae7d18" , f "0x82bbe69b47e8f0a6b9b05628b6e45e6d1b4fd0769b1cc59f312d3401b3c9e426" - ) |] + ) + |] ; [| ( f "0x1d3fe18a5bc652c1fb180f118150237aaf5de0a250b678de0704c6f915f0e016" , f "0x38781e3c3569ae40c8198f46203edaa4a3bb946948b3b88f07094d45346ea515" - ) |] + ) + |] ; [| ( f "0x40a5df3aa38b682a2a9ca22304945a3dede4e2b2c2d2c37258937f6aaef17420" , f "0x5207b170cc9fefd0c826377bbfffe1fd92f082b2216228f9b4390c399fa0a109" - ) |] + ) + |] ; [| ( f "0x42b949cbd6c6f16f366c52d33f0c288347823d2a7d766c6ea7129b62ccbaee21" , f "0xd626c44af422cb148bb5166d3f1616b8ec52bbc1a8c94c3c49c20a96fa64552c" - ) |] + ) + |] ; [| ( f "0xce9eac7f7faf2bf53dcf4a96767fab6491439832df6f2595297a0400b252ba2a" , f "0x389e5176cf7180b7304f98fa3c7a1eb103fc24bf0f97eb7aa2e99a6b8d29830a" - ) |] + ) + |] ; [| ( f "0x8eb7975775d7612b96ee96cdea8ba86f37cf562e36b442ce70aa668a2ad81c20" , f "0x4d180382fc6b139f0c7cbe2e7cfe5411bfebd1f42f989f36a80bcacbe409352c" - 
) |] + ) + |] ; [| ( f "0x066a2315e99e280b1d78d4fb38797b6bf575c9b29128fd3a2f49d7cd33f17e0e" , f "0x8a8a8921113947d02bcfe1192988851f321b3727529cd056bacb7951cd72f50c" - ) |] + ) + |] ; [| ( f "0x65b7e115939f3fba7b153508229f13a17f19dd3cd61d9ff974dd91c05773cb39" , f "0x3fe94d53827b6a1ce2c7092c168511f3b7b7a09e069310aa2ecc4f74727df61e" - ) |] + ) + |] ; [| ( f "0x8fe11c600b72935f38330c01ff87ef0f9c4784f11c586a2921dd2c1763727513" , f "0xe9ec7a50cfe23a0b05f82f8b205c3544c1f0fa1b420e3505ceb2c508c0db9d3e" - ) |] + ) + |] ; [| ( f "0x396962e36c4e1c04f6ea3d429085178b26d406fefd0ae22bb22464514327b538" , f "0x07417ffaae1d99fe7f0395bc59f6d9862a36f32cdf7ac42cc51790f43ba2da36" - ) |] + ) + |] ; [| ( f "0x252fe788f02f41cf265d1c245415614ddb4dfebb61bb14095b7edf15131d990d" , f "0x46f74e9e090a2e3fc9cdbca598e490f20d16f6722c196178274b58b4f1e7e922" - ) |] + ) + |] ; [| ( f "0x7fb747b713b46fe00054f66f9ade637d8f117d68223ab5b307f450801c38c906" , f "0x9bfef61e0f58e63219a0e3960ff914f635c0ea1faccfcabdfe07f41eaf3cbb1b" - ) |] + ) + |] ; [| ( f "0x20e6afaba54322147ea010fd40170e20918e0e84b09dd55b5bc179b2aa9d1408" , f "0x80f5f4ab847a25f2debd5df390505da2f638d84b361ab501d03718b8a1d43f2e" - ) |] + ) + |] ; [| ( f "0x8cce2064d9c1db959d041c3d207c4f4c8b8cb5f2c54901613fd1a202f209a91f" , f "0xd3cae6bfe1801188f01a559493e21d6d986b6ea07fdc05ed5cb98f03d4267e1e" - ) |] + ) + |] ; [| ( f "0x9d1a14f11845b9d9728589831bb1fe09db7474385bb93fe5c219d052a9b8c522" , f "0x3ca3fe96db4f48ed3b8c7cd4faaccb81b09d549abe6f945af8265772690a8333" - ) |] + ) + |] ; [| ( f "0xc56048ef171a154b2c59225e664ebfbe16090294a20003d9c1f263e591157f05" , f "0xffd127783ebc56a602f14f0f601a9a82ff0b885f38878cf79100027809e7652d" - ) |] + ) + |] ; [| ( f "0xc82b2532ce1c0613e46aafe890d861bcd2ecb58cb22972bcf5bfcf10ede1cf0c" , f "0xd31a35e089626c008970955c54b421ec8ea899cfc507c81facf6f78227f9e52a" - ) |] + ) + |] ; [| ( f "0x03514789c0a6c1a35d391e1fa8c0ded0df5facdb4e613a2e35d6edcef8a7a53b" , f 
"0x7bceba3645bb4c29938a28edf0f53d07f648b56aee58b3b4b900860c52323d38" - ) |] + ) + |] ; [| ( f "0xe71f3904dbd92e6f9c8e090e4ab8eafc4cdc8058a23720728cb51a6da5bd4824" , f "0x614ec06fa1bf39bf277da30e815893fce12e6d542b3dc7894db84033cbbba408" - ) |] + ) + |] ; [| ( f "0xb9abb51b87cb4ee5a541f54c41c7223078dd03b13a4d9635765c9abee048bc0e" , f "0x315b1bcdc41b906f6c4aba3a0b14b35f26eb79164df9c82a510a740b7a344b3d" - ) |] + ) + |] ; [| ( f "0xe7dcbf928b0deb81fb032966a7cec774eaf9417c89a256f0c31a4e156baea437" , f "0xa1e8faa3ee0fa0c58dc08068ed05d23ab19fa68ce197bedf21faf1769379542b" - ) |] + ) + |] ; [| ( f "0x32f6a0c896d4ffaff58cd8565fd2d19e3966b2346b7a23e19de85579401fc913" , f "0x4d063eb07b7ee1ac2a3247b323bc8cbc8f865753f92f38a7f834792dc4761120" - ) |] + ) + |] ; [| ( f "0x8ca02a26f051e1705440eabc01a91a526f7d460d7d752e8b7c3c11aa105e1109" , f "0x8db4d1ec6171bb3d2796ccc059062078ddb4cdf8e11a402190380abcb8324d3b" - ) |] + ) + |] ; [| ( f "0x709a2a2ae24e2f8caf0092795559b6a7182934cfde7cf1efcc6407c30aaf1f1e" , f "0x763c8ac46532aad5e8228331b5218faa84f41184cc39fd6197e0950215d76411" - ) |] + ) + |] ; [| ( f "0x5186def9ac2e0b8da154b1d13029c83ffeea0e32f4f3bfca9c6469471fa5ff05" , f "0x845cc289b52cba9a7b20b4f2e8a63db6b96065e9badbea5fab8f5ddeab412628" - ) |] + ) + |] ; [| ( f "0xbba9b02a15d6db6ad099ae7f896dbbf5ab91b15bc70af2c607bf67762dfa802e" , f "0x45346d588cff992d3f4cc03ac71cb213fa7fd607b050e1911cbb279068804804" - ) |] + ) + |] ; [| ( f "0x11b39d42ebe2435799635e8cf18c419ec83e7dfed247017de334142165d9ae1f" , f "0xf11b42e96de5b9b6c358c6ef773d6b72c87acda02c50144e01f7ecaf397ad836" - ) |] + ) + |] ; [| ( f "0xd4a99d4e2ad40dbe4aa99b4c3753e9c1079caa44a35c58567775022037e0162d" , f "0x2098b24eef036edcba9fdbe82cadcdc7efbdf38c1de12335fccf6bacc7c97231" - ) |] + ) + |] ; [| ( f "0x509e6b67d4c18d5c4dbf6db797db3b5adc7eea9dc23fa229ea8f56e5c1f68212" , f "0x1740fd3f94a9fe2ee9bf9159d62edd35a1518e893544d32be7ab4de4636f1f21" - ) |] + ) + |] ; [| ( f "0xa76b7c23b97bd2efb8a183380656545198e312dcf6e47a4eac82d139642a062c" 
, f "0x8539ceb1f03dcafa19f62599954edac15e38bde92f428dfb61d008fb7190bd2a" - ) |] + ) + |] ; [| ( f "0x1c3c3b4fa98ceab3bf891272984e770814616160787bc0ce74007fafebe8de31" , f "0x85be8e01f5ee5f390faf7fc626adeade8402dda7b38843b04dbbe98174fc492a" - ) |] + ) + |] ; [| ( f "0x8a1f1ad986867027af76e553755d97aaf2d8f2d952a41a7b2f856c1dbb3dba2f" , f "0x189183e0a9403bb1c9cd89a9f5a41f50c05a7b367e3c62234f0a85a03fc9aa19" - ) |] + ) + |] ; [| ( f "0x9b1809b8299b36f6de547f9dcbbfb63eab30927e712aa182b1cd2768c7902031" , f "0xeb3e759b1152e5f472d6ee962712dbe2e45cc5d3c4ea0fff44205b3509e1ee37" - ) |] + ) + |] ; [| ( f "0x477a874845d79192914c012a9aba29bfbbb563949608435a46140d1ee4766303" , f "0xf589f2927d705988065e7755a74565a7871dffb429c9f214f2205750fc10a700" - ) |] + ) + |] ; [| ( f "0xc561c965ea43148583a4415311c6ab085872495ca100731fa1042cf406669c35" , f "0x1e3cc1c7926de309941c754ef50f3c751a1b169414351c33d69ff00a19b0dd07" - ) |] + ) + |] ; [| ( f "0x8c179021ce1d9b193e8f5046f052c29415385fde9a99882b79b2b8772d2d881b" , f "0x0a01fbe5dd893ebe0bbf0ea36bdd961ce75f1d3f348d8be3e66029df95985e22" - ) |] + ) + |] ; [| ( f "0x2942e4808aa3706da3da66599eb7132358028e89762ca98964277019822dac3c" , f "0x7c75a793bf76439a57cb8943374119692cc1971b13828af2d7007f2a094c0525" - ) |] + ) + |] ; [| ( f "0x528ae53d79d4d040fde3cbd3a0eca4f140349c3ee28ef401f80134ea180ce306" , f "0x093ae03e9b00cb44c66088d771b5cc124bbece7f813fb064737e46ee8b061220" - ) |] + ) + |] ; [| ( f "0x2a82de97cfe2f77175e1ad46bfe27ba5a6436ebe330f785a8480b8c4208ae43f" , f "0xbb66ef83203f4ecddb6d7a94b2b4a6b0b78b713fd5fe956404920cf4999d562a" - ) |] + ) + |] ; [| ( f "0xd8a5c1164261973239b0ce3efce4bcf024b23960867da198ff3c099b7203f934" , f "0xd214bdcb257cdd689896d82def8191b9544e04d5341b2df5c3c1547db411fb31" - ) |] + ) + |] ; [| ( f "0xbdb24ce042bcde92db11afd042c43643bd0783876539a2d3bf658013957a5433" , f "0x5124434f940ae068219dc9c15b57069ec3771451a79d29d2e05ac69dd9ebff23" - ) |] + ) + |] ; [| ( f 
"0x101b21d3de147750d36b689adc669121f17b956c98052da8c2e435a2173c6b0f" , f "0xd81f593f581392d9ea31036f02bfb648657448925b55ec2b8afe92460016fc3c" - ) |] + ) + |] ; [| ( f "0xc82789df7f48bc9addb889329456c7e7fe1797cb60562a4efce84cda9b0ca002" , f "0x40a9a16dfa3ab4eefc63617bd9f2de0d59dc71f36fc9179bf759e87211be8b0f" - ) |] + ) + |] ; [| ( f "0x1a23004403273ebf89b3a02ee40ccfb63f39bde20a3299be0c623effa1a2c90a" , f "0xd93aaeaac26fbf0806383e7e014d1fad0fcef73b51db302a9e354619e5c9fc00" - ) |] + ) + |] ; [| ( f "0x10a16d337afa48e400bd53bb1f7a3c951ecdb2750d5693d289074ad40ae3b222" , f "0x9ea22fdbec3bb6f03fa9da05d29f303018440308546c96ffc271fb2b4258140a" - ) |] + ) + |] ; [| ( f "0x73500b67cbff994c06497813ff53862a877c6b41bd40fbd286176a3e401f803c" , f "0xc3886e6136c58ea417360b16311557d454e920cf2e47275adf4491cc2ae7350a" - ) |] + ) + |] ; [| ( f "0x984bd25742b02bf381cbb6a86eb29c11218923eb8aa6aade3bc9070644c1850a" , f "0x184a00caeb484a3ed97aacbee89192f3541eeadfbef17ed55d00a64f4fcb0e1c" - ) |] + ) + |] ; [| ( f "0xbec073f569dca25801f176b7686003c658b28e55e6ffc177d3632690e64af00a" , f "0xc9bd65482abdf88e20f5533573ee743fe9c1cdda8320c24882a6dc1819cc081c" - ) |] + ) + |] ; [| ( f "0x0bad3499a4a00f04f94704148457ea09c0382fb241aa30582127a51080e9a209" , f "0x7fe17bc040a5ceddf921b8e0abe9275ce465e27f13eefd17060ed43460b6500f" - ) |] + ) + |] ; [| ( f "0x89e94757230e95d577ef63a3e9c855c795e4546d2750e883a4f8f4108cc90801" , f "0xd1f4bed0e3515e49b2c2f8152ee17db0cf2e2554c63f38326bff9de6c021ff1d" - ) |] + ) + |] ; [| ( f "0xccf855a2703f807d448d1f7a7ed15faf896853a91ac3e418ec864fbf09583916" , f "0x065591677ad3ce4dafb9fb184d099574a51d453307c9ec827c62af958b322538" - ) |] + ) + |] ; [| ( f "0xe672a6da351ae2051931117cfcf0298e6dcb2853696f1a55536734271f234719" , f "0x0b2367edb7b6ee8c8f703380075c0d3713b64fc946d0cbe6447193ff404c1632" - ) |] + ) + |] ; [| ( f "0x33561024894ad59e11278e7609692038a71e8350a7459f566864372facffb709" , f "0x5862a9c035fc8a3ae613054afd30420dfd8a0b0ae83a985c0ced7c3639927e1c" - ) |] + ) + |] ; [| 
( f "0x45385b23e68e01ae2157ec7e647bd45b2a524de847b50f1c7cd918197c13461a" , f "0x0ab20cb2526a00ebfc7939a2bc2ed705ed56c74304e73334aa761f0bf3b48230" - ) |] + ) + |] ; [| ( f "0x501f5f94131e9ca48d4ef46863abdd8b3c31700df0c18c1574af2c6a0326a23e" , f "0xbb6b9ffff548fb0d76137ab9f98bbfd6227e868e3dc29a59a010c411da98412b" - ) |] + ) + |] ; [| ( f "0x01cdddafd39d48a335a7b72f2c5f2179f14d83b07d81a7142ed2537d470b2336" , f "0xa7ecc8f99653969c05b92973853c3e14ad521f748505dab4c43c31292507e926" - ) |] + ) + |] ; [| ( f "0xed419678dc17cfd38a65ccef603a77f4e368d7341c9c29497f6af59c94eede3d" , f "0x951936845c7b725b67b2cc9e88e0f4cb006f8bd362eee10c824aab53d25fd62f" - ) |] + ) + |] ; [| ( f "0xf40271af9aa77a798ebf5cb8de34dad90bbf8b9c224f3319964e9ceb5dc21134" , f "0x80086f85215ac7716a21f33af1b4f6bb893c5bfefe6cb62d98565ee704fbce30" - ) |] + ) + |] ; [| ( f "0x499755788bb03a2e3a6d4f271e8d5581c3432a0eb970cf2bcd756cf91213c939" , f "0x62c7c53ed47246ec1fb9504738f77aff649cfd8329856ccbb0bf69ff14e9640a" - ) |] + ) + |] ; [| ( f "0xb858ac2e158b9e059bc4467db5195a7a03194263389c36892f068823ddc74a0b" , f "0x348bb0d2be5e85268c3786c39487c397e5e1ceb3f88d9600a0bdbc9d531d630d" - ) |] + ) + |] ; [| ( f "0x66236bd48c9ca8ec2d16af3a197cabdb838f51f88924e936a22218a5e8fcb705" , f "0x924543da3ef9b7429cda0c88b05d2f30dc66332dcff9f98b5541d2e6f4cac539" - ) |] + ) + |] ; [| ( f "0x559b300ae10fa8c499c7936f9b4f5146e0e7dff15b581abf364c56853c370628" , f "0xb88018660ebda06827247410a173abf923d18991ed5c49ef78f282a8b9998f27" - ) |] + ) + |] ; [| ( f "0xea61414f4cd57d70501fec7a42a459ae9e0ec89ff51c104ab379da9ec95d172b" , f "0xfedb3763a93327a5c4342d1d7646ce2f9404425a51b5061a7d5afa238179a52b" - ) |] + ) + |] ; [| ( f "0xe5f5dc11a39157a3b4ec06febea377c205a3ff7f082009f6aac25c533eb8d206" , f "0x8cc265e83b4a53e143c7e740851b8aeefd2c3e579e76a75ca6fe0648d7cfbb2e" - ) |] + ) + |] ; [| ( f "0xef7a1d5d97516392911685b8b6e45bb5c653a655048170f3c45cc989a5425502" , f "0x97a3b340594e48e177c90120924840f39717839d3dba25a0e8a56f146c3c6917" - ) |] + ) + |] 
; [| ( f "0xc6c92faff9d328ead2919c22567e3725da9d20899dd4bae95cd0e25caa16703f" , f "0x5f29e001a060c04864d24947f1cf9fb10bd777f51c345241b71beda6bf176b3d" - ) |] |] + ) + |] + |] ; [| [| ( f "0xa67b007f5f4727208c924ff490842e82fd1f35c62edf1782735f2d781232120e" , f "0x0c85d363b02e3b50fc949d4515cc9860b92d2fb73ce008294bfed9e9009b6a03" - ) |] + ) + |] ; [| ( f "0x12f28a9e28cdce6fa1e7580365e36907a3263994c66a8720babadadd2e2bc508" , f "0x27b25f290593e9344ed7dee27b8699613c2cf4d83b546d53fcfb703fa37e2f38" - ) |] + ) + |] ; [| ( f "0x32f0c48d72246c8497cc44ac9bca73e8e9e387ccec1737e458d65b47c5ac801f" , f "0x6aadeb6da1609dcec95db994b13915288dc370a502753b34b30dee8dee90553f" - ) |] + ) + |] ; [| ( f "0x6cd797f015eff37f58d4e234af7d230b755caf6df9890090f1f9c6b7ecd0c604" , f "0x67a4e229e09857a0c23ed1d7a389ebb1c518183aecaffb75e1b0cdedf2652b0b" - ) |] + ) + |] ; [| ( f "0x0a72eb5c10e60d8afb077a68a8e34397f2e20bda43c9e481d09afded81e4b23f" , f "0xe2c8961bc864a96af868cda61d4d35d61b8a23ab0fc9b308c8b3cd1e27e91414" - ) |] + ) + |] ; [| ( f "0xb77ee448ca82f8954f4e5c856a8311d99aca99006fcb6993139e41751cbb251c" , f "0x4abbd20980264bea6ca1b93702758206d95e323c4099c46da4ab8450065e1b07" - ) |] + ) + |] ; [| ( f "0xf69e0ae7e1efef6c7263663c87892e94d4862bc5372297f84ff71ab57990d71a" , f "0x213c212561c3074e03461308fda9d65da831ac5a7386c5178df28d472a065703" - ) |] + ) + |] ; [| ( f "0xae7f310c75ad4fe170417ab8b2b704b589ed0a8704906587dd7fc6e13acd5e20" , f "0x1dacd7729eecea37c8a5beda2bba9fff3fe2545297bd124c55b019e498efc006" - ) |] + ) + |] ; [| ( f "0x2f847a78feb74d2bd152b7edeac5f68652b742a7331e22f4ada2bb7a9d7a4300" , f "0x6e2031ad9835f4f55000ef8a811c3202da217fb28d338b30f7a34087b145cf09" - ) |] + ) + |] ; [| ( f "0x1df5ca22b4eb6994e1817384e467dca01d652fd20de6b36f9b7e0380d1428e04" , f "0x888292c3d58870b6c2038ac48eb07fd2bd2eda8f9298b0d62c612e37102dbe2b" - ) |] + ) + |] ; [| ( f "0x80288fe515e9fa990d5cbcc03b37e62e77cff7912029fb252bdc025d5a326435" , f "0x5f07719db5c6188eb2a9fc8528997ee3021ec6b17eb2c3bf03afc523d162641a" 
- ) |] + ) + |] ; [| ( f "0xa33ca68dbaa85826e84423beedd39e41db1145cc0a2c94c8fa2082956d2f2906" , f "0xff7098928211c23c6b3b74dadc22244e4e9242be9d43c90e48ec468eba00f43a" - ) |] + ) + |] ; [| ( f "0x980d823b66da56416b0613c0bb789d528c0d2e0d9a862bf58b66e713e30c4d17" , f "0x92a45ea51eb23b2732700a72140543ce80e2930ada5265308c56fad8c80d6f09" - ) |] + ) + |] ; [| ( f "0xac3670053f31c4242553569d480d2217a3f1ba6358250108b0480741a97cb434" , f "0x469747b5334543a0e0b676c9793a672b1698fc6e3daf665a33471c33d756df0d" - ) |] + ) + |] ; [| ( f "0xf0342b5f476664264a4508d87f1d35fcdc1da491d73e324a8a1d1a3200444124" , f "0xb0ec18f89902bd1e77a3e6227b3e0c4e01c2a3f6025d40246afd5ae5bf8ab02e" - ) |] + ) + |] ; [| ( f "0x182ab8cf32c05111095c38617109ff372040461aa904b06ab06e835ca6e1bb15" , f "0xb0201909a2b53851ceb13782a2693f774bdd123087a5bb22c221a7429fba7105" - ) |] + ) + |] ; [| ( f "0x9201fc9769aaaa933c3faaedff48ad847dbe238a470ba08b7ba412f466cee902" , f "0x94d40b60177382f6d6646fd418e8e74cda0b0c583fb190c2ffe56b4f918aea14" - ) |] + ) + |] ; [| ( f "0x50866e14f780397f2fe7ede70e1c9366d7195473cd469bfbadd921b0a095c831" , f "0x2111f16a1b8127345801d5c1103758dd61a151717976a2cc8738f69975b48f06" - ) |] + ) + |] ; [| ( f "0xf2883b46fc23b429f2081209ba6c68ec4777dc700ee24c72d720fc1742d2512d" , f "0xcd2b8208823861877d63d3d52582497151de68cb6eab483fe304b850d43a7c0d" - ) |] + ) + |] ; [| ( f "0x735ffdbd596f0a22363447c87a8668aef701ea709f43ab984d3404dd26040c02" , f "0xeab709352aeb90377532b8ecb60ff8eee40ca6f56d7fa40ab5b03d3dcd6bae0e" - ) |] + ) + |] ; [| ( f "0x732120f02a16a66bd2a1c8b6bfd0c8d50419b703f50c26744e8f4e4364e09c00" , f "0x09a66875e33b20e78a57c63784f4ee193a2ad51943ced8cdf13c0aa2904cb93d" - ) |] + ) + |] ; [| ( f "0xc8683a0f9ddb49461147cfc6186bb3ffd14c8ae9f89dc00b21266430f4c03d28" , f "0xb9532aa09eb4a334ea61948c68fe5aef524fa8be9a0b824bb4affe838c4d092d" - ) |] + ) + |] ; [| ( f "0x9a0c2a7b6df5a25911842cf6b821703019bf7650c7b3535fea84d505e729a934" , f 
"0x81cc1c855e0a6ef88d884e85c75748a5e6303bf9fbfc6a6151938448b7028a09" - ) |] + ) + |] ; [| ( f "0xc02f9858bcc1248d6a064e716a6c47707d4c6ab2ebc6768a431fc18358063b0e" , f "0x1629dc28ba61c691d95d4eb64324d6a49b8f0078900f79d0628730bd7b46553b" - ) |] + ) + |] ; [| ( f "0xeec994dc89c7c3acd7fc6de8713814fce296c523d49bbfa7a58700e4261b6a06" , f "0x7fe98f27bead46c4a9823a62c74e65abcda7aebe977466d5ca6d4d8b06260d1c" - ) |] + ) + |] ; [| ( f "0x83ba203e941a8f5a110e9bbf9a2c7a4cab817d6d1a689c4aca8a9e2c831a0a0e" , f "0x563605bb2d613902384814a57234d5c9cf4451faff0fec68bba83eb22c02193c" - ) |] + ) + |] ; [| ( f "0x2291a7b29b3950c46a450b0893f9b4df376b809e506984fd489a791dc4de1612" , f "0xf0bd380f995401e4059dcb1c39b68cebb645f92bc0662163f1d7b1a4f0387f16" - ) |] + ) + |] ; [| ( f "0xad052c224182a82d48b5ba4e42aad1d6bc6c6878b9473579b11b92cd8520f901" , f "0x03b3411d13aab891e78f56d18ba62d0040238f6c8ba82fe981238a6cd5246413" - ) |] + ) + |] ; [| ( f "0xd8130c43e1af6678fadfb9e1a28c9abd8c0a78f2bda3966deced607bfe0cdf10" , f "0xd26d5331192963c809c00f33377ca4e4e098d87fb7ff12fd5c3dae5677e99a10" - ) |] + ) + |] ; [| ( f "0x19d8e8d20f949c0013e09cf9785b0746fe2cb5fff9dd60fd4c3ec21311cbaf0e" , f "0x798f782e8af4ffb450006b29700f8388633b78e13439187647f733e8b5c1e534" - ) |] + ) + |] ; [| ( f "0xaf7b3642f76d51965c879d8472a5dbc96dd51933c520cc2d1f3e3ed9294f6225" , f "0xe7fcc40b4eded205928a6bd44528715b75b4f4228a5daab650982e6f551b8b21" - ) |] + ) + |] ; [| ( f "0xc3611f9b54f81b7d373bfdc66370e32e826cf541a757f2964b31cc3371f77405" , f "0x6c218578ab6129335ca9a5de24c7b3964e658ce814f0e90f60834a4ffe97a233" - ) |] + ) + |] ; [| ( f "0x8fe8ad9dbbb720350d9b7b5562ff73f4ecb9298a6514c73e46b1550f94e86939" , f "0xd2814d42369ac7baff64985c1ee0116b56d231ede3b3556c27415aab1c818a21" - ) |] + ) + |] ; [| ( f "0x9b72733a350a6a0f16a7cef675d46867356dfc8e66df9a224927ffe61920900a" , f "0x9e1f688a2cb70c391065cb22c6d73758794b6cf77d0482e0ddd1fd7c80264a04" - ) |] + ) + |] ; [| ( f "0xde7bf47313c1c76a97ef4e58099e98cbf5fedec6e819152372f9e54815ca5d08" 
, f "0xb6314b453c829f9634cdc2c77e54fd0ad35f46cdce7a9ca29cdf7bb746d6e22e" - ) |] + ) + |] ; [| ( f "0x3897009fb9c523a6ad936533970581d35699b260feedb933c11729ee0c161c37" , f "0xa94df1ad5e5104ea73ba90ee2e1b3be1446a19a69bbc7fb858c4ffcb462ae835" - ) |] + ) + |] ; [| ( f "0x25a73f9f4576db758d427ca76a0f501ff8dedc9770b47cf6bc3a1aad9c427836" , f "0x5e81968657b34ac964ba299ddfe88aedee043fc2bb041c3d28e54c4f30ce2b1b" - ) |] + ) + |] ; [| ( f "0x3daf4b3642f55fa9d79181f4a1a636770c126d02134f037547ac305a96b13033" , f "0x1d6f83259c519f6248d61f3f9f41455d2af5b586fec89c954109b1a7bf9f951c" - ) |] + ) + |] ; [| ( f "0x234447aa92d49f986051158434e5de0140f1f032f19f9ad37bf1be2fdbfa831a" , f "0x504ffe4481aea0b4f66a3b2b1ad93321789e19652e012fab6b7d360ecf332802" - ) |] + ) + |] ; [| ( f "0x0a249de5426663ae6129898c4c4872dac38ad0aba8f2141ce1ac46f6dd38e934" , f "0x34a18f276fcd62ca5acdc1f6230f99c7a9e0e0652463a5bbace6206023319809" - ) |] + ) + |] ; [| ( f "0x99ff1880d1dd994e870bd8d1c58a08b7b78691ce2ee081465a49727402b5910f" , f "0x9bf778236d40dac9bf1cdcad82c823b857f3a41a0d28d305211b87e0eee99c3b" - ) |] + ) + |] ; [| ( f "0x980833603e27fc95136add90b8b5929f7865b49148064c1a51924ceabc563a3e" , f "0x1b4d2870ea7e49c29c82a1094ee61421b710d3d325f7776d51bd890919fdd634" - ) |] + ) + |] ; [| ( f "0x6ff188ae81ed2b9f8e7572eeb2434cce32b95ba0bac1aa7375ce829be263d918" , f "0x30fe70ae0519bc6ce2f5715d4e3e5853d593a9a89b4dfb3cb46609aa29945d0f" - ) |] + ) + |] ; [| ( f "0xbb1898562aae792a1a2a7a69a3dab268dbfe3904aaa436b6d65a68044bc52e05" , f "0x82c97036c8a4bddb70b18b92be62a099e1e0f7d6caf79fcc7d7dc0f7716d3b20" - ) |] + ) + |] ; [| ( f "0x9c220305bdc20a38d2de73df89a50e3da4f46048783cd2ae6cacdee598d47608" , f "0xd029a29d5dc3261da2087375958642d7ad8c648cc055dd3c80143d3265267134" - ) |] + ) + |] ; [| ( f "0x1b776f3c93fd6c0dcc99246cb54d5c9625cd3b3ee6babc774f945a9af7f8e31e" , f "0x78042aa48dd1a9a150c554b9cfd79afd9d40dd75a3e54f2be4741038c81a042a" - ) |] + ) + |] ; [| ( f 
"0x0ccae9cd7888e4acceecbf2957e09c1ce3bed26fb11cb7d8b3450cfe44d8eb27" , f "0x9c67ecca75b779c06ba31016e87f4766eaa4aadb028aa1592aa3bcac8ad4f926" - ) |] + ) + |] ; [| ( f "0xc845049a1556cb99f41c863be6c1e2491556d5765d559a86776959ee4474a40f" , f "0xca17437a70e71968ed6f06cf9263c5195c610c64cbfc37daf3163f3bbc006627" - ) |] + ) + |] ; [| ( f "0x86e0e6f60a6b12bbbe6fcba476c97dc311e1bff0a282313167033b4303583f07" , f "0x6a53ceee87d6e340a884198270f1872a25326a3702e69e36da58e51d84364724" - ) |] + ) + |] ; [| ( f "0x2a8ddc120582213378c4fcba3c241e810bce57096b61af548d8419eeeb129e1d" , f "0x1354fc621dbaab27716a56df9841418f14195d838ee54fa00ca9aa8c69698a31" - ) |] + ) + |] ; [| ( f "0x7c8982fc136d19b491b0e6217221609cf15bf20efbf2623590671aa7f5c6ee3c" , f "0xc5e2f5eb8f1bab07cee6ae69e76444c7a994cbf58cef4272f2a6760d4cb84f0d" - ) |] + ) + |] ; [| ( f "0x6e7f3b6c15833f547047b008ccf2bf8a464beb350194255f962ab75b29079320" , f "0x6ffb2a407757fcfedf413bddbbe6e02570090a3ca91df6bd1535d50f8190f12f" - ) |] + ) + |] ; [| ( f "0x78e8ecbb875a77a2df2d96d8ae4059c8ad7d31431ad110e33c0686e766423439" , f "0x3f5cb23d3cb60d778dcad9e7c0b7fb22dac147382efd3c9fd1e140cf3a69c618" - ) |] + ) + |] ; [| ( f "0xcbd460575814d582de0ef84818ee12958951259abf5a37e08101418c31d26830" , f "0x9aec5a583eab504d8d06814ce731f5b3f3d3301232e2e9925c7822640188f409" - ) |] + ) + |] ; [| ( f "0xc3debc4678ea3046f92021445f4a37e013585f5f5138fd6411b1c59201d19536" , f "0xc93843d9b7873b42cf36bf6cd8136df3c559d432281d4d67748dc92c3795f61e" - ) |] + ) + |] ; [| ( f "0x37cb59e10cb9fd7dd68b1e209bb2669f9f65371cec17b01bd0a61d39aaad8d01" , f "0x9db08283a641ecd69cfc9aaad9da71ed60a5f9190c146bd1e73bc8802e5ead2e" - ) |] + ) + |] ; [| ( f "0xece4f7b135b3732341dc4b62923146d0ee133c071fc2cab0bdf6e7c6c8923805" , f "0x52a7e9c21d23585d1b17165a3608ed020baf3c9b58ecfdf90a67145adb337d20" - ) |] + ) + |] ; [| ( f "0x000a62941a7f5754ce94ecd9e189efc6c87ec035554e0670043211393bb5bf12" , f "0x2d7fdd69bd2e712c0ca89c100f00c318f4cbebbecbdda10817f6e4331f3a0d14" - ) |] + ) + |] ; [| 
( f "0x534ab543244a0caa51f79026da6e5a726ad4e81f80ec67d1637530fe5c1c2123" , f "0xaebc2ab8bd6273e36bb797b56bb41268a6351ea2571751b3e8715a1c1807472d" - ) |] + ) + |] ; [| ( f "0x6a95a3d2c40180b0d66f8a504eb9510d2da387649c90768429fbc50480bfd913" , f "0xeaf3f25316ffaa85833845fb49c135ada75545e7cbe269f91b1cb9e5169e2e25" - ) |] + ) + |] ; [| ( f "0xb40037d9faa5763aa9604338b2916df0c84a6eeb7237da13ed62cea788a48730" , f "0x57f65835907134fc3fa09679d1214bae3974102eb314ba5d905b5bf3e567c23d" - ) |] + ) + |] ; [| ( f "0xb9445e1b236a8f970ab3d96a0403c98ff9014941fad632203bd4b163029fb33f" , f "0x0bc9a8f650c1dcb485b1476928e8c444964c925231d4b69f69f23466a8d5ba28" - ) |] + ) + |] ; [| ( f "0x67265e456049812208618ebebfaccfd3e4c43274f46aa1e8640fb9c2d95b0a28" , f "0x6880d667317825ef447fce3b2b01d9cc1197ccc0c5cd286edb586e2130290a27" - ) |] + ) + |] ; [| ( f "0x9657349125b633e6b1ebfed31b9715262e103e9179d7a1307c0672293052223b" , f "0x700be242f0d12e8054c973e93436fd0810522497c63bf435e44a005994f06a27" - ) |] + ) + |] ; [| ( f "0x15683e303b8ab90c47a2f73107d81d5153b9d77d51be46915d69f29bff821809" , f "0x915cf648d2bf161730f6b90f8fe9ef70c85e42d827e1402c0bed252650eed50b" - ) |] + ) + |] ; [| ( f "0x694826f6b432083bc1cd7df4d9196c258a0646637174047d9cd06fdcffcfbc3d" , f "0x30e80f49f604d6309edc48325fc45e2a0c2c992f8a419f184cba52e26425f526" - ) |] + ) + |] ; [| ( f "0x101dceccf0ae77d9e93d2728f09121fc357558173886bc56125454079010800a" , f "0xda9752aff13a16784ef5d7d1b5e4ad1734c3bd11013e5f857d5d9f357d931d18" - ) |] + ) + |] ; [| ( f "0x46b7a6615dc93d9e3c2c67e35e78ea8227795d4e78c8b48c01fc4b7e980caf1c" , f "0x89efd73953e3936a5e84c45e3f966b3b870dc5fd4a2774f0786d3b94ad6dea0d" - ) |] + ) + |] ; [| ( f "0x556b549a99c4b3874e5b2d7f1210147c93859c8e56405d63243f05eaafbdc60b" , f "0x4fd20e6bca13abc883ec3e5a189571238df363c6faa035ba293f7e7e68ec3c17" - ) |] + ) + |] ; [| ( f "0x85663075e81e362f8c5e7c7e161284276ae86b4f7045ef5154b8474292b8f03b" , f "0xbddecc40d8cc009a9feff34322a48e07414a19a14665e3f041af66f9d23f7819" - ) |] + ) + |] 
; [| ( f "0xf5d5333e94fad89efd94fa9b791b1b8cf54fd41d454eaf3bb4d3f872e37c1b27" , f "0xe4948c93d990b4b64cc8906eb48ec42a98cc70de257e9e12076bdb707150a61f" - ) |] + ) + |] ; [| ( f "0x76ebb956bf82ad78de35237787f2a702b8cd28786675aaa0e2bb01749154313e" , f "0x95cdd4a4279ab5224dc35b89bc8282b028a1ffc98c41171c13cc73055a651629" - ) |] + ) + |] ; [| ( f "0xb403486334a80da58711e7e32e661ba3d47bb5980168da16a2da1447282a7b13" , f "0xc27b868b4ffa5dac0a36e56e58a20d3f5e8b1a0db0e0ab277571eadf5c913d2f" - ) |] + ) + |] ; [| ( f "0x42a43484a921a42d0b39d8e72cb87a9cfd2a1fbde1f989e0d3656347e8f24127" , f "0x1f126213932c51940aa8607010a734e25b55286824e0f663ac83c8fd8e6d910f" - ) |] + ) + |] ; [| ( f "0xf9c4c974df4488b58bfe7556ab05be8055da4ca0f6366a5029bf448e424a8533" , f "0xa9d1ac057c76b1cb4d67e8a4f79394ee8aafa0cb75014a6fb8effc4488315937" - ) |] + ) + |] ; [| ( f "0xad0316579e60ece171f24695ac5d7e4167b8554b74a8d6ae409d76dff3f2953c" , f "0x37678d19b93d3dcae2daa3ab1fbcd0d95d671e63b57d90b43b97d5bf5775d40f" - ) |] + ) + |] ; [| ( f "0x14c03b79e405a3c442bdeab76afe3ab10e4f93467893dcfe714bc3e0d6200825" , f "0x1018498871d565d84e6fff32700a59a6ae045dd7504b1d32205289922ad32b02" - ) |] + ) + |] ; [| ( f "0xa38bab0310c90a24daa6070e70e6c346487b7d6f1fa5fe1015507c263cc3e00a" , f "0x75ff4aa1db74d672b7c3db496541e9ad7e0c2c87424237eeb19394b04b3d9a0b" - ) |] + ) + |] ; [| ( f "0xc68d778bfa8214abdfaf3bda6aa08e698cf0051734146e6172e39f7243066f17" , f "0x5e231a5b30dbfa70cfd424f2dae0576fac42030b12280866c49aeb7ee4a1e114" - ) |] + ) + |] ; [| ( f "0xd062f0401a4913897ee3bdcc0eb1e4278e53004ac5c2bec2625e63a27382972f" , f "0x8c50cac509986447c78157fa3ff7b4b0c473a6672eddec5439244f7cbf5dea2d" - ) |] + ) + |] ; [| ( f "0xdad055d85868010405635618eca72d99b0fe6f6310d27d5154f7188b9a5ac60f" , f "0xc2f1797cc25efb7ecade93320ec3125b223be06600863a067016ac90643d8d3c" - ) |] + ) + |] ; [| ( f "0x902310e20bacaba5982970f4b04c9771aefb53597b99f13fd511ef4b72ec4a30" , f "0x3c4ea698d5a81f85410ca83e2d56263b24a2aa0fbdd56f669594121bd3c4760c" - ) |] + ) 
+ |] ; [| ( f "0x26140a0be552318d35516dac37a77b55e0ac44cbf49079a5620fd3e8e1c7950c" , f "0xf8cf969b67b3948d28cab732d8914db483c1176caaa1b20727dc18ebe7bd811c" - ) |] + ) + |] ; [| ( f "0x373e2318c131454f85b8d3447643fcb8731413518a94532744e6c5cd08296d28" , f "0xe55d57c4432cdaf35f99a8137e6901032d81222c66e1e73853a80f3e18935404" - ) |] + ) + |] ; [| ( f "0x569e41418a1e96f3bfa3e8f4e5d8915c76e803954ff9fb93a85588e1b6840a05" , f "0x5ffcbe0d0c050a53588347e3f243b8e1e695c92aae24ab8360339f01d5bc6927" - ) |] + ) + |] ; [| ( f "0xaba92a2c94caf75d2862abb5fff8e094e4ca5008ed75eb8a2f7fe43adf7d650d" , f "0x8c1e897ac0d30b3ccf6b25f5b202621e4b6cfb318b0a4d38c62c3a2b99cdbb2e" - ) |] + ) + |] ; [| ( f "0x2000793b0b76d699ab341c9ddb583d124a4593b54bb58635a44c86459bfa3e03" , f "0xa0ddb309403342984895ed7efd25312cb4899dded58177d982e9d2c0c4b3e931" - ) |] + ) + |] ; [| ( f "0xea9e8e4133140b2e49d921d9072b7cbea297e0805161c7374f73fba3b1afd715" , f "0x4979521edee7ef222e4ea8cb6fcc63e61ec4a640d1ca8d2431e4b9a19881af28" - ) |] + ) + |] ; [| ( f "0x2f7aa49ccdcdabb51286d7bfc7787cfb32f6d955ad47cae4327a415db2ad8538" , f "0x66005a79182b9805e4942049d529dcd2d0bda09526253151f68e9cf80705a535" - ) |] + ) + |] ; [| ( f "0xb4f6d2d03251dace36cabfa44a7d2275a3bf05a14600973bdf20f93d7e31c308" , f "0xfc6c2cd4ae198d813e7fcc299579271f8ee841860262f91d1918cb37349e910e" - ) |] + ) + |] ; [| ( f "0x654cca6a117f9f5860ad7e8dfa32f5144083e07ae1968af878194c7455f88900" , f "0xd874bc4c55a8d648cb748a3e37e4bb962daecc5163d69c1d8cea346c1e97e73b" - ) |] + ) + |] ; [| ( f "0xb5ea7837d7782ebf45f35ddc6bc95273deca8a90ca97f59ef0aea732d2fb5a09" , f "0xbdfed38d6e777cc152c715cbb3555c84e263bba7eccec30bb99ee05cf973791d" - ) |] + ) + |] ; [| ( f "0x9607c3c7b350332d715fa1e06470ad557d9224210170f8af2ab40e9fb607d727" , f "0xdb0e726c41ed426492dd0bcc6b76e4695869b4ce12818def5e989023db3c8f04" - ) |] + ) + |] ; [| ( f "0x1976e25f979180de82493656a057a5a8b44f2a114ed3abccdc3610e0369eb21f" , f "0xd7606fe163a91e8ca84eadc00f0a950c3b3f89c7e96ddfe77479d404d22b2708" - ) |] 
+ ) + |] ; [| ( f "0x1fd8797a48af2d46a913d242238805af456202fdbfdb9c64637e8c6f818bc811" , f "0x15367325a0aba3b3399cbf0582e739c21d416f0f03c6f6dd55dc2fbdaccac617" - ) |] + ) + |] ; [| ( f "0x46029b7077c0df0883514f2b32a832d05a976f46b2a2205bb318db52f335073b" , f "0x166aee0ccb53600f80581b23ee16e491c5194d22fb2bbfbc43b9ea277b1b1034" - ) |] + ) + |] ; [| ( f "0xf8d85bbba1de6d9e48998f1ce3cd67dbcf67378bd7bbbd2e72efa9f39201770b" , f "0xff5fe04ac45770ea06ace63d9ae9da660b8a6d59b397ce43f975089863642306" - ) |] + ) + |] ; [| ( f "0x470597f4336afbc360160f36fe993366343eee0758e050441e97458bb3518d17" , f "0x6cf604c7baff1f07d1966b9356fc8476d90e9ec5407380b3201a1b3f78bf091e" - ) |] + ) + |] ; [| ( f "0xea9524cbe28df24f912a84aa380203b558f31a38d0b7041a034e967561874808" , f "0x54256f6e67cabe752a597e8ac2da6d1868c93fbedc7833b5a187a95ae6c13e05" - ) |] + ) + |] ; [| ( f "0x83ced11952eea4470bb38822177c2a792f95b4fd4b8dc89fb9919d7e65c57803" , f "0xde93680bde4a87f358b57b84bec225165d6f8a6390707926bb097cba2dfaf31f" - ) |] + ) + |] ; [| ( f "0xf517f1e420d2b4f1964b319066f09dd7f42820e5341b351427ddaca8a794ee3e" , f "0x5bcd0930bddcc38bea7e1e845d27e7ef544ceca1eac36309814972d04575aa22" - ) |] + ) + |] ; [| ( f "0xb59313bb0e4d408b7f8bca45293ecfd2e7d7bf0800d6e79a40e6ee9af227be01" , f "0x5e9a1947ae872fa0c7678185af810f5c233075c9e57ced9fa5c88fd90b830d30" - ) |] + ) + |] ; [| ( f "0x070da2fbe8a4af2f75906c0157dd4bfc63a19f24377718592cf3794cbb5a8327" , f "0xb5ee3bc538ed05ebdec60e06e4d94629d10e9dd98af91e2ad2ca8732ae9f3f0a" - ) |] + ) + |] ; [| ( f "0x2d2bc7a472a46572e0dcaea55cde6c68db499abdbb2f9af0158419d5624aef0c" , f "0xa00158f71c393ef19a7e71cec259829dbcdd6a95975ab6b3f6700a805480693f" - ) |] + ) + |] ; [| ( f "0x00069bfe22d12025f9983c9dc867948af76005e71211b9b6c8746cce85cbd81a" , f "0xecb667858b62314667ceeecb8d25a47166762c59354f224c8ca7c8a62c596907" - ) |] + ) + |] ; [| ( f "0xc91489cf51c9e2c58c77648e9c5030ee1672f286844714de9fee491fbf26f517" , f "0xb91d38af607cf568d6bd2c98862ee4aeab600c97a8ecb434c92bb1e25892ad38" - 
) |] + ) + |] ; [| ( f "0x2b03e744748005b0be1996a3a146c5754940261e391cd29ae70a8173cf11c43d" , f "0x532f58d2030f968cb160a66a55e252d19a95bcb5ea7f17e3adbd2a22c8505505" - ) |] + ) + |] ; [| ( f "0xebbe1d61ce3e5f1cda81864284c14efe40525a0277f1ea8360863d38a936dc04" , f "0xadd3d0d232fd67005646cd8a25f5fee4ff465ce2db4a4d9a238214e2d7183910" - ) |] + ) + |] ; [| ( f "0x69a42387d04b6f0ec195120bf6e3584168df907a4da572f9c69bebbdeb2d7609" , f "0xae5fccaf3e92496a2e4d88ba8d9094ec9634f8f04008d7ae4f0b135099c65d33" - ) |] + ) + |] ; [| ( f "0xbb3f67a7a44bb01a5c22a9809e71157cbd6357acde0aa478b10940ee690bc715" , f "0xcf3783f2559f4fc0b850a2767c20638637c4ceafeb6e388c6e5dd109cdfd1419" - ) |] + ) + |] ; [| ( f "0x262961502ae0802ba81ed5c2f7558864c34d7927e386c9ca72c347a88ff0f207" , f "0xd9eabfde9812031a39d74b30408fe9681acb6072da02a1c987b39ab1ae33b326" - ) |] + ) + |] ; [| ( f "0x8a12f1a028137b54f048830d31207c9ffd2a31ee57704f7d3106b4860e610c3d" , f "0x8f1aee75db65c004b82ccd39afa760e7d33b585d04e6ee2318c5dc1645e76508" - ) |] + ) + |] ; [| ( f "0xc772bbd119d3daa3d1403e0e8ae6a2cf1aab4ae790d7c6e2742d89f5d049be2c" , f "0x85ed6a84c834fa44e96bcb2072f459c19c67c409ee17f7a5d7c9376d19e5303a" - ) |] + ) + |] ; [| ( f "0x595dc3cbc3a741d74cae681aca2663a2ae4d33e17374505eddcc648abcbd501b" , f "0xaf85a0972900c5f5d799f6202851eaba50f5efcdfc843ab686a3c876e3cd291f" - ) |] + ) + |] ; [| ( f "0x42c08e0bcec0bf9105724f2e2a77f0ffc7c97653e9dfdc94d59d0968580a541d" , f "0xccdcc316a031803a392146b64e47fe2856cf610e8ef800c5b96c3c082f588a21" - ) |] + ) + |] ; [| ( f "0x4f5f877ce3ea43f723f596ad82961a35fbca7bb62c05cc7d65ddfd9f4dcf460c" , f "0x5ac801bb4d7157966aacac21c40eea06bbbb035ee89a7fa76343d661db6f1434" - ) |] + ) + |] ; [| ( f "0x1cad9cbdc4a7c2fde4664a8daf5ad010667cc9f3c0aa632cffea4c1aca0a050b" , f "0x5b7c2bafb2df76bc71cfdbc6ef3bac25d98c5a0d6f4073ccd7af2261513ff70d" - ) |] + ) + |] ; [| ( f "0x9c73832ed96bcfeb287ac70fe66917a5fac9bfc9ceeab4377f500d1f2b054d1c" , f 
"0xedfb41a602ee572447cc8e9a932e2d397c2b416a6e565efded0b804010a6d035" - ) |] + ) + |] ; [| ( f "0x2d694376020e7cb7298ff9c6f8f38b9fe898fb9872a633fcb4b6964739bc4205" , f "0xa13469145e48bf5a94838e5f0aa1d5eb40e29cb342b39e5f32c588db34faaa32" - ) |] + ) + |] ; [| ( f "0xd2ecb9a4612db76614b94fd18e62ff2e64b78f39a3942561c343853befc43c02" , f "0x908eb13fc4d70b276456ecc59eeaac61897d224bab66a23e2b9b4b0c7d07d725" - ) |] + ) + |] ; [| ( f "0x967eb95445ce04c8939bfd54f0988e23c2136e534e71cb2b0f8bf8a849c23233" , f "0x7a67a183d7e4a640ebe1a2877ab31c3ddcf627d0921cbe07cbbbcc39a61a9d12" - ) |] + ) + |] ; [| ( f "0x843f3f56e80f38a089d3c4eb38d5275cd6f83d5801b1dcafa0ac31f1be359634" , f "0xeff219c808c78baf4ee675f3a8b86f6ac2ea2e5df883a8af5c5258059a079a01" - ) |] + ) + |] ; [| ( f "0x5b9e109039bd9f6d42f48766ba6821de00c03ac0b2d7a51c442dd7776daaff24" , f "0xc62c14ef8e170fc8aac50ac81375aec926fc063cf064150e12199c2392f12d08" - ) |] + ) + |] ; [| ( f "0xe36fad0558aceb78f800596b168be7c3f83fdcf3f197d1ed1c44e01f92011a3e" , f "0x9c74a08f1fcd7972d4def59078170ac8bc01587edde92a027c4acb2ef7556a37" - ) |] + ) + |] ; [| ( f "0x533eca20a07446d3b2f4a085800812a33fefe853ada0951467b6f329a5f80730" , f "0x85db4f6470eee08594aa51aa570fd5df87253682bf8d59e86edd19981efaf02e" - ) |] + ) + |] ; [| ( f "0x0effbfc49fb4749a2d025b09cccfb1b23b0933353342f98bd8110c798cfbe228" , f "0x564afee7cc8ae8c2fd51f5205863b4f53989dd0499d78707f134a624d429ee0d" - ) |] + ) + |] ; [| ( f "0xbc920a6c5e7451bd543a924ccd0009a15a176c4ca15a7b7651317f07621e0a17" , f "0xdd3e25b9758fd8a9dcaa1da4e1c561b13bc48aecce64e9855263ecd0bc31ed1b" - ) |] + ) + |] ; [| ( f "0x9c26bda3207e85fa3d2e212fe40eec22f2694ca714a5792a2faf8d32a9c10525" , f "0x9f9e199f2356a734ce7b34ce7671ed8bab162a42a1cfbed68dea0e32bc4e9c2a" - ) |] |] + ) + |] + |] ; [| [| ( f "0x43c64f165e64e2c4e56dbbc29f33ac153712d7376667817e22f998a7c471322c" , f "0xed2d5cd5bc2710d271b12cc764f42f8ee3279e0045430b47c592c5228202710c" - ) |] + ) + |] ; [| ( f 
"0x4a3ed6d67d886fe76dae29706f5e374865cd014eed1f337b0da53b06266f761e" , f "0x423bdcc9e52c3ceb520340786d65f92e49e98db9fab26a1a68788289fba2b23e" - ) |] + ) + |] ; [| ( f "0x93a320ac297ca59bedf024ba5e1a09581e67d00e30786b5fb1d64522bdfc7808" , f "0x32fe279f98efb5e78e08818671dadead0c3f18af5daa7c3afbb51f9d9f14ec2f" - ) |] + ) + |] ; [| ( f "0xa5e1d6c5be9d097b4818c592406bf0f74dec72b2b97044df8287c75f6e6ee82e" , f "0x787ed0f3d87de780f5e01d40ef2e90c91f03bbf0fe6a6e594673e078a9fbdb2f" - ) |] + ) + |] ; [| ( f "0xb9f7a916a1b53bee204b76e645ab4d3cbf4a891954e06c4b2b4e3b06cfd8120a" , f "0x0655321dcbfff2c69ee793ba7cd617a9198aa1395d2e4a8c69cb2d78b630432d" - ) |] + ) + |] ; [| ( f "0xd7f44396d8d997c3af4935e3460353765c6605987189f2695fd96f2c138ba02c" , f "0xf2789144911c077df8e548b5165d52b25cf49f2dbca0a8754d4ed09085d50f35" - ) |] + ) + |] ; [| ( f "0xf2a26215aa6a6552224832f7eaada262009394d03cd53498f4922dc644e73d24" , f "0x84de67d0d089dd666305deda05845e2a3b4b1050a2f2ab97a1f85789b7b7f431" - ) |] + ) + |] ; [| ( f "0x0021270e543ee9e2d8ae3f2ba1f3d31eeb7d0fdecd5c9cfd186c981e36ddd426" , f "0x071d21261487d98c534ef76af98ca869f1c4be57f96ab46a8d71a4fdefca833f" - ) |] + ) + |] ; [| ( f "0x7a8d1ccf967d9b6f6c5ba48e62c1bd26eeb75a6851ac808a99326675d67cd712" , f "0x9332c6bb8ffddaa49694e42c59908c68f697f41e23b4de40c30d7db91daf670d" - ) |] + ) + |] ; [| ( f "0x9b262f239a013d034fdce2704c6628b354968c7f9818f88d3d21f1532fd66f31" , f "0x87f3e84c1f2cd027443f7a62bd9be7c5da7c34e1830e492aae0acb2e6d09eb28" - ) |] + ) + |] ; [| ( f "0x308dc1a777da3ae58adb6cb34572da1065f59ffaa152ae903e2d323da1f19b0e" , f "0xa6843b22716dc6ee37c5b56a9abe0375268b543db4f50a61fbe3f6cb8c860c3b" - ) |] + ) + |] ; [| ( f "0x60a0c390831bbf1709fd106e015f1b492fbc6c33f7d5823d9007e2dab08c8121" , f "0x60e49db4e61897c134ee33ac9349cbe0b613cec9823182f73efffb93a886b637" - ) |] + ) + |] ; [| ( f "0xcf6ac576afa6808eacbf205b703a2fb43423dd415df9d595f802778fef37ae3d" , f "0x13f3e14f595d23ad77971270090a1e435843e0c29173d560a1b6c7978ead3f26" - ) |] + ) + |] ; [| 
( f "0x51cf054daab1ba669d1e9f92bf55be2668c47001211fe3eb2eafa42923d6f714" , f "0x961e98b2796ee5aa857f6bb86baf1e09b1764063b07be92e143242c4d2b0f60d" - ) |] + ) + |] ; [| ( f "0x527ea62ea5eb9d3d0d406e3247d3415107b3897914cedc90f594ca6f14c3be11" , f "0xcc237a3ab66adff34dca0a3640fe348ae0d5a4c807370ac15c8c2c70537fc903" - ) |] + ) + |] ; [| ( f "0x672c027181d547192fd5373cece45404ecfe6d733129136c220f740070ebde01" , f "0x9e9cce42b9880d99b75664405becb0663f5fc141c6309a7f62f8f0547f70e91d" - ) |] + ) + |] ; [| ( f "0x5130cd0508a3064a34a76dad0891d1c3e5dd024792ea3ada4bec967a0e02bc15" , f "0x169051f2d9f5322f711ac6f389ae61c832bfbd258dc3aeb7c86e12edaf22833f" - ) |] + ) + |] ; [| ( f "0xbd8396cf71ad4d12fe542f2dfa6ff8d0c665141b7c91ff9ab21b1b1338e60c14" , f "0xe4006b0d384ac42cb2e364a086c94f2f72603ab3d6b68335639d403d6a528426" - ) |] + ) + |] ; [| ( f "0x9cf6c9ae5bb19c04d5135e19ce44ae39faca148be4fcd88386a9f9d70fd97512" , f "0x46289c008ed01d2271fbc8134b47c7df94ca23a30588e70c77393e2e868e0b05" - ) |] + ) + |] ; [| ( f "0x948f2aa43e993b315f6e2b2fae58e0911613a290678e0742cb1bdd25890de40c" , f "0xc82f1d5cb85240a923b40f85e358f47dc511964957923512b765e51e8b318b39" - ) |] + ) + |] ; [| ( f "0xdd789f84ceadf21da736125e8d9066fa696743b685073bb357cdbe15dff89716" , f "0x8d76c19f97682fd4b9b904c539dbe1edbe7c1c6b94d030754ea5819b37028b35" - ) |] + ) + |] ; [| ( f "0xc16812888e2843e44afde42090eb4beabe54e5fba5d16768b869af1ba390c924" , f "0x7ec489e4d3849c9bab1a8c0e710ebe290636a8098c743d48a90ac020710c2a3f" - ) |] + ) + |] ; [| ( f "0x36544a3ae6531bf9898424dfdddad604c0f4376d515a62f6b6431ec3ba1fac0a" , f "0xf6b28377e91c3efcec9c6f9840fb33f0ec644c03b9016ca7cfc9b30994c9e811" - ) |] + ) + |] ; [| ( f "0xaffa4a156249dd00e86788174f44b2c42769da103765eee38f225f2be8286f35" , f "0x36e60873b4da8f391bb4ea7a9e4a5f53c479ce2997fcaeebe2d859294485bb1f" - ) |] + ) + |] ; [| ( f "0x1557ce1568bbcfa5834a6158ec836ab4b3e8dedf762d80cadc4bcc140d269c1b" , f "0x90e2a0aee9b84917c67e558658f3a1d1a0105839d9d75a908c44504f98439d29" - ) |] + ) + |] 
; [| ( f "0xd8823863102901fa525a0d31d737ba904b9a70b2db5794be60e025e0b9cc8e2f" , f "0xf6360566af88d9e16579f759652146623eb9503aa336077ccd4162634fcc2f05" - ) |] + ) + |] ; [| ( f "0xd9a3f038721185206a103c0a0b6127a87cab38b6d3dced0fbf25e8d27dd7a93f" , f "0xd2b3bc741e113cb05bc129e77ae0305a1132c279ab1b5c9d0d759d4c3b8a3e21" - ) |] + ) + |] ; [| ( f "0xed22ed82b9db80eba94671b47c8638c1cd29e650f184f38939b9d553ddb57016" , f "0x83867146f6a364baab9d27e44588de2bf821865f0bf88c35f8ac1ace98714532" - ) |] + ) + |] ; [| ( f "0x01fff576c61ddde9c1677daef794a17ab725c3b77818c5a20f98848d81cf4227" , f "0x72761c88201a8206c11497e2284da7c17d5de14ddbe3cf2e245c7a89bdbeb834" - ) |] + ) + |] ; [| ( f "0xe1d147da11c45044223b2683b3b00c52f6009d7ebcfdfbae9259c491987f212d" , f "0x0ce12f0fcb9b9c6fe03c60e95b81d739ec98534de11958b16eb5c8ff17d8d637" - ) |] + ) + |] ; [| ( f "0xb4a2db2320c2261a6c74cb1da219749f073a90a51b7b5d48eaf5d6df43d32918" , f "0x645dd68f2b508bffcc3fc4736caf12bd4f6483056f07a35816e3b40c0e48041b" - ) |] + ) + |] ; [| ( f "0xf53496e9ff6778bca6617cd101264b6458a205745343fc6127f4fa79df4e1f3a" , f "0xa0168d34f3f20e2f4f94e61535f915ca30646c6ec1d2f2f91e345f0760787006" - ) |] + ) + |] ; [| ( f "0xb3b1e6fed5af9e421b27b6cef9c60a615443fae5699171070fdad3e53a2d5a16" , f "0xcf894a36e6e5fe42644eecaebe187977de1ff67f071b316240ae343bbf3a922e" - ) |] + ) + |] ; [| ( f "0xefa34ccec236a7cc3e68182b3b9d84b69f8233a1757e75609651cb41a9868018" , f "0xcba24d1b29572ccf5398ad493921814ec915aac1843e4fc34c24652ee16c0a15" - ) |] + ) + |] ; [| ( f "0xfb6d1671f2725789b434ba1fc61964b2e068adeb7a065ddcba722aa4d2eea102" , f "0xd8e8d013cbab8310eb3c9d9f440924bc78da0cc1db077a8448b8b9ef180ce30a" - ) |] + ) + |] ; [| ( f "0x2af77f324c29813500216cb38adcefe3249f5876781e4dae8a680690b3834032" , f "0x518bb8f2a26170346e3c9536cb9ba1232ac370fde0e057227c7cb7ecd6c85308" - ) |] + ) + |] ; [| ( f "0x5a8a609eaa2e5e12bc28bf88b79086f5602194a449f6ed1b2253a4981077a309" , f "0xc68479d1c6effc1d07dbbaee4f35802e932cd4fa1260a791070e8bc913c5c333" - ) |] + ) 
+ |] ; [| ( f "0x4ec775ee8028ee50166416643d42a61e35281fc299a4be7c05176b8d3abcca19" , f "0x69d63c09bd6f353689fa7e220f563ab6f29fd81367f62439874242128126d519" - ) |] + ) + |] ; [| ( f "0xe968dd1203dc7fda02febcb159b6c93ef7ab93fa8f9bbcbf48df0833ba02d93b" , f "0x64904b20ba671f384bfa7ded7309a0620692b099b63e50e82d4d6091f842f720" - ) |] + ) + |] ; [| ( f "0xdc8d32906747899e5c0d4d3c1d4dd43877f593972b36793f77efe28c578e5023" , f "0x319cd82139cfd3f670680d1e670dfd386008dd54544f89f6e2f04482c341b731" - ) |] + ) + |] ; [| ( f "0x0fb21c7145defc20985c3dea15c3008a678b18e9a8beb32b9748e9c73ef9e636" , f "0x2462ffc0e4934bf1195a2ddd6f0b17d8cc9f0469831fa400c6157a36dd392d36" - ) |] + ) + |] ; [| ( f "0x4a3eb750ce81fef11ffeed96c84dab2415867a02e2a72fba7505e0623cb8e92a" , f "0x02dfd7ca7ae7afac7c667a6bd6c1b669a1385de2cf3a8c019c46ef0dd0b38728" - ) |] + ) + |] ; [| ( f "0x8d58dbfaf5ca73213c2ba2f05d958539f18ff089cf067bac3332bb6bd2cd6417" , f "0x5ebf023b1ca1239aba0cd26b7bf1d70738f40cf3bc89d45f07248ffb30e4820a" - ) |] + ) + |] ; [| ( f "0xf524cb829892f7b4ff288e8ea69df4db63678ca53d6963975a5ecdfeb68e893f" , f "0x601c4f8803cab26500a1e195a127f7c88f301af5e83cda6fb40f465db195c201" - ) |] + ) + |] ; [| ( f "0x6dc036875d09b30417848cb0f7c8039adeed2b51f3651b4de1c447a5a646753d" , f "0xd342c424463f2e9e870b5ad29876ec97e45925322ca16dc0c46d9519927d6b02" - ) |] + ) + |] ; [| ( f "0x3371b241ce18c2fdcf224b99478e639983f4fd2c8acf12de6e602bad621a9515" , f "0xd103094419edc57033c20bccce896038b82ed8734f86798a7d17f5602628ba22" - ) |] + ) + |] ; [| ( f "0xf1a2ceb4ea0100211173f100d4fbd34eb0d1c7902e7cfbe1f8da50e5adf4ac06" , f "0x1088eaf878cecdd0caabbc7c170218446b5da8eb2d628f68139b90348df69007" - ) |] + ) + |] ; [| ( f "0xd22584501d4365d9dfc1e791e87b359530690c6bec4365496afde47ba2064016" , f "0xe8e9d2f27f82dc3d298b95c78fa45b342f5617b4fc7db2943a4f9d9041e6cb34" - ) |] + ) + |] ; [| ( f "0x18483c04fe58294610ffd8eedb0b7ee360fc28237986473d237a0b3c3b00502b" , f "0xa4f87e010563d657d7a28b4a762e6b283323166ec18ec7d34092dc58501a0f2c" - ) |] 
+ ) + |] ; [| ( f "0x23d99459197ab3326af1ccac05e731da7957a3bdf04c6bfcae90d7cbcd419816" , f "0xa31c33f992d2cccab8b93289f605cec3d06acf72c3986850c3a603b1b77ddd01" - ) |] + ) + |] ; [| ( f "0x387db695e2af6e199287f42f9d0a4b3dfac37b7ae7bb04d15b4ad9fcd61cdd3d" , f "0x39fba6bbe474abdc9ace752f1e22435144551afc94411645b4d9e3e27271bf36" - ) |] + ) + |] ; [| ( f "0x831c63464329028d112364ca75d06155b586baf0b7530805c6af493422c00b39" , f "0xcead90d9c5c9269cc3eafb8b0b35b3bee8a67d07b75af41b526ae5ed47b9c530" - ) |] + ) + |] ; [| ( f "0xd669815cd45deceef1009128283ebeb8034d18fe9ed3a0c04d0756c93570230a" , f "0xdc05d2e5321401963a948b39c767a6204844dbe50f6df9f615547ba2e5c9ae11" - ) |] + ) + |] ; [| ( f "0x6b9dd83de050f511ebf945097f5a47db26b8d710dbf60540bce794e1a590e20d" , f "0x94377f9a01ce3506ee6362d68e69f2469b86cf7e440fbbd50890e7faf172d523" - ) |] + ) + |] ; [| ( f "0x050f59874eb3f0e1858123642fd6fc2e3b85561f8ad2084fd89268654606df30" , f "0x28ad07ed0f81d83a404cc3d7c0c4aac35feb7821282ba52b683de2108d91be25" - ) |] + ) + |] ; [| ( f "0x8da3c248031b3b7480030705283d7da7ad26c6b8860a5d9b6e82061030bb190a" , f "0xf46242b243da8a382c36eeb67dbf60b8f7e2fbe8beb2c3cddae1af00fba47e2e" - ) |] + ) + |] ; [| ( f "0xd64686853626a7d451c066c0585c9ee55521a1884766f909da2a5cff44a7342a" , f "0x81bc0557dba5c9dd6fdb3943d86adf3fb0a4ec93af5924e6fb70a724088fc625" - ) |] + ) + |] ; [| ( f "0x535ff27103897aa706606bb8c960a1597cb02a394f95ec94684c4a34e9f13113" , f "0x453772942f6ed24760ab2c323f6fb9514fc6f3bc426741590809541b1cc6db36" - ) |] + ) + |] ; [| ( f "0x95c481e2326999eafe4df651f44030aaf614ed02090e4e0213b4d5bca929410e" , f "0x571435313f81c25d3d2c0a573913b5d8c22c26a9ec6362450248cd126fac2531" - ) |] + ) + |] ; [| ( f "0xda27647634eed755be2a69a6094cbda67ca8cca5f9ae3d0d1bea2d9ea15de60c" , f "0x510e2b05fa8b8b158a94e4c29e53e584c511a9fd415661cf89921bab362f0c1a" - ) |] + ) + |] ; [| ( f "0xc71dde427eaec06f392c755b18ea52de4a55f7f3d1acd5e884932e030746ce2b" , f "0x7a0bc40f2777218de34a9882a90b661dbdcd6d0165685b2841ae1262e67c6e17" - 
) |] + ) + |] ; [| ( f "0x015730ac9b624567c93973276dd047c492553f0c7775bf42ab33434b803e4815" , f "0x28eb26e7844fea61f17df1f057d1b045985a0d903f9a4ebac03b92127d247a0e" - ) |] + ) + |] ; [| ( f "0x61cee950f13d9385781e16b26928a720159adf3e7e21cea135b339837ffc8615" , f "0xf9f4e1149c40450378455a2334c60bd5faae86c87932a896a85ded7a3b2bad00" - ) |] + ) + |] ; [| ( f "0x1a64cca7651003fa3cb88f3f81ca53d38ad09a957d894d8fd8e2806de07f541b" , f "0x5d319e09620d96bab16350fc8ae86cf742dcbacf3ebc4780a214a7c640372d08" - ) |] + ) + |] ; [| ( f "0x47b93cd33c1128e960705558462142209395478e2e4f23d516806f11b1fab427" , f "0xaaf79875daeb7e8d0da17a321156a639e2c8556faf4e4ee2f673591e2f737504" - ) |] + ) + |] ; [| ( f "0x2655eb0cc32265df65a8b64330cf6678b0d11504b5c395b38f9e27e967833418" , f "0x0ece12b378f20ffe3e7d046f018892efe4e4ec59dc47ccef835207abd0be8c25" - ) |] + ) + |] ; [| ( f "0xc6a4c2cea18c6b325048a82a6f361a8a30af3f82be7afa9e099f3061748f6a11" , f "0x291ecd003b42671a812363c6341e4374b04384c18022ca21a096355fcebdd433" - ) |] + ) + |] ; [| ( f "0x483cfe8655c1b7e68f7f192a41fb242564521a8f11506a74804a3e59f7b67f21" , f "0xa3a43d1a46104a4b2e0c26485a16b49a29fc7d36611fa1d39b2c2299b2650f10" - ) |] + ) + |] ; [| ( f "0x0fbec0813b0785253edf05273f50b89823e6479ccc4c4b4c2036e62c1507ff08" , f "0x9f00cc4c3f3f9912f8832397e34486b3f8f7fd27684dcd4568d241f45b0ecb17" - ) |] + ) + |] ; [| ( f "0x3124b7826979a9a9ed92023f6f46fe8f2bd0c411ad183c1dad3e6a77b961bf26" , f "0x3c961af9f770829fc7166117c629ab8bc2b9c2bd797a4a903d884c0450339e11" - ) |] + ) + |] ; [| ( f "0xe5b54208f6348200590fc1d3bd498c775f4cfca98229c79331f9e194c18e101a" , f "0x2e1f9a19d336426eb5af0c6b91d249b1ffdf744bb8a7ead4d8f220883255be26" - ) |] + ) + |] ; [| ( f "0x732708fc475f51f1670837af56058d54b7acb4f7206fbc330466bc5474ca6206" , f "0x28c65f6d897db8fa019d38d1584f5e8c962e8c283d5f27b2fe9c5be0b172202e" - ) |] + ) + |] ; [| ( f "0x34c71e829a591ec2b11d283f6660020481854234a432ade346038e9a10895d18" , f 
"0xecc2fa19bb314e94c60cfb457c0319b6df5afd79ae685dfbe056d5daae882d3e" - ) |] + ) + |] ; [| ( f "0x075fa82a463dceed409d746f0c1486e167ad5d34b6148ce441a8243a8c922a35" , f "0x2124eb3e000f6571e3f71fd6a0f253d16001c5e65dec520afb3ceda3761a1a1a" - ) |] + ) + |] ; [| ( f "0xa0fb6b37ee2633bb40f7bb63a10ba877506a2150185c187716b09e5e237d4d2d" , f "0x8404410bafba47ddb71309b5b9df275bd0987ca2ad8f0bbeb5620131dad0d429" - ) |] + ) + |] ; [| ( f "0x59bd6f528299ebacd40b62bf1abdc04fcd48267c26bdd782d9e7e9b6e6a6392a" , f "0xc9e1e64830076475074a18d1ceac1e97631eb863ce22398d41510da866fae834" - ) |] + ) + |] ; [| ( f "0x31224ab6904704df98a5fa1339934cbb10b874620fe494d5d5795fcc238d0904" , f "0x3e0ba4490fa45df6b8629d8532950bec4e62c0076ff860572e198fe7f26d1d02" - ) |] + ) + |] ; [| ( f "0x45273c13fd407e59a30ba1539884a341312502c55b1fa63c7da7ae9a53a90515" , f "0x8f564969c4e65eacc330db09682c97a6acbddb7a83a0673f494e32bbe47acd16" - ) |] + ) + |] ; [| ( f "0x8ebcc82939734d324c1ad8a29e4bde4ca61d76e1e0f76072d26faf5beaf21a0e" , f "0x9440c5700c1f4c93e84f0673e643b0c2ba1d67d4d1120be913510f3c442b2007" - ) |] + ) + |] ; [| ( f "0x06cc8baa53495dafa212828ebf438c2d72376ec618d51b089da873123e030b18" , f "0xe7b98001596ace040f2c7c6f7db7fe87a3e6c52c9eed0c3a7b3c559167f92d1c" - ) |] + ) + |] ; [| ( f "0x89fd2f959cd1ae7188fddfba149d11f690854990edb84b24f7dbd9283761f31f" , f "0xb8dfae45448aa4df27cf4d4a50fe1371ea6f8647e81c4de28772bf78e09d0339" - ) |] + ) + |] ; [| ( f "0x1ead27fcc4382eae117758368ab13451b710a695d54be7aa77972263b9b1b624" , f "0xadd3afc654bd3cf2bd5bd2f15bc68d2de6b84faa477ab9ad0daef961f5721529" - ) |] + ) + |] ; [| ( f "0x62448d22bf99a442d7b2ff016bd98d36a41f2cc1d6fb37ad977167ab8042a02f" , f "0x5549cb4176e08fc4c9e57f62ef4197dc7dd1339d06ac0623d6a9b571ece3f80c" - ) |] + ) + |] ; [| ( f "0x439d18a7399aef887edc225c49574d78799471c64ec23a49bae395715f8a813b" , f "0x790a3cc5f7de2a706059502bfb2abcd4a2cbc3baaffc2746f0fa63484712f500" - ) |] + ) + |] ; [| ( f "0x1c34a556da80eb8f08097d10c6b3bcaf763fc4fef1763c8f0cb02d353ae34e19" 
, f "0x1d27e6579f41fac1722aa42cf0c8a98338c94b9d15893d8ef97bc37cc0a6240d" - ) |] + ) + |] ; [| ( f "0x25eb92fe8cd0f30b9094add1f0bf3cf4df8e914b3ad5463e18dedbef03791013" , f "0x2f2a9991f2ff89bc88c32fdb694533b0c1debe3d025f452d7ce86d689306fb1e" - ) |] + ) + |] ; [| ( f "0x87481bf6d162842dc45a7b1f11b483f0e66b67866db820e8f594ffad2fe18020" , f "0xa6affbf8fd8793613c7d0ecb41471f57464f11468462f8d406cb62c619935b14" - ) |] + ) + |] ; [| ( f "0x154cccea81077c133b1d7c7e9173562fc7464e0bd4d7f8dbea714c4660a4660f" , f "0x352f6aacaa8ca4c197cab054a6bf139bbfd9eaa1a59efcdaf63d049440c11f3a" - ) |] + ) + |] ; [| ( f "0x9db4cf8b15e7eb2d44924cc4eefaaed701690064a0eae724fc6593aa3df62d0d" , f "0xfa0f80fca2e79313fb0e3dcee343e2e42995e0bc52076d187d60b731861bea37" - ) |] + ) + |] ; [| ( f "0xe42a4b3c4c74b26d790d52d8230e18c0728765d90b661233c9d680b79ee48220" , f "0xf131bb5d18f8d079c9210a59fc345a3b5b0f964a0460a07de0241d7b71e4b327" - ) |] + ) + |] ; [| ( f "0x52343da93a62efa5cca6dc7877d80602659553f4936fe8b12fdac5ae2d716235" , f "0xed9688bee2e6a97472a4989a43b257e1e02c1494c25d88cc1d6fde0c29069127" - ) |] + ) + |] ; [| ( f "0xf3cc6ac0dd504c3121d655b711e667393a892f5a13db34cdaac7b6f06aab9810" , f "0x92f2f9e47f0e681d66ba46288954e57a5ebfa3227acb9da4281590ebb9e64d30" - ) |] + ) + |] ; [| ( f "0x06ae4012eb21e5c32d431d0e4aba0426a73f9caa211c1d20e7ae8fe3b1ac5d34" , f "0xe94da7751e130a48bd5b61ad7c6d640d2f1a3eec5132b160187b7f6652652c1d" - ) |] + ) + |] ; [| ( f "0x3e2590e77ef5b4538c833e85b94ef9afc8b1e58985adc143e2260043cf8ed517" , f "0x77308084de82438ef97d561f3f58352a7902a4b0df56f85399e1dd1e41e4c309" - ) |] + ) + |] ; [| ( f "0x2935799415e8a61eab5d4d47f4d852b6f2efc9abfcfa0f4146fc0c46283a1823" , f "0x80900817df365f76b5a227c904b37322b951e86c79479760cb0492087a840614" - ) |] + ) + |] ; [| ( f "0x9c490436f7e02b8fa1f6038c78127c8052fa98883f929432d22e6d9ffeca2b31" , f "0x3039473b375303c6e9cc61ddc263096eeba27a5a0ef7991722a58ecbd3dbb602" - ) |] + ) + |] ; [| ( f 
"0x8fbf9e7766b9a76cb627bf4a91b3132d31c0632a3fef31b2d6d95923c622e301" , f "0xbdb9fc28fb38a504ce40acf8e3f2e26d89429f8ab305935c4232c4cc08642b3c" - ) |] + ) + |] ; [| ( f "0x79c63f5f4e687e0456441def361b372ef115e660e929ca942da192f70cb6b10c" , f "0xab693422e530607a23e875e6351cfb8fb25124b0078e66eb425d82a4f228d503" - ) |] + ) + |] ; [| ( f "0x55911af1a491dd0392742f96e78e8676350c0bc52df5af6fb28d05149dab7c3c" , f "0x2d5ff261536319ee4a99fdfc0dabd8e7c36ebda325d938b4cb957e6b89f39e1c" - ) |] + ) + |] ; [| ( f "0x69c41464774c111c59518ee87d40f5a09ed21da8d16ca9a58464eb9ff163d63c" , f "0xe9f6cf421eca655e9e3f0371c0383f0d66e722a4c6959d92d5db7e8ae4be251a" - ) |] + ) + |] ; [| ( f "0x08ea860a3e1adcc1691abe6041d7a22feef1a1a37647da9318cf0fb198be8602" , f "0xf8a7d30f5f14984c9ff688ef89a3dea8aad4c48df4bc96857233b83f6f5f830f" - ) |] + ) + |] ; [| ( f "0x9d7a4530726a265a27c0d46b90a01b06798d5e1308a90c28677295bd16408313" , f "0x94a4539833697daf1a056acbe818de7b90f8200d4c68a28bf0d06bd5575f7326" - ) |] + ) + |] ; [| ( f "0x9ca3bbec52fbef074641aeae98cf86387beedb28bc6b483c450bedf4671eea03" , f "0x22f29db98cf2caa2482aad7c41f7365ac128f88e827bfbeb8728d1963379fe14" - ) |] + ) + |] ; [| ( f "0xd695143038a11e7c804413abd0a37487de9e8e04fbde123833817ec4f7e72128" , f "0x66764a89b3dbd011f7924750b3361862d28e17bc08dfec891a587826de593f24" - ) |] + ) + |] ; [| ( f "0x123c51636b20d3df632c80f3f9a23578fd3f54c34d7a225172f9ba6dd84f992e" , f "0x812ee9d1006224242ec2c697e1ce6636d50988b91e12c3b3a22677a2cb5abc1c" - ) |] + ) + |] ; [| ( f "0x2e98b559d6b22970fd0545844ffe93e8fc2fda09791102a87c2754b9b25c5a35" , f "0x7eb5a6f663ec95939cdc58a4852f1fba780ffc0dada602784443c4ebce886015" - ) |] + ) + |] ; [| ( f "0x50a67c5c90af64925505b974e93f2622853d51f95a81aa41b12349a3a4ba491d" , f "0xe2161fa6352eacc32fb0f63fbdfe00276492d710a1cee1e660c2118c35c7ce3a" - ) |] + ) + |] ; [| ( f "0x1606766be07b52303e1228a49dff2816719323ef33a35d1740ac2ac2e10a5e0b" , f "0x6b043a9ce072dc52df31e7f73a94a67591eff87b408be5e9459c0bb1e9ee6615" - ) |] + ) + |] ; [| 
( f "0xc7df1603a1641ecbf96851fa35bb8aa09c43aff142c109548c77857b51106317" , f "0x9d1ffc2b3684abeeaaf9cbfe40199bc5cf7e2fa8a45854cef90a1b2a8ce5ce32" - ) |] + ) + |] ; [| ( f "0x8a239a7995835490a4318c7e69e37e82299534cb6d6ec3888a0c703d3e8d9b3d" , f "0x2a0b24168ee6233ed28fe3c4a78f359bcce3110595d73fe5832032aa77461b16" - ) |] + ) + |] ; [| ( f "0x229dfd39a1ed5bb3ab6f1316e9becfe347778f100f22640abf1357c76334970f" , f "0x1cdf1bbb10630266e09f6a47667600ae72b2729145b64f0a16c4e055e4535c35" - ) |] + ) + |] ; [| ( f "0x421b1a39f15a2bd6ca3800dd236ef9f269dfe15f51e769e433c80c2199672033" , f "0x11872b1e6168797fb8f7de5aabb50cec49fb1e37eeb41978a2559d0e8ad2e11a" - ) |] + ) + |] ; [| ( f "0x46ffa1575e2b6bdbb177d1273bc9acad38bff6906b11d83e45a79a7579a48d3d" , f "0xaeca1f74c039ecab003010962be6dd87b965ac7b1a02e23d5de3f23e0483c908" - ) |] + ) + |] ; [| ( f "0xeb7d83d40c651a962f40d2851aadcd8b5afe0a1c4664405300c683a412012018" , f "0x8c76208062ec4e338cbbd2761542e22bb9ff66605f54fd316c4c21d4f2405a20" - ) |] + ) + |] ; [| ( f "0x1ad9c9f0cea83e92682a2de46d448010f5b3b713f3e1a61ff68f5f5e5ab3643e" , f "0xc1667cb31876a5b53e426dd0c175324a60c37b1a8b4ee74d37f00c5852d6cf0b" - ) |] + ) + |] ; [| ( f "0x3b2375e9f8121fb4f6d627428088df26d860424379544c9b5663e61de200ae00" , f "0x0db8ca53e1ce4a2c8b7f39f1e0550ecdb1a011ff57cfde45553ad636197def1e" - ) |] + ) + |] ; [| ( f "0x76c79f4b1e7e14c2d0fbc9973226def3ddb965ece647eba67c54d33428c4af01" , f "0x6368b36f70ea80dd00da1ebed4e10bc4473052fc1d8bddfe67fdb70c89e29014" - ) |] + ) + |] ; [| ( f "0x771f7cb4bc689aa3a1604ad1e6a9a93a759d7a31d1961dbc8e0afc8737415a05" , f "0xe47e6581f26e9e4ba4c5022fce17c2f7f9fb13a7ab66bdf54c512c096dc83017" - ) |] + ) + |] ; [| ( f "0xc1c0f9a54352ce9cea8da62c252dc2ac16903c0cc4f1df930f497f94d174b105" , f "0xac6c0896f41bfe08245bca46cf2fadce9f7694896cc9caaf14edd3ef1b258e3d" - ) |] + ) + |] ; [| ( f "0x9e16316949b06ba8350b0214c8856a2998cd88008a77f7ae0250d691b45bbf1a" , f "0xa272a51119ffb32b1dc53eb19a11a77fde0d718f93cc2dc86419f71f5d4f841d" - ) |] + ) + |] 
; [| ( f "0x1ffe3e205981d6611081f00475280b546a8c78f06d2e8cf515c8ba78f5e27515" , f "0x8e8c7e3a0008c4262d530045e859edb3e26b9141af42a120842e57110a114b3c" - ) |] + ) + |] ; [| ( f "0x9bca90db5c68308919d432c4df7af7ba2f19a545226c40606ef45fbc9e000513" , f "0x5e1d15f2763067f16dd254c4feda4316de5fd3c02308bf6b8956c79528566115" - ) |] + ) + |] ; [| ( f "0xce44fc6cb935176f4a32ef4a844bf75abfdfb466f9564dd6f61258309b8fc20c" , f "0x795359f8ecb245a9d54cacd910bc43e03c1701a68fde39148c4b095f7f004c13" - ) |] + ) + |] ; [| ( f "0x8693f1a74dcdee3ded3a448cdac967ebcf614922712ccc66548c0475b75c9934" , f "0xf9054dfb035cacadbad865d3717e60c2558589890af6f79c3724b795b72e0109" - ) |] + ) + |] ; [| ( f "0x01f551ff2f996b654e30329dcead8dfa251c83e696157468a5a91e0d5b06ab2a" , f "0x0f40cfa6cf3aa091ccafa40d75ee37d5b203df5e7e9f47afc50f7ba3e58a213c" - ) |] + ) + |] ; [| ( f "0x069b05e61eb10497de0f483049454acafcf750d64f6dc9739d4bcdffc95c6a10" , f "0x89ff72507e5780f8f8da40f9058f87eb3693de636863db46690087f4278b5b2a" - ) |] + ) + |] ; [| ( f "0x39b6e802813ece5b41b258e88bf9d3647491374b0dec02f737a60aa746c20c0a" , f "0xa26a401b794a41f4ce10a1ce969457402d52e635b0504bede83b4373c65e6d19" - ) |] + ) + |] ; [| ( f "0xc95d93778b1cc98de2697b8854dbe186b8024c2a7c884196ae1d44738ece8b2d" , f "0x25ef8be210d6b2054cbbcdd12dfe65f6a3874c990a186bd3113f2b71ac8c122e" - ) |] |] + ) + |] + |] ; [| [| ( f "0xc1d564826665152d3636d1a69c983f9884ff1bf745e594ad11cc20f499ad2c0a" , f "0xcf570c1dfd7aee171e4427c28389ef145d45980eaaed86a2d3f8c5a0a9502c09" - ) |] + ) + |] ; [| ( f "0x7b62fad92d3e1290a32ed348d8a78ce95f7faf5e9ca7e831a124389ba80d5629" , f "0x756379c52ec82af9e5be274ec820b1c8323546b9d865b8ade4a857fbefa89100" - ) |] + ) + |] ; [| ( f "0x9650dc6b134aeadab5d94ac9822aeaf9ae66c917094db945941e11107b48a134" , f "0xdbae06659c69525455790bf74e1a1ea58b8a71de61667579938fecc9f2cfec3e" - ) |] + ) + |] ; [| ( f "0x9e9013d9c41ead3b3b77b2d0e44b0bf3affa9b8f0f029c4d7dd53f5c4e888719" , f "0xff5bc79accf2d8510d73f973a21e3361ab3a326c376193f2af7eba6294de5c13" 
- ) |] + ) + |] ; [| ( f "0x141751b9b4ab5105d6faa77478b60242d71f63ee55b161c81011230cd5d7bc3f" , f "0xbbf4627db858c4935dec68f7462ccd49211ac1d69df6de27de48fca6e4b6db0e" - ) |] + ) + |] ; [| ( f "0x27204ffb3f78fde379d51964c0ef555181dfa4abc4e3cf2f2288a40326405414" , f "0x47a7c4adb1093ebb355b43dd75a737d43cc67d501ee339fca9ef934f2715b71d" - ) |] + ) + |] ; [| ( f "0x3b8577207ad4597d4b3d182629db7895ad6950e7de39173dce8bccac4961ff2a" , f "0xd013a9e53446aded4d2f9959d1b44786d6a4d54d7a6920078e9a0f1be6165e3d" - ) |] + ) + |] ; [| ( f "0xb051529ef7fc0dc617c4fcf7035089e1cf7fd48ab5eb8d3537b72540fd2cf53a" , f "0x910ed1b2b9e838510bb0a804a185fea1582f81f97f65dd99a524a3bd299ded34" - ) |] + ) + |] ; [| ( f "0x9ee0ffc489694ff81a270bfb7ec94780af4c1c7475b5b79611b1b60ebc8d3e1e" , f "0x17aa302be0cf91e435966c42b99538c2f8a964aa68d0c8106068721f282bde38" - ) |] + ) + |] ; [| ( f "0x0131ce8a361ae8644cf56b80ee2ecc52a2a270c4965c00221951f2e4ef6b0d29" , f "0x4deade08c6dd90faaa28a38160f368a61cd9a279ff696580b0131d1783df9f21" - ) |] + ) + |] ; [| ( f "0xb95358925eb8c4cc3da24464df9b29aebb55ac86545833b65fc40cf44f0ab924" , f "0x666cdea014a1456f810a0cc7c540bfb35da73bedc386930df4c6a4921fafdf3b" - ) |] + ) + |] ; [| ( f "0x3ecf3a8a3623323c716594b5c3fc4d81e509cc1fefdd49b224fdd9d78f34ba2e" , f "0x510f5080d82729d3c2bfbc7bc3e9ea3f09349292749fe86cc25fcb25b2722235" - ) |] + ) + |] ; [| ( f "0x16fba6b566882c0d59608e4aab2c53ab3f2a7c0cd8257a7ac826b2c316b36820" , f "0xc11adfb48af1a88018d1e389038e8efd488527633c390eeb90b5218dd1965d3d" - ) |] + ) + |] ; [| ( f "0x226ba45c628691d88eb93305d6ba8c2bf17e42c889c27fdd6dfe99c097a3c016" , f "0x91daf83a04d8b9ca8abc9cf7be488286576816ce02d885e3e50993e8b33b4c11" - ) |] + ) + |] ; [| ( f "0x8a65be2b91844789788f7218d1f35fa5e39f5301a89e13a053c460ca6c65a10a" , f "0x7041c96f88c4544f8a6d12db4f3e9137d561608d1cd98f92f1621d82e318190a" - ) |] + ) + |] ; [| ( f "0x7e6258494aa6e2c1f20838b7cbdb8fc3adf8118c1c00e4dc3d7cda5134fb7819" , f 
"0x8ac6577375ae085c9300aee46e47115a1b2c16a72674a7baa52b749b4206a701" - ) |] + ) + |] ; [| ( f "0x89d763c6839302a06f70d0ead8a43c6eaa73bafb12ce69f54939a31eb14c5d2d" , f "0xc3246aeabdb66f9cc8c292a4072122312451bae0b51e2af4521dcb9d0392fa18" - ) |] + ) + |] ; [| ( f "0x554332f9e07d6afe4589840e184fb946657f4c995399b8a09920aa5f7deb220e" , f "0x1ab93836323ac09abdf9197b34db1ef4db83f0cd9c807f3765d573e8dc2d1320" - ) |] + ) + |] ; [| ( f "0xc5c345600041e2f4dc99048cac4a80cd6fc77c7d9e1933f4c25ff580a41bea18" , f "0x3d40699c8d45003c1503f2138c3b3d0884b4171e216a300d13fd91ad556af52a" - ) |] + ) + |] ; [| ( f "0x062f25d98b6237e5462df29093a51411559d29f8693c01b49c9395abea257334" , f "0x05175bef18b2d954b5fd3bccc15e7ec8480823f2eae4551d8131b597cda5902c" - ) |] + ) + |] ; [| ( f "0x324ae7d83e1e308bb99036f07e6ebe908bb0a5c6834a04637c513970e1f6cd1d" , f "0x5445d5e6377fab88f7406ed7923427d80266c1e27c6e33f58588ff21b5f7ef36" - ) |] + ) + |] ; [| ( f "0xcf77b902e4aae0c06e87483bf0b48054c2dff6b91e2872e99debfc62fefd3f14" , f "0xe7b1537d5c6d8236747dd173f0d6ec41a415edbc87a771738358827bc677381d" - ) |] + ) + |] ; [| ( f "0x2563c1e6b2bc6ee2100375da172f54496dc6157571bb0444a56f533996373e09" , f "0x63245cbd1d9a87862dc2df8ab091eb7919179fa7b9123b3a1a066a0793e3211f" - ) |] + ) + |] ; [| ( f "0xf29849d8b86a1e886549f7a5378aacd6c724f3544702204ba556d71e5e8b5503" , f "0x838bbf742170d54a53a284787c26f649fe67b5aa034e65b3891c93259714f729" - ) |] + ) + |] ; [| ( f "0x21bc865fb9da9250c7279a6ca4388b8de9ae2842fd086db0b8e0ef1653836b2e" , f "0x2de37ba589c70c4c28be8acd41caf6e55c509386f6a275ccb2f726973065b605" - ) |] + ) + |] ; [| ( f "0x7fef034e61a45bd3863ebfa388df934edca8d96adeea6125dbe1063e538acf37" , f "0x65a4eb0846c7c32c54607e7cf877345de2beec6c734c87a8e69549a516458018" - ) |] + ) + |] ; [| ( f "0xd90dc0c4c06aec1b901571c88bf5c6dc44f2e867b735d9e90cf85e9e35636a25" , f "0x2d97690c35d303c9183bc7df11475a049f4c1cc60fac7a4148f65b08caded038" - ) |] + ) + |] ; [| ( f "0x0795cbc15ecbed66591773b31052179d90fc2f32049aa2353fa7455dff00eb3e" 
, f "0x1e20579a72f193f9b25dd47b3f1e76c8684e293f39a871f1cf56ac161b5a5303" - ) |] + ) + |] ; [| ( f "0x7bad662c0ace2687015c4de79b3ef0b641193c2a2ba2e68a7e499fb5cadc6933" , f "0xc222fdd974a2b236b2d9ad31167cc5104caa43db05120a711b9386de7bf3233a" - ) |] + ) + |] ; [| ( f "0x07f4444082f8a178c8aeb26799dc0d71fbdf65854948d42e4a5f2f2d84c51415" , f "0x834b46f5b8a01d7e3f496f3f3585ea7bd32fb86f3f557d75f28bc4e99996ef03" - ) |] + ) + |] ; [| ( f "0x4cf07a70b91914ecd5e572becdd13a55b604e7bfb415c8894ea219a6bcc4790d" , f "0x820e6b01b020b2fb3e408780094c4aab32a3113512c0954da24124b4d5702036" - ) |] + ) + |] ; [| ( f "0xc08a30e3ed3398cc7d4e67f4f4436b18a9548749f2b5a31c9e46845624d2a515" , f "0xb93f26dfaa7afb4597fb9e95c293d8897e2566883ed50958fdfa51aefbe6973c" - ) |] + ) + |] ; [| ( f "0x66fdd7042b4b4db5d7ec0df7f1d88173c53386fd5b354c0beb6ff411437b0929" , f "0x9d63f332427543e5cdeffe67cb2fed06a8603f0c6a0cb40aa6fb1d1e4c33030a" - ) |] + ) + |] ; [| ( f "0x7b8c5c3a58d25e4c8eb2d8662dcabee6dc012125a07a9e1713b42d041793881e" , f "0x94f22a7753e4426ab54d10f9588b94857e2250161c1adff525cb12b262916529" - ) |] + ) + |] ; [| ( f "0xbdfe6ecf71a15fa9167e936cfddd7cd49cc3d6bc635b5b96b889f19b17163b2e" , f "0xaa3e8784c96904c6610bca021ceb13d74b089944f1a103cc420f6e0d63c32529" - ) |] + ) + |] ; [| ( f "0xb31b4dbf528ebb8ae0d94eb3e3ae1a8c53859dfd7723db94d823df2916708d15" , f "0xa31853502ceba6d1dd7c02f4d2fd317737a38313594ec49c825f89ddad226819" - ) |] + ) + |] ; [| ( f "0x1076e72b4885d9e59ac54d6af4684629444cfcd618cb856cb4940a84898da616" , f "0xea2db8bef60fba22299649533db460a940113f6af91ff632eedd0f7a2ea1e10d" - ) |] + ) + |] ; [| ( f "0xe165768669e8ea4864ad4c9b322b22c362a8b19931a801a0a1a65c06b9d79e11" , f "0x98973699442aa0cd6c1e3361d01286e6bb911911fb81911921f5f16303bc8000" - ) |] + ) + |] ; [| ( f "0x23696f1d0c6db6682e3cae8fc23b76999a253852906700f77633bb92cdfe530d" , f "0xa17e86beec3da8b304136ff56fd6631a27ce2af90e2606aaaa55662dc95a6406" - ) |] + ) + |] ; [| ( f 
"0x721fc025299d1a566e61ef944cff6ed1d1808de0c2cfb354cfdce1499149ee29" , f "0x7589bc6638194b3d185a59f60a2a0ad90c76b2f79b370e73a8e25d29945a4e1a" - ) |] + ) + |] ; [| ( f "0xbe604b8ac683dd84702d9f48f6eb69c527ff95d6a7d8e56214c0341803a77f16" , f "0xed10dce977d649c2700fe70c3408a0904586b955335dbdd057ae70133061ab27" - ) |] + ) + |] ; [| ( f "0x29d40b2de704a5e9bf685d391c9adffff4109e6981a7b778fe5f2223a252113d" , f "0xc02788cce67a982877de2f501ac6d3ac97e5562b7081f6766db45537738a281a" - ) |] + ) + |] ; [| ( f "0xd1efe4e07a20a9f02b6a93f784798327b60dfff58ccf45d36cce6209305f6f31" , f "0x7751a767c7413bc5bd452e608019060eb6529493eaf1275c660d619dd237e519" - ) |] + ) + |] ; [| ( f "0x15e265f8d369ce44a5f9418606fea10ae2c0969bf29d7d57d9b95c7a407db618" , f "0x24c86d821555ba058c62a325133eb79e24f1817368947e7ceafb422395ac1009" - ) |] + ) + |] ; [| ( f "0x4aeed2edc24c433e6d02cc64a04ebd764635aef627a12c64155504393440d413" , f "0xf495463a067137c03a1d4459a83e924185ed65a8b70df9e945708fa50bda9a11" - ) |] + ) + |] ; [| ( f "0x60f5e04390cd3d5bd0c35f1ac5863c410ef600601268ec2bf04fe24a55cfb739" , f "0x83079a728599c952a22f46dec3b3286e8c05f49f969948a5bdc08e4259116418" - ) |] + ) + |] ; [| ( f "0x10ea9879c467c1d3af9705b1b41b7de8163c5836771edb7e38a53c20476af71b" , f "0x6d7ddac2ef10a73e8fb7fa42ac8a22dd64f2c00d483304f69921fa344ca2aa1a" - ) |] + ) + |] ; [| ( f "0x0e8741f8d08bafa415ba2e0b4822043d51813c7e8c13b783b17a938bce824035" , f "0xe5e2afef612fe6d202a1fca23fae643ff9911d27ec32b1f7d5c9046cee13fe19" - ) |] + ) + |] ; [| ( f "0xfbf204eb79634a912f659dc8e3e2b306652e4d67ddce9154b5272e3ab9395308" , f "0x760f6f43d3e07611742466ed11695addb6ba7814672f34892aa68da9e428d015" - ) |] + ) + |] ; [| ( f "0xda74c3500f24a9c545218f2f442e7dc9331237bfd57c2b14ee863e902c59f73f" , f "0x805d9149a8922e2eab14e4000e92d9cc8eac7f6850befc1a8f9f90bf6403691d" - ) |] + ) + |] ; [| ( f "0x906ccf50a5914d8fe6606894c8656737a9ecc6623f53fa52fa9811242645ef32" , f "0x255fa4979f44dd6fb65accf213ac2f6f45c7bde09709df2cb6b8bb695574e514" - ) |] + ) + |] ; [| 
( f "0x7f8e64c567e176044fc63ea57618c56a74329b0d0fd106d704c284ed93eedf36" , f "0x2fa3b28dc6fef3d70b05268cf911f35a6c93eda52f52327041e71f62ee277010" - ) |] + ) + |] ; [| ( f "0x300229484b3e5c0279635ca4418b0d096d331237c7bc8ac87602a75ca0d79203" , f "0xaa2ab20a58aa14793a718b62de75ecdca2a29bffdf548e7ad8e9e329eb6ad636" - ) |] + ) + |] ; [| ( f "0x8e5a83bcfb543db6bfe7d5965637a2608b13b5fc51900ac48c67ae9126f6211d" , f "0x7de10292f17820b04435d58e018e14f3c44f6982fb6c387885a5ea01161c562b" - ) |] + ) + |] ; [| ( f "0xd95ca276699267dd50dfc1428b75150d183adf298a83054edd2938012187b824" , f "0x0941006404e4a9aa9461a47ff906ea2366e38ad4df4dfc3328509c7e8d3ff30d" - ) |] + ) + |] ; [| ( f "0x7ecfad1459bfff45e058707b8db89cd2773ee6afb29419919f977bffaccb5712" , f "0xa16b58e9f0d13e92096bc857397a7e655d0ff572e34079c491b79642052d7406" - ) |] + ) + |] ; [| ( f "0x51aa9e91ffe94c5095fa18117bef5a031f575f27cccae0baaa5a69c3b335652f" , f "0x59a5e9bf7fc8ed1cf627d6148df707cc062ad92bf6451957bfd48994899ef72f" - ) |] + ) + |] ; [| ( f "0x25bf3042f58883fcacc86d25b6bf52945dacdd007013522ad9028b55162bde2b" , f "0xb3a1972dc4c2f884bd3e345c9d63b07b6e5077242f004425ba1e30de7f887803" - ) |] + ) + |] ; [| ( f "0xa48d5f43476f150f90542aa50300276a471c8e370fe52be4a7a4177dc56e1931" , f "0x99b2bb5cf3a71be44e5a3f2ba1ad300440e9733830e833f3f06cd21f0fd8a205" - ) |] + ) + |] ; [| ( f "0xc715f140a002b7ec33fa4757247dd90ebc8d9f61c85214a54d9092a61787bb0a" , f "0xe3c0efd4f1ccdd38b088710d929140f2bcf32d06e034f1cb0d0291b053b6111c" - ) |] + ) + |] ; [| ( f "0x125fb5e4f287ad6fcfd1912fd667901fe036c1cf1f1828ed99bf6315c2bf5926" , f "0xbd547de869719818ca55bb968a19404d7ec0209b5456dde01521160fc4657e15" - ) |] + ) + |] ; [| ( f "0x38b96f7d8ba17bf09e0ef992d03b6d92197b4faac211ad7180a6a85150324934" , f "0xc66090380c60a265fc5cf17168c7c7b4f1999cf06a5f41da42ebda10dcd97303" - ) |] + ) + |] ; [| ( f "0xc511177a955063ad8a493794ba5fd5788996a2716ea051ad8afc0b4a2fa6ba1c" , f "0x3fb7f8b2268633e3e8270ee8675d7c31b748794ade409248756a83acc3ba0b33" - ) |] + ) + |] 
; [| ( f "0x56a361e5891127d4a2b5d11fd1e430351abc326b6827174d240adbe4ffc50e03" , f "0x9bc2c711ca3c83b9b6e2e9bf03ed5c0631c7101b3963d1793124c484b45ce801" - ) |] + ) + |] ; [| ( f "0x7485020c49427bf88ead403507df4cbab35fc0c9888c4e0110b5000a20b36f21" , f "0x40b3845f8f40eded4b4f0aa75cc6058a203eb53890d98f4ce115cc20d7afcb23" - ) |] + ) + |] ; [| ( f "0xa66eb23f4e8d663ff4a87a517e97eb99f55e14ace077275d77add7f619395d12" , f "0x829060eb2f74b5771bcbfd9d2df19847cb549c028068578f5eeefab641df862e" - ) |] + ) + |] ; [| ( f "0xda214c3bc51bf02ebbc41796c86cd32871d3bb184ef673ca16eab32eadc2aa16" , f "0x9b3d9956ae8e505f7b74b7c508f813013cf44185013939fc3dfc9293a794521a" - ) |] + ) + |] ; [| ( f "0x81c2edceb6d2c7921b2a27fd71ef18653668b80fcf8720b99ab25c2e2fa67510" , f "0xde270e74196c80406a68e09258279e7c7e1f6e4015eb0bfed6ddb79a685d0a0b" - ) |] + ) + |] ; [| ( f "0xc1d1bfb5187560a3b3d05bca25b91574ea68c4ed9446f0f08e345d27535c3f3e" , f "0x85f10ef4c8383d7dcc80733e5e7770bc800ecb57b2bd52240347e923f79b8718" - ) |] + ) + |] ; [| ( f "0x8f22a4d72fe1b848116e7daefa0a944241ff2da8742e5281201d229a26cc5037" , f "0x80a7ebf45975312f9f2cb1d771bb2c3dc9c26d427de0ecbd1cbb8e6b57ee5a06" - ) |] + ) + |] ; [| ( f "0xe02ff743c54787dbaf6c8ea5c8ec66b3d00914984e7df5169e71f9ae5106fa26" , f "0x8536b23b17f8402017c44317348f0c5c49541863db406fcf7a74250e5afe2c16" - ) |] + ) + |] ; [| ( f "0xc2322bf42c71f8d76274661ebdfb0d8ef882d0bb9227b98449e20bdb8a0fbf0d" , f "0xec9770fe48ce40708cf6586919254f3149f171f91258ea61d82d4b1cbbaca415" - ) |] + ) + |] ; [| ( f "0xea4b61e4b7cdb5f6d6fd1db2f0aca92dad24cec928c4fb2b59f93ab37b640f28" , f "0x8ce7fca2ed6abf6ddaea3cf6fd0a42c5162ee1ed61864af18bdb84047b709531" - ) |] + ) + |] ; [| ( f "0x912c327ab9d658cc0c97304c0718491c5a2c94a6ca3d075766e454602907a223" , f "0x76f5ab59e769f883a385ef7716371408ce0f0116b22a459e6940581f075f4d0e" - ) |] + ) + |] ; [| ( f "0x85b3ad0a216e543b4362f98a116314cb0c1484561b94b4243ac249344143b32c" , f "0x6c5d8313e59dffd1b9c55f1bf5fed0fde3a14c13df3dab21b3d14f8da3ab1119" - ) |] + ) 
+ |] ; [| ( f "0x8feed314fd5cb40312e10950e49e9b56de328359d24f59ea4875d3ca0e341032" , f "0x8f84c098cf28b430da32fbaf088f1e7a946a0ca7d69d394578aa3076697e4e38" - ) |] + ) + |] ; [| ( f "0x36ae48436d9b8e74bb65ecd6cdb31da83b781da1a78ff68ecd292a7f5c45781a" , f "0x2d7287b309b2528dd2864c96881edbbd5fab2c473d864464f8a185e16e03dc1e" - ) |] + ) + |] ; [| ( f "0x1e472825bcd4e7efc646f956fa45fd7b0a0911c62389b87c783dcccdc6319d11" , f "0x50db6d61652e4e461b0af484f137053610b0ca7388318baf76179e6b662fc00f" - ) |] + ) + |] ; [| ( f "0xe660437b362fd1f7175463f3b7b4dad742db715728795ba8d11d86800c9c520b" , f "0xd1442221e2b68d6dfaa911ed8aafbfc09558d4ec0f49462c22939b3a87f60339" - ) |] + ) + |] ; [| ( f "0x211d7bf6bb1d57d01c7af199b540b280cac96212e7d97ea869edd5138d5e4e28" , f "0x678fa98e6d7632a93760233577f160f64e83eae10f60e75c8b19492d0af54b01" - ) |] + ) + |] ; [| ( f "0x11bb517992c25c104a5fa1bac91ed3bdb608e6243dcdc11a3a8d049b10f6133d" , f "0x4d7b23d5d5a2b188e9019632c2150b78970e8abab91fa653348f78ce6c01ab01" - ) |] + ) + |] ; [| ( f "0xacea43d705adc98449d072ba8f6d636b384a2c705da20777beff9bb5f1f2e907" , f "0xd6500fe78a81f9c59847fc66a55327d62e56c0c65d9a4a5ff9b8b75d764cf834" - ) |] + ) + |] ; [| ( f "0xc53e218134f2335ccd0ceacf7b451e3f5ededfab425d5638f1ec034b0b931829" , f "0x3ea3e4b49cc6aaf3e9145c67595590b0cdb26265fae2face019ba20554dde126" - ) |] + ) + |] ; [| ( f "0x99c907ca2ffc4be64dfe4ed952c6dc11965d9cab6aee2caa6db6395e75e23918" , f "0xb5b347435ddec54d83904fce2d7d82b5788b2474d7ef0bdee2ba63bab8adb53b" - ) |] + ) + |] ; [| ( f "0x16e9c916e8799f7f9110c7d95787b25d5071e1b291e52cc8850e0e28d7b6570d" , f "0x56a9e34b8f24f5e593d75f3ef90fdbb8d17d5e9df0c0939bf819a14fe024f833" - ) |] + ) + |] ; [| ( f "0xe73f6504556ad97ee15fbdb7f8bc1ad7d8e51f10eafbf59905f288c5607f321b" , f "0xfc1ba43c55625347789cf96e0149be2b94392c497a212c944438751dede3fd33" - ) |] + ) + |] ; [| ( f "0xe3dcdf79f92a4d65983f20a571a6b628bf77d17cccf11f4adcccfdc2ef12743f" , f "0xdb77811f27e36b9a9a52ceaad9ae793ab02c425f3ba4898526d4eeb45f38f603" - ) |] 
+ ) + |] ; [| ( f "0xe0d10eb0ba3143362adae896064ae0a523beb4793f2603a3ee0a63cc9b6a0610" , f "0x92b37f83e2fc68afe4e168203e3e6ccb63ccaa5e455b2503150f8c27d022aa33" - ) |] + ) + |] ; [| ( f "0x5b19f89f076acf788b2be0b079116c2ca6b30cef35ec0e6941284b9bd73d9301" , f "0x679f0c1214cf5db3c6c4887c36a6d9c192c273c353b565a93c1c190a6622cb14" - ) |] + ) + |] ; [| ( f "0xd57a7c4f7d9605a90791bc6fb27c3faea1cbc724985ea2b92887783f06e9531c" , f "0x87994ac146674f0519f8f3197d879f92817ba0edfb637382caf57eafcaf8b236" - ) |] + ) + |] ; [| ( f "0xd7b8f8406b2fbcf5e18b2cba731d647a1ac99c359623c0fdc567a768c50d9b0d" , f "0xca403cf8c39169235d33052b4f519f7bfbe1f53f6fe27f09cee0b169a7202b29" - ) |] + ) + |] ; [| ( f "0xd58e50ecfef6e8391cfcad544f15caf3408e9d7fc8d07d4628cff9b00ff42136" , f "0x44258786ee8ff1606c306bcd4d21f90a85a31f68f33c8fbce114a1571e119a07" - ) |] + ) + |] ; [| ( f "0x52d9c2c319271f3807f9d7aa33626a15c52793641dd0b75f876ba8449b718817" , f "0x89ebf5de8035a6108d9fb19024b4f08832c08943395307964aed3ab04481c824" - ) |] + ) + |] ; [| ( f "0xd1bd08cf55d48ec4dc09f7f0393fc5828aa2208b200aea0ae90af0d755925218" , f "0xe3d011d70a309928638fef7b3ca56e69fd579ad3777ffd67575900fff4ffeb0c" - ) |] + ) + |] ; [| ( f "0xf0486e44a5076210e25d4631edb798728c512c0fbdff99dd0398681e2c266622" , f "0xb69628e01bf1d9e48e18af2315a3022e057e64a888d625339d4a9318bd3c6d24" - ) |] + ) + |] ; [| ( f "0x528442c080f5bce9d346fac4e8769e97e207bb1d4fd8b7f86945b0f834b0281c" , f "0x5ff7801f52ec0f1a2bfa8349a012ea40f5b496ba4bb275e7fe6bcd33870cf400" - ) |] + ) + |] ; [| ( f "0x5fded3c649a40193fe40878531f0b16f0ab2be746258aff285a2802ae7476d07" , f "0x6c47fd2f282797fdee5e8706a431fd3338a6867c7b1d27d0b723a5a9c8cd3913" - ) |] + ) + |] ; [| ( f "0x148a7409b91f343b4424c9cf6a64787831a86969cf854a8d0546ec40b1883320" , f "0xee407a9616e77f989c4f840fd25d96c64d6b56feb496855658c7b399bda71f2f" - ) |] + ) + |] ; [| ( f "0xbc056e0e81a6d86a832e9d47113ef5f001e9cda21daaa185711253aedb1baa34" , f "0x86ec17bed35685c496ff7cddc30714dffe3f6c86556506cf2fb88ba36fd2f102" - 
) |] + ) + |] ; [| ( f "0x711ef55fa92655e8a63d671b30f3349c4f059f93804e96a65122f534f9fec70e" , f "0x2a840684b70f9988b952f4b3bdf1239c43b0ce2001b82ce3f839fc39d2098113" - ) |] + ) + |] ; [| ( f "0x5a8e7a8ce3c1554effc21faaa0796baaab4e7ccd1cbd04adbb030abab075b50d" , f "0x18bbc098f86ecc68a5b8d91fdc135ff140ff2e026f61750e751f7b3d25290405" - ) |] + ) + |] ; [| ( f "0x988f07c511ddeaa14717ae7bab2c763407c0c89bb988e06dd03057fdc6456a2b" , f "0xd9b9aaa94b782d6edcc2228883cafa60c7562234f42bf3b5ddcae6488824c237" - ) |] + ) + |] ; [| ( f "0x250c1a4e8c6302a17867da1652df8bfa7132fe52d73e9101f71b8c441c788f07" , f "0x651c2b07ffde24bab4c03dde81b00272689d6774dbc188de84eb045f887bae2f" - ) |] + ) + |] ; [| ( f "0x8d4d55f3f991214fe117489b6d7958c948e3f555dd03e77675946173b836bc0d" , f "0x94aee75a2dbf8c717137fab61c2f5e2eeee044f7d609112240161557771d8518" - ) |] + ) + |] ; [| ( f "0x6d3c4184497e2bcd2997c579007ca156e7dde3b2e011640f665f6464a61e9d25" , f "0x2f83b526c7a022447c293e1753f77b78024066746608b8bd3eeb9978b3a59d26" - ) |] + ) + |] ; [| ( f "0x230e62e1c96a73c51a60747a57a138c207036b090a5e565eb06dba4a9503770f" , f "0x57808af215ac7eb2fa08090c64b04d4eb8e1c3887b9810d3f61ea7e604e0b10d" - ) |] + ) + |] ; [| ( f "0x7b42de513f01c03226742e04a39fb8a7c8b11f11efa4de9dd346457195de7e12" , f "0x7882af2921a5798b19fcce10dc8d26525c353bafe4d0348e924bd9b0c11fd438" - ) |] + ) + |] ; [| ( f "0xb27ff37fa2d5703e9da0cba17cb7142a603c08ede8e75c4c562191b25f5c3237" , f "0x7ec2feb8ab54e34921feee290692217e8e58b728eaa10f33a3a66990ff89b73c" - ) |] + ) + |] ; [| ( f "0x36b724f071a354b8ac6170885d794c63dc211ebdacb5b6bab7780d26bf581638" , f "0xd0bc128388a7d7db4c9d7a3200121b99339dd8ce422fdaf9911a4ca2f2476732" - ) |] + ) + |] ; [| ( f "0xfaa46f7fadda6fb17b9bc5226b5b0e683d058d07a1d423dd40045826a6dfa121" , f "0x163f21eade172fc12fdf4ab529549330ce346cc4ac3b9a7141ce0ed148d0b309" - ) |] + ) + |] ; [| ( f "0xdaa26ae17ec51574d2578935386c9905dff624413c7cd824ce6fcee568d88a09" , f 
"0xa46b91f7591b4e64ab9e6c6442732e745505bfecf6e45d3798a41ba93bd02112" - ) |] + ) + |] ; [| ( f "0x92b439a213a58371a48909e42b6619126f519e8d3596853f9f53cd599cf06a1c" , f "0x16f55a77b5f785566ef33d2c1d34fb5b1a814e059f1465102201babfd3b7a50b" - ) |] + ) + |] ; [| ( f "0x2b8b0e44774d739fcc5578f588744f6e3dbb311b9e113db526719df9c02bf51b" , f "0x3db36a53e09ad5a38d06cebaba3ebb0bd1f11bdd79f88e9703c69e386111f10e" - ) |] + ) + |] ; [| ( f "0x35fb20fdec751b02a9ee761b9b42703e891073bfd671f2f19c8e54812a80a31e" , f "0x86304f320d5f669b4ee3a27e02b307e39f1b9c663cb36c034ad295dbce95f71c" - ) |] + ) + |] ; [| ( f "0x15f77c93b18c7bcd08583d0a5923568ec435e00d46b3e2599b245ed41d33bf02" , f "0x2411b760371e4c475a1937dae4091b51a19d7f49c5157c65424e7c8f2765c43b" - ) |] + ) + |] ; [| ( f "0xdf07edd9ea1abb7322e1108661e35ee32d93226d4298bd943f9a2036b6d87018" , f "0x05ed2ae81df2c730e53748412ee1a9f7d33d9b7d640a5c824df8ab94f8641924" - ) |] + ) + |] ; [| ( f "0x6e04681bd12a44e7c922183c7aa71f168caa7b59bc29a89b6f54fc189f53923e" , f "0x95f483f4fe0a8715955a9639cf15ef71f408a102844ab68ddfeec43f8ba94101" - ) |] + ) + |] ; [| ( f "0x3bd4840d6b96ca825d64636b4992d55bdcb66eda584782457a7641780ac4d20f" , f "0x0ed7ec3a7819b5e31a1aa140236ea0b51c32d2592900194bf099f21f3c98e317" - ) |] + ) + |] ; [| ( f "0x105e22cfe9f1e668805f0b0794dd7e77d5477591356d98d5299f2fb6813c0a1f" , f "0xe80e850a40daed92959bf64dc881b60053c9f75e3b201e0aadfce30ed39a0a2f" - ) |] + ) + |] ; [| ( f "0xd12a87dc8a280b5141f398cc43b877af75a022796175d2030032ff9f546d0c20" , f "0xa246789707b1fd51ef1241df5fdc9c53da24e0575590472ef31cb0b820b95a3f" - ) |] + ) + |] ; [| ( f "0x45af3b62bc4dad786ed7af0eb9f2e3f07034da9ad9a30f6222b8d97158035a0b" , f "0x3a078fc5d1a8fac88942a63077de6b427668af4515bd191ba1eecbf97d0ce42f" - ) |] + ) + |] ; [| ( f "0xc0afaafd440d92dc3725d0c1d3130fbae182b4c67ad9291efeb333236cdf0c34" , f "0x119dc408faa075177aac1977df7d9abd0d291def3d4f30a2a099490de8c82f12" - ) |] + ) + |] ; [| ( f "0x31d3fda2cc396ed0f5963b40aaf50db28c5fa3592c05c1697818fb9a8ddc031c" 
, f "0xbaeb4789c02b2aa60655d666d266432116007f69f7d131fd7d1339265b55ea03" - ) |] + ) + |] ; [| ( f "0x32c8c64cf18358d3900247d8625e51b6504953c5e59fec23c182abc3fa46fe12" , f "0x7d7afc2dc6695f7006d596ac881d815a60e352bd9595b4376fbcc93cdfdab119" - ) |] + ) + |] ; [| ( f "0x7666eee042fbcf11128e35b1a9fac1dc1c231870af56ef408ca3cb4652d09134" , f "0x7ba80542a7834987afa65214546b348cce0ac1624978b2e84d44f82fd33c3602" - ) |] + ) + |] ; [| ( f "0xca907dc5d40af29d6ad6d2c97d865632186c0ab4cd73c11db7e55bbfb52a9d1a" , f "0x90bff6274b8a9be87cb22d8945c94055a9116bcde86ec4093ca40f15121b2a3d" - ) |] + ) + |] ; [| ( f "0xe849b5176043aee7026747b04003dc946152b260e5ad0467cc192b89f117040e" , f "0xbb75d302794e1eb6a4af2d7cbf5fae90fc02c8ab879da70f44032f9e58951513" - ) |] + ) + |] ; [| ( f "0xde329cf199afe3819f8bd4bb2d5441c5dd5f04ac1f552d5343efa92edabe5b0a" , f "0x6e031da87e2c3c4d55630ab91b93f52fbb40b06c969bbd1082fb4501e2fef428" - ) |] |] + ) + |] + |] ; [| [| ( f "0xa97e6158ac4636a57c924a5f7a03800f2d399c84ab6f90ce7bbe24417f933e2c" , f "0x0b67966d2830f8191bbe14e6db1f18bd03ba4b1ff120c3998c5a722fa798d400" - ) |] + ) + |] ; [| ( f "0x119efb3760a0a0e9e0e3b88ae6bd552ff76714732e754271b6eee5946204d02b" , f "0x0262d2e237225a6611eabd11cae9dfcf29df517198428815d45e137dade6cc1d" - ) |] + ) + |] ; [| ( f "0x0c0ba1ddc327bc2101eb6564373fa48f05a1ae40b3a530a366e16370b789d51b" , f "0x8e72383c7b7760f286f3aee88781ead941ec00fafb1c01800fef1468d260dc37" - ) |] + ) + |] ; [| ( f "0xd7147a1fa5583ece7ac911d65a97f67a9be0fe6f06b45c2be456624e955a993c" , f "0x1559f4ec443552021f43f3efe67eed54ebaa35adae035e7c47e403aea90a5734" - ) |] + ) + |] ; [| ( f "0x842c4289b5fc380799cdb32126f541cb36771cae11d13b3ca4a7b3b90e21e607" , f "0x7d95d96e40a4187c6382c16a7225596678a80ffdd820f13a39161ddc913ae602" - ) |] + ) + |] ; [| ( f "0x160aca2c1b6149a8c15a0f827738803ea49d325e60749ee5e18fddf919478e06" , f "0x064e07a5d3d144288d4b69432c7d1506dfc46d4a205134b03c7d56f45d25b633" - ) |] + ) + |] ; [| ( f 
"0x2d63fde4b5aab3b0e1be15c7859362f67b14bbe08a98e6273446a8c534fd6d24" , f "0xc5133e4b4f3f1e0c9592b6709c889f06ae479e2b1a888ee471d190f7d53d8f00" - ) |] + ) + |] ; [| ( f "0x1e1a13d004a2b812af97d3e71b66873d168e46f4ee1515600625ee773c84d536" , f "0x15483065d18a133a4687093f3f2a413c84cf8b02d2308a817309d95b24a2ac14" - ) |] + ) + |] ; [| ( f "0xb7551f2ec1bd3f3b585de099fa662fc15481690494a0fb0d37a8c87222534c36" , f "0xc14cb9736a0d0a3c588a47e8fb395167836dc21a9163d2c13f0d726445bdd61d" - ) |] + ) + |] ; [| ( f "0xf81feafdafbe549e1d5acde4130ed7e882b0ce3d20bc4480c6daae9bee6b5c25" , f "0xa057ec315ea289b2a23cb542a67531e9715db4f7f18121ffd4fd4ed4372ea02f" - ) |] + ) + |] ; [| ( f "0xb786bc02b4705e0b81ded06ee777c9f221a154e72e2972de7c9ce8d2ab917f37" , f "0xe26b69970eb9a58d7163871e82a15d7f1ab8fe07b430c60f11679e15f1bdc42b" - ) |] + ) + |] ; [| ( f "0xbafd0382acd8313bc4cb56fda6f79e369f31f5691f7380e9c8ac8ab52d8f5c35" , f "0xd6f4d5a7d0adda4c497bfe40e2ec068f26c9f7d26d9b18f895280559d58d673e" - ) |] + ) + |] ; [| ( f "0xea71d60dd5eae642a48a642ba2398970d5c81f9761da5ca82414c103c6bdd73b" , f "0x6fa647c6073f84c200a09c511dcfa80639f07d2a363c171864b83834bb57372d" - ) |] + ) + |] ; [| ( f "0xb074c3bc8171f6ae142856c4f59fbe10097bec3e2f00e574659e15efd9c24c21" , f "0x1d2554367aaffb2290dce1e08323887145835d74c65839695f635816e0351f2c" - ) |] + ) + |] ; [| ( f "0x19e0e6efdb9e23cbd5e6ff67d51f1ac2f8bb5ed41ea957b5de70642f82cc8113" , f "0x42e8eeb85ebe9b3c69b73683e4fb526e80bc06cbc5a01d5cef6f2be4fde50501" - ) |] + ) + |] ; [| ( f "0x772f6e3823afc5a3ab51c4b46330d39410fe2ea28112b15eb4db3c399205892a" , f "0x963f750671d28a98c1b007bf2c38abb5fb61f0e41acf418cb129c34eea08f607" - ) |] + ) + |] ; [| ( f "0x58780454077e74f782fd6cbd8f131e09d0f6a9937e2b6b15fa6c712070659804" , f "0x1e3c64076c98e3d2f3cce8fdbd9ab84111880375564a33bb159297051760ed1a" - ) |] + ) + |] ; [| ( f "0x530fc8492dfdaedc1a216c400f17335955286140f79414e97ad4e6447b6a6936" , f "0x7a48db00e8f54a61ea441d84e24159d3bcc18d01b9e0aec78f1f7eeeb8dab903" - ) |] + ) + |] ; [| 
( f "0x9a4f4c65fcffb131fcb4894923e3d6b29f3e4a299d77535d3bc2a122606ed63c" , f "0x952174945a1a69cda75a99b628f98caf2271ba17229181f7c5dc57942cd4cd27" - ) |] + ) + |] ; [| ( f "0x4a3b11ad2bacac24a578fb4251f76614c1b2ebbbba617ff2d022bc4f1e1a9a1d" , f "0x4bd29354d3fd763d50209806b0a4089224f7f369092683301fac389536482715" - ) |] + ) + |] ; [| ( f "0xf5eb5b69260313742448232f611861a990efaee8dc315d654966b54bdf4f671b" , f "0x76a386fc5ec4d0c23599dc1d45655e75e33810a3a5b789b310ded3a89ac53c15" - ) |] + ) + |] ; [| ( f "0x93b843510c78d310c1563819842209858f92855919f05c837f57083a8b130432" , f "0x7d275046a6764e077081cc6fd5339c9f9c2706f3236433ee27e78eaf9ef15d32" - ) |] + ) + |] ; [| ( f "0xf1c00244dd2d7b16fae9e80c5f797a4102ab1f458a8d0e1854230472d586702a" , f "0x7dd5a6e4ae991ca6b1047cb0da2afab3f3d667abcdb01cc873a94d9ae64b583c" - ) |] + ) + |] ; [| ( f "0x3323c21be2dac14f25a65afc7e46a4955a62e14e4770736c891b51fc397e762a" , f "0xfd88f3c3321dc049ec29eb6f3e1c1657e0eaabb439d766b8e70d441799226903" - ) |] + ) + |] ; [| ( f "0xeb3ad5a731082aa7b3b966a914661566dea5a9db74d402d615038f8dc768c43b" , f "0xf960bb5d996cfb656b0fbfdcfacd53d24a6b22098566f4de33ed782689c7a71f" - ) |] + ) + |] ; [| ( f "0xbda522fbca176a5b0b4e2b6ebea360dd634a32bd9003b1980aea8ab2e28d1207" , f "0x5beb24b256df72c072d724018f14091439cecab9eac172f9b797331c6a6a3421" - ) |] + ) + |] ; [| ( f "0x1ceac81539ce60c5d583951b2ff4078af3dca2ce8ba7e47c089d7b1cfd92a22a" , f "0xe1cba6532c41383c928e57e4d35bec0af9df3c0ae7ddfd33e7ad1011b9171401" - ) |] + ) + |] ; [| ( f "0xfc501b69e3f3f6d7de4446d2987cdff7261d4e66cd5b6a085f7dc02102750738" , f "0x868985c7a44d89e3adac113380fc5b2cf3796943d2906a2631cfed38f6d6a02d" - ) |] + ) + |] ; [| ( f "0x052c3e64b1871571d2c2827de5858f8e350b7fb284c42fe1e9eab94b4c2e4b0d" , f "0xc75a737f1fe6f3061fd0ba2898042877935d703cea535b5b57a972200f8eac2a" - ) |] + ) + |] ; [| ( f "0x3a791908d716ade2faae067a3141020d007175260cec5bb066a6654a79f28412" , f "0xfcb23dc644070e559f843bb714b5396941005492508108ca38bcf5cc19079f04" - ) |] + ) + |] 
; [| ( f "0x207c412a8f7e23820f145fd6e18e40944b308d4dc77e1509f2f3ecebf8e34012" , f "0x43d10f1f1bcae69c930d179abd79c23ef0e551aa0f46d82b101d69845cc2a91b" - ) |] + ) + |] ; [| ( f "0x5312869972187d9ed351f8b175f0fcf6264279b0bde0fbe484832f0416d74822" , f "0x86ecb5e9a30e8b3c40ff267b0c6bc9634d90da1b6c3d529f3375c01e51630a17" - ) |] + ) + |] ; [| ( f "0x7421069e381b1687045a1e77ac4bd8d389ccb18a5aa26b5a76479c2798c83517" , f "0x1d1ab1d8f53ee191ab7217e71c48eef63107e9be6c02af07d21f3f9a0fcb5902" - ) |] + ) + |] ; [| ( f "0xa9dee69f5b5ee0a453a38a5f1a5cc3b36b11cc2a54ff8d27878e9a71a4cf3c08" , f "0x6a149f017c0e4975fe4105459233088447305ac935ef6bd9eaa674baccaf031b" - ) |] + ) + |] ; [| ( f "0x231fbbddbaae0c9a2890b68271e6d739c7f83ecf2eb382bdcf67785cb2eb8a06" , f "0x3f56266fe8f27efa487f8787b6f666e3d65fd30ed070782eb8f0da2e1d5c2c10" - ) |] + ) + |] ; [| ( f "0x7c432123f25a11c07cc98639295512843d470bf06c1b2b4f6d587ff454266837" , f "0x09504b39b35c6cf9e6deed6dae12d30fa7d6f385181152f8ae4a221ca34b7212" - ) |] + ) + |] ; [| ( f "0xa2e9fddd22a432998dd56bbd166c8f96b9984e09973919662a1680c9d0a2170e" , f "0xdb239801161e85cc8fb6a1805d1586a0278467751522f855b306719d86ad420d" - ) |] + ) + |] ; [| ( f "0x6ff5d1b26129dee01880bc402253cfd1ae987001753106812e98b655bd25970e" , f "0x0da4b054a2c12ba17ba04dd5ca31abb1a1736956b2e82557fd60db53c1ef7121" - ) |] + ) + |] ; [| ( f "0x46333b2e780347ad1c23571f951e6de6b4a1f15b320eca9c59ccd70142cd0826" , f "0x1bb4a2d7576b69e893435ebf11d6b4c7f8834f709c26dd3d7f64927f81be9d02" - ) |] + ) + |] ; [| ( f "0xa552a61b2ae6804cdc1c8971855097ab4b6f4c04fc2ba27e4fb0cbc4a2623c13" , f "0x5ff2274ce041f59225be614254a49ef794ae9d11780465dc2b81e01a36920603" - ) |] + ) + |] ; [| ( f "0x298c447de18f078ae894cfb1c464edc0dd1adb69b58989bf7db99de9dc7ad327" , f "0xa278254289b27e8a16095cac78b3b4047e7d414e1d5232dc346c40e1eb198a2c" - ) |] + ) + |] ; [| ( f "0x6ed00f131d291a14514a75a28c2cd41a3ef90b4ccf154cedd1d3333a76ec3c04" , f "0xad75ce0cfa9e9d75fc6d09be42c7b586d971a5edb2a329dfbb836cc4b7282e15" - ) |] + ) 
+ |] ; [| ( f "0x074cc07f9d67f4f3978ecba0be34942ef513ba9e52d5787032e2005dbe2ff921" , f "0x8b01452ad162fbe0c8871a4259edbb3a30c23db32816ef57c6bcb47568a30916" - ) |] + ) + |] ; [| ( f "0x8bbc80ff0d94fce8928137d3d089900f601a7cff17665e2cab81dd95418d261a" , f "0x896b25a49992e603473297f1a356b8da9738bb2a543dbaa605fc2e00f9f2f93d" - ) |] + ) + |] ; [| ( f "0x6c1be6d11ea464bd333b99a9767ad8d300063ca22e707e19c3af2effa943b40f" , f "0x27875a27604d39abc9a05f7ba546515d94a80cd7a26d34474717320d92eaeb30" - ) |] + ) + |] ; [| ( f "0x569074411db10bba7b4e0b371037d7899d2207a15220194644c634979a9ad420" , f "0x8fbc919c0e88a9c13d4f7075f2d2678ade67f3724a7ada2fe73e706e7090ea06" - ) |] + ) + |] ; [| ( f "0xaef708ce3495988503ebcede4453ef24277337b5cc598984825659d6f48a020e" , f "0x77aefecf1adb9477ba6a060e252b56c0133d3bfcd1fe9bd52c05d617401b6f3c" - ) |] + ) + |] ; [| ( f "0x31daf3301d0ac81286e21665e9b80d8f4b2cf08d38d6e69b412d44354c857516" , f "0xa9cabfc5a8acaddf593580524adcff40c2569605cb929e6a5a8352d249062608" - ) |] + ) + |] ; [| ( f "0xae05e30783b3a435b29946c3965e420df59da56d16c47d6816937716a218ff26" , f "0x50bf5247eb7cf8fa8567e531a953d6d9ada7f37c85b0f82bcec46b3cd16f0c3f" - ) |] + ) + |] ; [| ( f "0xa0490924ff071ad022dfa047f9b0d7f6f4aa1a05d24dd109110b59aaf0462927" , f "0x235d945227de0ebf404fe8379c7963c24c7896ec6bb73bff9eae6973bc32970a" - ) |] + ) + |] ; [| ( f "0x49faf26dddb7fce55a86a1119d66de4417b0d81271979b8715d1e5460b991537" , f "0xb5f2efebdbea5e663d11c59c874cb2501b74623f1495282bd60e2adaae566707" - ) |] + ) + |] ; [| ( f "0x39bd0c8a3b0bd937f44d622516fa7b737c804b028b4381908cb84f2178b40428" , f "0xbc55faae44c0dc88a792bfad600c315c924f54103c5fc2712b694874012a4d32" - ) |] + ) + |] ; [| ( f "0x029d785ef3211c2b96a19a86dd63af64ff62b787f18194f17713a3831ba49715" , f "0x01093a5c5152573b444eb810445042c452d087ebd20dd737cdb1f3aaf73e9524" - ) |] + ) + |] ; [| ( f "0x8f571a0559525626bc32b904afa4acfa7d3cfe1c0942c76c9e3d7245bc8ea910" , f "0xef954f499f1889d3db0a9ea24164923439503a6b49cfc63a2d88f8f7adcfb50d" - ) |] 
+ ) + |] ; [| ( f "0x440de39cd5087b80135a632b58fad83b8f8de3e783636c2d1befa4e18197d116" , f "0xb283fac955ce89a751f3f55c6913ddb41a1ae953131779ed0e32e3823dcabf14" - ) |] + ) + |] ; [| ( f "0x10c740b72c8032a489bd68e35197bb710bc1c2d1358b1c02755809f6f0f5f73a" , f "0x35d43c634a96f8411c01c715a2f9deb681af340f833e038ccb420c3bf85e9430" - ) |] + ) + |] ; [| ( f "0x1c6943b4ed61e9cbd91dd5be418b9ce4abfee59a4ac20f63ea1a9871527c9519" , f "0xb3b36373a5719731013fd7e541ed5a52782a54cf5eb8dec638e45e3588b89e2f" - ) |] + ) + |] ; [| ( f "0xc3a8d21fc3f8e602b5f36b070a7fdf20cfe5418ff1a2488e09a1bb700f447f36" , f "0x36001717791ff18470595113b958c3b8c7aec72a3bd81c5992eb83226e95c102" - ) |] + ) + |] ; [| ( f "0xc362cb84e0baea072b8743474ce067d3b049ce8f9899551193d09395d887801a" , f "0x4e9833888b5da8e21a5a26017f0a4be4b9aa0d4d3bcfacc3be91ab6f4345271c" - ) |] + ) + |] ; [| ( f "0x190cef71f068d2b969fe394d1aebefa3be14b80a4a0df93f77617f83e2708c2d" , f "0x50717eb85f8683de951f96196961bc466aa982faf800b40241e8de24f3b3022f" - ) |] + ) + |] ; [| ( f "0x278c3cd9d28f36adbc1749e89e08f7d430c0cee3f22ed80b4fbcd8d5969bd01a" , f "0x105c541471c0864a0dde9f32ef00041d606200277b7bdf2bafd9147f95a17437" - ) |] + ) + |] ; [| ( f "0xd4c340217b71dedfb9c303a41a9b843a31fb5c9146309475dd6335a753ac2f18" , f "0xa1bc0fa565a9d5f513fb3eac0caaec06f1a68cde905ea2ce86b6dea5816e5c21" - ) |] + ) + |] ; [| ( f "0x03ef642ddc7ab42451c00021f1f4195c0af46a8349282a4f6b9dde950c471c1e" , f "0x83f0dd8157e8a1ef9d1f08ae55fbaf2bfeea301a243a8851a032da9a724a4d36" - ) |] + ) + |] ; [| ( f "0xd44196bf272eec8e94f8237722809d307efffc58248723001b1cea29c1fe4d21" , f "0x79d88ec1f4428ac1accebdf664b11e87ba1c507145fc3dbda1a1454393cfee0c" - ) |] + ) + |] ; [| ( f "0x20e87f4e912d3845a640ff509b688d3a374285bfa83ec9ead23e84eae7f3562c" , f "0xd5d022d43a2cba155ebe5bdab8d28bed2610d30314a58302cd7c2a2279a69311" - ) |] + ) + |] ; [| ( f "0x5b86b3ccf3f27bc08bdb84e7ccef2a5ea4b43c67d5dac135f035216fa7053e18" , f "0x32e7da1cbf9996ec4b754adc33f5fb7e19ffdcf80b7fe84e4badf8b31987b310" - 
) |] + ) + |] ; [| ( f "0x0454495c6b0128c903c446c40ab54962e274ca015f3153b2b852ebb7e12cca33" , f "0x1d0b23c2a3002230be21c1196214a4ed8a4bab7a2abe35d8712a032ee98d8600" - ) |] + ) + |] ; [| ( f "0xbb4383ebc6d1178e14c5976d7397b7f5a59007cc2e990fa9d7989be2c47c5e1f" , f "0x2a1b7a510707ce72f22a476a4f91d379e49fbe827aad1cfa9588e0b7af9f2c3b" - ) |] + ) + |] ; [| ( f "0x81da35737033cf060b4cfd39ca57dcae984614443039a1f508097a708b04a017" , f "0x8838c0a430eae6f0272b05c3769a13d18944f39bcabae43e2fab5fd348d5d03f" - ) |] + ) + |] ; [| ( f "0x213b43ff93253cd4102a9cde255487462b537a2abe87f2336559585792049303" , f "0xe9bf8d213f0ac56273ca795bd8def2b9afec1e1067875465e26e5d99c1349c3e" - ) |] + ) + |] ; [| ( f "0xbbd3f24f45753a06985b10874b69985db3cf949b25752f7fe9a49cfbbbe0b036" , f "0xf02d4e5802955f6668d46e25d3eb88e3fad437d08f41f35073b4f2bd38379520" - ) |] + ) + |] ; [| ( f "0xc1eaf8144a31b6847040fd4131aa3939c8c3d864211fe7850b85c0cde1dda118" , f "0xba26fa60dd514718fcc6142a15b632042df2e7eab59946fa340c016dd4ee1b02" - ) |] + ) + |] ; [| ( f "0x4cdebd63660f181f7c65f77041c798190d2a5885cac48e06f830ba7fa9cd0706" , f "0xc0e4277ad9b4ca2e5acec133ce81b2cce7a4ba4e8ef31cbf038694cdde63b036" - ) |] + ) + |] ; [| ( f "0x7f11cba1d4a81c6bfec5fe260a4337ebb145b9a4bc666a5f13a81681fdb25926" , f "0xe1b698531611673dfc7b58acfe2a8891e357326ffb7b6ea2e1728cb5af49b51d" - ) |] + ) + |] ; [| ( f "0x57968866230a514a49850358cea4baff295986c2b1ff01986ec8e8bfe0532230" , f "0xf690a403da8b7c0c3ee0deddfb41cce02ca1f0e1d94af9c9f145e495b352980c" - ) |] + ) + |] ; [| ( f "0x728fb1acebcfa0f1c8acaad2967957399c67e7192fa7bf5d33da371aeb79340f" , f "0xdfda1033f5f02de670a9cdc3dc90137843603cdae9f24be7cd4c71d40879bc21" - ) |] + ) + |] ; [| ( f "0x28e3020097231612bf111930a93f16f1f4bc21c5a2debe8a7582b2f986b93529" , f "0xdb6fbb4d57fb8b55e819e321617ddc0e883e120ae3c07d8cf76c3ba6943e4130" - ) |] + ) + |] ; [| ( f "0x902c02242df4906742140cf02ce345369b1c575a0b2f4ee56c82248b03cde73f" , f 
"0xefc20b20fa566d03d112e1af1fe38a84f1f74ab0640f241d698024b1714cc812" - ) |] + ) + |] ; [| ( f "0x161f2cb0c0724d3cca59ae84817ee3a43f2faf0fc2d22711ffada024c3dc2635" , f "0xb034a3443d0afb2dcae2cc5ae5dfe095bd7db43db0bd689d35b1b5612535503e" - ) |] + ) + |] ; [| ( f "0x8995c52bf403780a92e7c88e468fd44e8993191dfef85e7fab314020c237642e" , f "0x52b6b851341f2f18ee424fa4ce408b2eed1d16c2f7e4e49fa2a493e272b22c2b" - ) |] + ) + |] ; [| ( f "0x3d4556c8a1c7c46ccac28edd5be803d9ab06e5eda5a1be675cb14fced84a1f0e" , f "0xff037557a897da4645650d3f2746c48245b1624a040ecadadec5b9afdadeb720" - ) |] + ) + |] ; [| ( f "0x004166e7ee418f850c45bf3d0bb618a4cb23f6ed0eb4e2c5418c2a645e2ca32b" , f "0xff581922d8edf492799e586439c803a8d39780bfa204bcd548a548bb7afbf335" - ) |] + ) + |] ; [| ( f "0x4cc2419e37a2f5ad53d86c0a4cbf3a8b91590a536e46e7816d2e383c2fc5fd26" , f "0x8cb3343ab3efa6cff8bfa5290839c51dc6751f3076ac9f96679995dc59978c2a" - ) |] + ) + |] ; [| ( f "0xcc06918b022a23b0301d2c55e9acd8e1f203987f37ff02849001a2afaa0aa109" , f "0xf98ab14b53e182f60c337114a348ddd2375f273e775c506d3c1df4d99a2c050c" - ) |] + ) + |] ; [| ( f "0x00692875442ccf10752681b0feece9a4856476400e7dec9620f85a84f55eca26" , f "0x2d420db66aaeec3d7d1b4bdb8b00f960dcabe7090596cf8b96831d33c8b18909" - ) |] + ) + |] ; [| ( f "0x85c187ac7b538b8e843fb735deaa187b132718b653b921e020bc90864710ff29" , f "0x12687ed4d94d862e1ff61c715cdd4959fd42eb60912ddcb36353db17fe342a3b" - ) |] + ) + |] ; [| ( f "0x3036308f19db54e8a5f6a3e42d55f3891c235e5732ab12c1b20f2dd927e54029" , f "0x81f47f4b0b13de447d049e8c7657b14873e8ac8074313e45ea29b943d88a0907" - ) |] + ) + |] ; [| ( f "0x752e9ce8a962320e6c905e369165f3d132bf9e22feaac19e7ccc9acb6642421a" , f "0x2cc5853c8a1750e673cd49231d6db2880f599d18f08f6d8c3a5656c53580f406" - ) |] + ) + |] ; [| ( f "0x53119d1e43330f44c268e516921f7bb2b4a39ec23770081bf66fca4221035f1b" , f "0x85aec61e789088717a391c496c8d2b06a3018bcbd8b1bbabf892efcbb11c4a06" - ) |] + ) + |] ; [| ( f "0x26f60f592e364933bf6c3ed2ac76135b61833f6d56f16726bb745e696dcbe523" 
, f "0x0a3596d57d3f60d58a67fc66fc38e2acc1e55d9423f862e59298293eb3be7924" - ) |] + ) + |] ; [| ( f "0x39aab62b337064e8b29a4352273886a1980cf2eca021bd59f69dc237ea8f6928" , f "0x67eb79d398ab2daae9934e4589766e9b015d3c94477c4aed1e74a2e3cadb6702" - ) |] + ) + |] ; [| ( f "0x48cf7f3684522f4b05f538b20adde8554bd9e641c75fb8e9cfc421d4aac6021e" , f "0xaa60d05b7cd8578616d0d6b15fc907a28ed021861e7191747fb1541a74f63307" - ) |] + ) + |] ; [| ( f "0xc0c2fda48efe6757e723c39698b41cfddf7cf99891625d17e611b4ce79c5a910" , f "0x3818c610a014785515ff630ea10b508c203b4e9785a1d8daaad8522a38b07225" - ) |] + ) + |] ; [| ( f "0x07fedf7b2b902f3f19dda0917ea74534fdb6dc7e2e121f24da455bf1c155b001" , f "0x693623e60766396e32196b53e61c8e6f799a82fbd5ced19a7e7ff7fe41185111" - ) |] + ) + |] ; [| ( f "0xbdeda3736479b444a631e4f76ac89fea355e0c7b35852f19b5755d242ce3172f" , f "0x3864988117fd2a2cb66ec0dba82989311510109392836dc435e54f3081048a17" - ) |] + ) + |] ; [| ( f "0x080990d010e52b59d0fc93b9299a7f5c467941d6bdeece0eac867b8ece651325" , f "0x88ae3cafe4f866a8a0eaad07dd581c1ea8e128df7ea75306ea6a3097bce60223" - ) |] + ) + |] ; [| ( f "0xd6bdf83cde1e108e5fa1a816cdf9476593849c4f889e925b81f40130c1c14136" , f "0xac7d77105c46b8126eb262d87db710c308c2dea3010df6c976b14ee2a9218a19" - ) |] + ) + |] ; [| ( f "0x6b25b18f65f2746e4d2c42e981e3655ed3b355611886cddf635e55d37c293d38" , f "0x22b5a65766d212f242be5a919e5fbb2c4776f0206e0cefafa999198a5e43851b" - ) |] + ) + |] ; [| ( f "0xf59960e66478640dbdbdb6f6a15096f8224c132150cf8a6eb12521970dc5f732" , f "0xc5d6cd438ba561cd9f95e76813230f71d387b6ae5847b59b29fe735485e4f638" - ) |] + ) + |] ; [| ( f "0x82fa4439039793ddbaefbc028df1a5017a5da9bf02108584381f1ef52725320e" , f "0xcf16670c47d8a2dbcbfbd98c833615f9306696146a54f65e79e33c72ed7bdb11" - ) |] + ) + |] ; [| ( f "0x492dba2c4b9c901b8b128ad59fe4387a70b1201770dcebb4b8e095b3946edc11" , f "0x10abaee4a9df3333c96e39eb9259cb666d05ce3ca95c8f856bd4392011e57f08" - ) |] + ) + |] ; [| ( f 
"0x49cd072f012e6a55ecb8228ca2b62ddaac7897352e920352b440e6c4e52dff35" , f "0xf639bcc4d20194cc394e93ead322d0909ffad8fe01b479e7d48f7fa42c5e221f" - ) |] + ) + |] ; [| ( f "0xa3bc946eb28a27d2bd9b90059d7b2877ede2ff0b4f3a91470b74ffe90f165f21" , f "0x18bbfd210e8806fa5c0eda8f77eeace4e65bb85b694ce1b2b699c5d537b41e1c" - ) |] + ) + |] ; [| ( f "0xab6fc6d570a30e48183132b5f37ed5243e50a909b68cba87b4b0543e006dea2b" , f "0x3727449c3c95e8b882b58f44489fea52491f005c605d2adab10a45407fd3df32" - ) |] + ) + |] ; [| ( f "0x5ea4a8f281a20cc3af6e94ac4e2e35ad5fa6476a3a1276a1b9ebcb6fa76d820a" , f "0xcf18faa3a6d8c7cf0e853e3ae57012bf5476da2578501a51528c8227e7101403" - ) |] + ) + |] ; [| ( f "0xb37b8350808c8b0b9c69bf04a50d32b3720af173bdd13281a6424550acbb0722" , f "0x537b19a9a43cdf73b8efc56b9d3207abf5d789cdfa75dbe48446abe15eb9693d" - ) |] + ) + |] ; [| ( f "0x37376bf4fd490dc735780ba2d49e0530d8f68d3c39c280f768d03be2b1aa0833" , f "0xef9216a8f3a255505af08f6506d559c49e2ac27d9ce0aaa90150fe1347d03a02" - ) |] + ) + |] ; [| ( f "0x2de46e9ab3cad9fde2f4314027ae5c50b44d5707f4508ae70df6bc7efb6c530b" , f "0xff6f194cf2fc8b920515451dc25f270d65705c18fc41eb08306a80450a6c4228" - ) |] + ) + |] ; [| ( f "0xe7ba16e19009d51d2080c93d1e95dd1f75968c8cb5ccf18428ce9343ccfb4c28" , f "0xfba656c1ecbc484012887b56d4963897c747ccd7d353a8cf050daeabe9f9a100" - ) |] + ) + |] ; [| ( f "0x870b184b4f682e10ef4f0d6eaf6ecd000339da604414c88f8fb0a0204170c839" , f "0x548ad28f3687db367f848320d06420490e5cb0bda720c8b0576ce57d22de8f2d" - ) |] + ) + |] ; [| ( f "0xe4822f561705daf1c82a82a9343e9683000c6f8d9059a72bf11c8a1e63e9e802" , f "0xd1a9d3964fe50510c8f8fa36cfe79400850f559e55801a3d94218bbe43234f27" - ) |] + ) + |] ; [| ( f "0x763660e5a75d4463c61e8e76e3be9fe5d906ad5a8cfdc2ff1f7d110b0c66fe06" , f "0xa1d47b39b46b76f4965d27ea61882defe80841dce83b2df2d2b4eb61d929520f" - ) |] + ) + |] ; [| ( f "0xb65456b72a83d4d53a0a22c46a3a15d42ee839ca06e80d3a6b2a0aafda599b17" , f "0xdf0a2b6d07c53daebe90b7fa1c48d1d65823d491075b360f3929368bfa9c3510" - ) |] + ) + |] ; [| 
( f "0xd127b06fbcf51be0a89fbba4bdfe4dce3a12af1ae669b582b4b6e27f92e18404" , f "0xed847519a98ff58c773f3ec201a5a3a66fa2448defbf3b05627ae4704a84b509" - ) |] + ) + |] ; [| ( f "0xd066af6805476db74e663761f562017e5296195403c92ced0e524ea18936090e" , f "0xb0bfbbe5858117ed04a46105ece7cb765042dfa39c1ea59a63da7db480a9d202" - ) |] + ) + |] ; [| ( f "0x564ded1190dca9a79ebc716fb442c7fd5856c8a2cb1414059b20b5233bb2ab1d" , f "0xff80d5e43c40fa6952c0aa79e7eeecab54eca9a7515178af9166777695e34c22" - ) |] + ) + |] ; [| ( f "0xb3890a557979f1ee1b4c0541f58a73e5d9004c760b1085f47cc8de9fb547050b" , f "0xd42185d9f734d62f63c871cfcaf7b7c334c436c0d7f143cd5cc71cb8f795d91f" - ) |] + ) + |] ; [| ( f "0x9c5a095a5774297fb4b1d5f943a55cbd1c7493905b2e6f728196964cd705212d" , f "0xf07394f07541cb5dff470a5cf7b2ab0b7c6f2e0d859e5eb3fb23742281a51a14" - ) |] + ) + |] ; [| ( f "0xfc4727aca49bcfd80b321cf2ae9d02e204ab893589a24d54890b5df75207670c" , f "0xc94089b4fd412a6ef7f8b44778de7e82d48360ae631adf406f89a0e6d0b78a11" - ) |] + ) + |] ; [| ( f "0x0187727d9b6ba60f819c79336de30200f4f2b0179be2edce87a2fdf59b0ab12b" , f "0xb30579fdda9b366cdd9c4e2bbdd7ce024010da4e4f6d42767509c4c0c7677839" - ) |] + ) + |] ; [| ( f "0x4e155ceed4a32d4370b3f08f4bfeba403c1511f1807803f332ffb01a7e4dc82c" , f "0x6ec5e15dea3ed8e1a2c07395268757bf56a51eb48e1b096aba77c5092ff65201" - ) |] + ) + |] ; [| ( f "0x8d45063a39be38b8770612ddd4f2eaf5db55fc4ce5647adc727730f9932b1c01" , f "0x499ca2b64e7d37b0283058dee30aa2106502ec02155ba90413e135f281f9000a" - ) |] + ) + |] ; [| ( f "0x9ee6063116fc55f125ca26c558343be115741e1a3cb673fb4afe80145f99492b" , f "0x62385e426b67071918bff60d0a48fb461b3253a6baa5d7d8626db19d619b0d1b" - ) |] + ) + |] ; [| ( f "0x498e809c38c5a3aa7da08bedc0069d6a20ecc98dc9039ca91758225dd641f539" , f "0x4ffb23f907c82ba6d0b9abded59cc9193eb43cb83f7feeea6f227aaa7e9c4a0d" - ) |] + ) + |] ; [| ( f "0xb6110e7f1e25a7bf250de322a642360043e73dfb0afac15962074c1f3f01391f" , f "0x2d0db4225e0b0b1fd7ec30932448a8c0c4b0164bb2558f693ed93fad4bb6a917" - ) |] + ) + |] 
; [| ( f "0x8508f089ae702dee324262e7073ddf3d723e48a37f7e534a93e6f343c3a73738" , f "0x295fea23a84cc6a607986f5a6b8ac3f8ad935b6ca49d459560f521ff64e27904" - ) |] + ) + |] ; [| ( f "0xa9c05d828c216f7fc4b47d9237240f6833879de277273bf2e570c9b61f5ebc33" , f "0x722133ee2342362d89395239adeae9dd847d3bce2c32d815e35b9c25ea2cfd07" - ) |] + ) + |] ; [| ( f "0x72bda24adbcaa49238c435bfead06242c2419c8a247d211a6790faa3d0a9ba18" , f "0x804a7670ab67338eff04d1ad525dd545b0014248c1fba4df28be502bdb3f1f0a" - ) |] |] + ) + |] + |] ; [| [| ( f "0x1bba802a8a8c2e7fc096fe16efe0801030cde21a83e4c0e4b4b7e956a6b02a24" , f "0x4efaa4aa255ee15d2f4926646934ee9519080ae0558ab8917487da790ebe0327" - ) |] + ) + |] ; [| ( f "0x642b573777df8f10c6a70c6b15980b0f76b0bab941437e63953d11d14a49e40c" , f "0x0158130449238eba8a48835884a3bfc55f4415898cac903adcacd9145020363c" - ) |] + ) + |] ; [| ( f "0x048f89385612ee85bb92edb27aacc25fdcf83ee0facd38350cf67c3d6f8a4426" , f "0x8f3a1478201742d10c60761536e4a07887dc044d523a63e4d8548206b1a34228" - ) |] + ) + |] ; [| ( f "0xee843f09f81eb64e417f111fe6dbdbfcb46f990f6265230ca88f49be9e75ab06" , f "0x025a832fa295c308e14e63555b9ed9d117b27b3afae90b490f5f7f52ee90dd1c" - ) |] + ) + |] ; [| ( f "0xafce81669a59792542cdd2aa543e7d3b44f4078dcee737961698a23663c11f32" , f "0xb9b7e43ac459be6c1b212e1c12b074bb8e140bb1fe1f0ec7e4003a5773275c15" - ) |] + ) + |] ; [| ( f "0xe3516b2dd703777f38d547b7da036550561847c439fa3b5c2e1a2eefe32a6b1d" , f "0x5d3a0887ea73bede74bc9c066580f5cbb1d591b275e8c16b4711221e8db8f40f" - ) |] + ) + |] ; [| ( f "0xe187ca076b9818d1f0912757d9315b579e87f646263d52b82a0cf33b633c9a1d" , f "0x30af161b8a92a89aeb3ba11b36059ecc6741b407a858cdc1a8e4fab6c7c93f16" - ) |] + ) + |] ; [| ( f "0x17fb6b168814068a65e664b49dd91201576d95f528f96512a050242f47b16907" , f "0x5245ad72dce5eacda9617cc51b63b064ac3443cea0e9647461a5d4c1de67a230" - ) |] + ) + |] ; [| ( f "0xbd768948ffd97e51524403a280ed56333ab94d2ce230759474dd6e0b578fb009" , f "0x65df9f8b5cf7b9572dab6d003fd6d4bc409c4b2f36c902f7b5ade74328eeb029" 
- ) |] + ) + |] ; [| ( f "0x9fba87b78e5d322baabfe202f262d17bf7a23932eec1798597c22432ed6a7c21" , f "0x62d10d3a485b6484a2e5c2caf3e1c765b129547a0fe6cb26519b4d7db48c1600" - ) |] + ) + |] ; [| ( f "0x2c4de06a6782f44e078857c38806cd9aaba79c24257a730aa8c21c3205253e2d" , f "0xb209f8b2c0dc3185a9aa8d31a32f4cc8a37588e137398773aeec26bf804d850f" - ) |] + ) + |] ; [| ( f "0xee06af56a9ab9a74746cab1e20fdabed177020ff1acf898e1f2641e6cc4e092a" , f "0x7af873f8cc3ff96c3497ef1df53233ba755434e9ee45dd3eae11d4a02d23912c" - ) |] + ) + |] ; [| ( f "0x03f0c95e8a2640ac4a6b7913d18f357f62e73bc659851a7b10a8ded8624c413b" , f "0x7fe7b99fc7c83413b120d785f45a35a6b7a4e332632e82da3fb2c1c54fc09319" - ) |] + ) + |] ; [| ( f "0x77828e5d2e7c1d13e4eb074a0d8d41050bf27eee6292c8d2432c2f8ed5228309" , f "0x40b895327b286f0e8edc44cbad31f05fa36e9d973067c34800558563bcf1ad2d" - ) |] + ) + |] ; [| ( f "0x34a2d6a07ef4940d05a22a75b685ca32a3091fb401d729fbd37dfd27db9ab007" , f "0x0b128445ed7ccf52cee95342425969dc42eb5b46b4a8cc314313f58462001617" - ) |] + ) + |] ; [| ( f "0x979c08590b6621fbc23ff03bea41b31b033e8ea0c7e69ecc8c342cb986963d20" , f "0xa2ec6aecc844a2f5e1aaf6be5a4a843138ffd87097d8209983d8312d7df39111" - ) |] + ) + |] ; [| ( f "0xf159327f5380d243298e491ac419d915a8263d56071d657df45c4e0434dfb635" , f "0x38158d407aabb3021c359bd9861bc73457ce1b8aea9ffd719c3be85d21428412" - ) |] + ) + |] ; [| ( f "0xe0f2d80385ba7c1652d7ecb1ddc5ca67ca4bb6f5bb749f9b8ddc62c316820d22" , f "0x319cd600b83d1a18b03c95a67edc2a3074296aa972dc5dc71c79ccdcc3623b08" - ) |] + ) + |] ; [| ( f "0x00545ae227aec6424642389f3dba1a91cd8e617a1ad8950631aeb25e018b941d" , f "0xccc2fe6096628ad9a5de70906a1dc4dfd6f84e88957da089db528eee48679438" - ) |] + ) + |] ; [| ( f "0xc40fa6ea6614a7d5b0ad00f306873c725903e63795029cbce7d8bd2fd5ca691a" , f "0xb9508bfbd74aaa7bf36dbf6ba4570e6bccdbd875cf21b3aeb06976acfe6ff81c" - ) |] + ) + |] ; [| ( f "0xab993361fce42bf2b10c61f2d8b5890900013961c02fac28e6b781344e69002e" , f 
"0x8654964cb45753ab130d8dbf097eac8b140b80228500b02ef2932d12ae9ad128" - ) |] + ) + |] ; [| ( f "0xdd4561b8af12876c439022a7cfb9bd7ed0b475f7dac56fe0288639d57f8af92a" , f "0x65d3d13241fcabb50268df24fcef78e2944b0986e7bbe2ff7aff2f58b8010916" - ) |] + ) + |] ; [| ( f "0xb9aed9cfb55fe1b294ea4a8c903617c9d1437bb03c7c95c55b682d69179fe024" , f "0x42c506c5f3f1c04b8e9f4ed8a0ca580e20f1ec385d4ae06fde0cb6bbd448ca26" - ) |] + ) + |] ; [| ( f "0x54d855e5cd238b6e6c3ac0e0e0f3a2a8196be15d8ca262498994253a464df336" , f "0xd5539b3de2ca2c8a816c2aeda3f360cc112dc83204ff864f6c17cb628dbd4137" - ) |] + ) + |] ; [| ( f "0x09c027210d861bda9136d9d8c8ef0af20c84977b1f5feebb6bc9e3c003200c3e" , f "0x8e36f733ec0baa8b1beaa49c1a9c52e07c5d424a672c88162efd8da69853a32b" - ) |] + ) + |] ; [| ( f "0x734d0dcc26048ba2e7cb55e5fc935f0ea16a3fd861af494b6005c861092f3005" , f "0x6a6ddbe0a7585830a0775697a90e54cfea18ea72480233ccc28e12cc1609b300" - ) |] + ) + |] ; [| ( f "0x74a30510cc95ff507bc407e7ec53ee74f2968f674b1c6c474be24b67a0c30139" , f "0xfa6c80973fd8fbe24dc4a3ed310a26d53fa3d0a0b342de95c085851105563f23" - ) |] + ) + |] ; [| ( f "0xe04c866a6336e7646bb46b17bec246bddd40300c3e859bd3547f0b213daf2a08" , f "0x69f65d4983788a246d6e3ca32d6d992eb6a99f491873479d7daa82b4428b3211" - ) |] + ) + |] ; [| ( f "0x3a6c8491c6691dd244236f9a6e15e54a8a9fb9284933e1762197b6f7f37fad2c" , f "0x645c7f99b5fc3d4295ab0bfada8be879d3d624588b64d2676d1e4b35d3eb3926" - ) |] + ) + |] ; [| ( f "0xfceffb5302d92d8ca5df8615b1754a541c9acbe6adc7a2222b6563875c9e9902" , f "0x183a0c16050bb7bd0da6680b61404c72c007a28350a8b0749581607357f05433" - ) |] + ) + |] ; [| ( f "0x40342eae20977ae038849b4ecfea05abebe91106a18fd8ab2335de391b2ab005" , f "0x76c962adc01750b0a12b4ee8660bb7b0057f830e52bb1bb7ab1eb1fe937c881c" - ) |] + ) + |] ; [| ( f "0x1a6b88ddc9f6f8c89d99d2065192229f8c4d307c224986f421668a938a7ece2a" , f "0x996b00eb4ded4d55479e5ddf7ef81826447d87290592846182186944b524d217" - ) |] + ) + |] ; [| ( f "0xc26b8b57af7906ca8262a34cda426d7964b3db1602d0ad3e06494151875d852c" 
, f "0xa862b241c025c3342859c57a305b209054470c194fe956cbb598605deb7f8c32" - ) |] + ) + |] ; [| ( f "0x0702137f77f8125a63b23a730a81d6529b92d29d3ba9998df5c1df1d08d7142b" , f "0x7572e48a4812dd7410c993d76484967227d820faa34cd6cc6d5bed3c6061e62c" - ) |] + ) + |] ; [| ( f "0x8b17f9e3e05e5162672627601d6c16ada5d49e8cfb8d8375f5d82f2728fec032" , f "0x6738017a10b8add68287b850d2411940caea36eb88006293eda37ca45fe74e3e" - ) |] + ) + |] ; [| ( f "0x0c92bc1903381820afe71c3a2285a119d6b07f40d01062dfcdc38575219b652f" , f "0xd0467836742cc0e43faf500daf1d516b1286427ed2857d6af0833f90a48e6f0e" - ) |] + ) + |] ; [| ( f "0x862d4a2f4d9a32d6d05ca133ab90ada3c95b01913f0a0bcd7a59ffda2a74532e" , f "0x9186ff60663d8891387f6e4a3f574b07f25735ae0cf29dd4ad5b248ae84bc03c" - ) |] + ) + |] ; [| ( f "0x806091fb8289239c77ee9b420776936af08b324ad7db8cc3e178e12972c17b20" , f "0x6ec069a3e75473064766fbbb9182656e9ca3839bef7bf8e7e61bddf3ab6b4a17" - ) |] + ) + |] ; [| ( f "0x07fb446d08030b24ac183650f6434487948222bd0ec98d17f4064df1f3793f15" , f "0x9b34a866657563f4e1967eb6951e7f9f41bdeec329535e572586881f4c0ec610" - ) |] + ) + |] ; [| ( f "0x943fcaa402d9cff59154691ba7d9b1987522596003acc8f741bc03ef7174a538" , f "0x6aa0459e57d64a6f758030d502d235fff046b8b9bcf9aa977c7300e094e05000" - ) |] + ) + |] ; [| ( f "0x495bce1c1041029feb3fddbec35765b781ce3033d546a3abc81c649c25a7a90e" , f "0xc8eeb0c543d4d4f3ec72af75c18cd2bacbfebdc595a43007c1821bd01e30931f" - ) |] + ) + |] ; [| ( f "0xa333717a594d553838c130a940b9047f1a6b55b339dc94898e2ef49ebfab621d" , f "0x57c1e5eff25794e55442a0577e9da7bf83413f2ab147029a1fa560a4b78e6437" - ) |] + ) + |] ; [| ( f "0x5ef2b54d082346e1d46fa2b8ed69a7879e96c9dd1ab67437e498c8526f143009" , f "0x291efc83baeb965cdeaadcee653c9d23cd8c0dafecc1df586e0dbca6cba1d230" - ) |] + ) + |] ; [| ( f "0x9b9431b07e19e184ed8a179261349b1ee470fd822672fa07bdde15e89f7ffd37" , f "0xf2e6e77fd80d2f9454821b0bee384dbbfd952e2e73d7111a93c9ec83e05f6e1a" - ) |] + ) + |] ; [| ( f 
"0x10c8484f6cf726bf60aae04de1b500ffed5f7814e815e174cfc603746940e235" , f "0x9606896c75d7c890bd08935e3aa141fdc02e871ae4d4f4d9c69c7da9958e4810" - ) |] + ) + |] ; [| ( f "0x6d9e0484876240f1f301b366e53073a49d789526d7ade92c28c4fa1bd4640506" , f "0xaed7ae14c1febd2e1fc85b8459e6808d814c1aca4d5404135414ab1e303e873e" - ) |] + ) + |] ; [| ( f "0xdb2e17f95ddce2ab926e3967a01a504b6339642515ffbe01a4f58a6163dad82c" , f "0xd809de9c22c0be061f7b305f5c0828bd0762ca44e93183e33faf0c633df9762d" - ) |] + ) + |] ; [| ( f "0xd4fbec062ea31ca75246005ad14c4e059399bc653b69c8a51ba436167f848503" , f "0x3b2194e7496639cce06c8d4561b1ab320a9379c5011d4e1f7148fc668cf97826" - ) |] + ) + |] ; [| ( f "0x6f0b643afa31883dd1ad3b5ae97e6b9bdac6ccc2bda4b8686e1c472e85192f0a" , f "0x3e24873fd189374cd2e22c46265baa767f092bd1f35b1a947ebcc2d38208f62e" - ) |] + ) + |] ; [| ( f "0x46b6ad0f68f589dde2dc85ce0a5a4ab3309da858ffc012784663efc2ac8bb11e" , f "0x207cf39134e1fe88c3b4e3514924d7748059200a6b31900ec460a77fa7ca6115" - ) |] + ) + |] ; [| ( f "0x58753a89206b75e30afe16a7ed3440066b7a69f0596f44182f658b1a35ead821" , f "0x3f33206b5891facd5b048cb278abd29a5f9910425ccf46aa2b650f11d8f45903" - ) |] + ) + |] ; [| ( f "0x2a79bc7c705343d66d77fc4dc80bcf0b31ef60672f1d54fbb95ea54bd0846101" , f "0x0eb43292b1facfceaf6acbec3dffce0286490e4ace5a5fbca2df5dce3e2f7323" - ) |] + ) + |] ; [| ( f "0x059d0e360aae74590e9c823e31c21c08bd423ef5f140f1721afb4a30a6376620" , f "0x765a0ee9d664eed729890a71841f8536941a8445c7c2ecbc046bf9a6cee8f512" - ) |] + ) + |] ; [| ( f "0xbce700f809c3c1a42cad359605e448c03dfbc16a22ed5fe1194f34a4d2842911" , f "0x7581aca8ae598363c64e9d028688b5b164744b90c8bdf038a16154859ffc3c1d" - ) |] + ) + |] ; [| ( f "0x0f752330b0aff88aca623b6ed6cce6c0b03d3f46593d6ef1d85b8ee08303e512" , f "0xecb2693f16c7afea71518af858c6da9baf3bce827ec9a589f3dac52faad42b03" - ) |] + ) + |] ; [| ( f "0x552ba37c138e34eaa655e4ebc536c24b23280712837426e303a6d6784ef5ef2e" , f "0x2330a7df960c9f81503348ceea55585d7514882275e30dec11fe5f6651e8c218" - ) |] + ) + |] ; [| 
( f "0x3f9f792881104c95ccae58d6f28dee4c4832c6cca57f848d1da59138e5a01914" , f "0x5f7fb35bef5bef64a5a51692c5ecbae5a79548e873a7c16adefd93c47463d21a" - ) |] + ) + |] ; [| ( f "0x9aca8c4af87a76c4a82f3c26743d72e91b8d9b7597312a5b02b70f50aabe5933" , f "0xbaad4be25cd14ca119022c68075cc678a7211245296ca0386b2b180c8ffafa15" - ) |] + ) + |] ; [| ( f "0x77e1763bb01b6a03d8c27a925f400837a90706e1b2812933a4496677c136463e" , f "0x0731748e95bbb5a495f15b1a4b1d659e46d01a3c446eecf14877f8c1a630f505" - ) |] + ) + |] ; [| ( f "0xe8deaab005c8384b68f70ca5d65a3ffbb10c6230d9f07d0d21ef70691594cd2f" , f "0xb2bad26cb2bf90964394ea5e1b3e7a9dd4f208e6a1a8e8dd8a09621ac46fbc07" - ) |] + ) + |] ; [| ( f "0xe3e229001b724e59956858b99ee16bd62305c1bb7598f663502765d757731e37" , f "0xf6613e80e3403b9bbd36bdaa3d07482577fa47ad909ab7783d4b76639b511f25" - ) |] + ) + |] ; [| ( f "0x9e4321b6c1a26a919e1f0e603050cb4835b942b0f68285fd7cd37f0e63831b10" , f "0x040714c3ad63db814aeaca8a33bacf2ca5a25a38b2e0a6c64314545ec87f8020" - ) |] + ) + |] ; [| ( f "0x415499e8f45d0ac2cd325c79402f7e7fe70e6f86a3faad7743085cc674c9eb13" , f "0x4c3d9edbc9cd8e288137a84cb8012f913855c1bcbffe07017de31db58608301a" - ) |] + ) + |] ; [| ( f "0x19f335315caaf75e6918a9bc01a80bdd0db0e94349b63b3c76059586ad87a736" , f "0x732d5a5161172dec534d2a89867cac8194a6277d0d38fb1d0e622115d5222c16" - ) |] + ) + |] ; [| ( f "0x3c435288d485de6c0bf730064d96c8fbbdd1a6e18fb54dde6c419fab76fa4d3f" , f "0x44a6520e0a48b4a54b66e83be461bfc84470ef06e60cde42853e280db6f52c28" - ) |] + ) + |] ; [| ( f "0x0431178fbb4c49d471dfe3b5eec5cf889e1fc6ca9e75ac93cf7b8576307eb133" , f "0x5f41f4997f73d82b9cfb24bd94dd9f3da3c7b6c3e5b673f6c70dfd086c303b27" - ) |] + ) + |] ; [| ( f "0xd2a17fea0f1ba1f9698f2b7b650b89be88955f7e199cdb77a72d1388f2137a33" , f "0xb8b98e9398b734dace627e21895a54f9309a58442918685156ee7ffa98584828" - ) |] + ) + |] ; [| ( f "0x6e42cddecb0c12ea05ea48c3f91e3427dfa18c6f6e5548ffa70f42e94581fc3a" , f "0x5c55a58fde5b383d17080b0756e0330e5c3d20eb3d83606328f2236e4c11c617" - ) |] + ) + |] 
; [| ( f "0x55735d53ce639ba923722af5da614ddcae0d4b36b99e7f02ce25ae28e11bc138" , f "0xf23128370573df91144905c37733d78538bd17df9080f38b5edb82c3bf2fed1f" - ) |] + ) + |] ; [| ( f "0x29aa6d366376c390412c77feefa8afad8db9ef5592f73eeb62e56db1f2db500b" , f "0x67c2f4f7b2e245f5dc2afe37e9ab0c849a27707d0c15b0e6ca50525f3b10093f" - ) |] + ) + |] ; [| ( f "0x71c292487de247d07448d7c1e79d1bfc90f2888b3fe609fc5147e1bc7c666010" , f "0x2720c797a334a77bd89467895b5e3dd977d6bf29bf5b57eb582d4992a8d46c1b" - ) |] + ) + |] ; [| ( f "0x85f8fe0dfb38d28f3c2be721c2000fc693912dec24d800a9139f306d3e984d0b" , f "0xda27ef3f12f8fb9a89a24bc0f465d9079a5bc236b351ad5badd8bf37874b7a02" - ) |] + ) + |] ; [| ( f "0x28284fd0f31e10ea4784c9ca5815de203d45bf6690a30aa80a815c3982849a17" , f "0xcded22e6951dbbcf0a527e3f421bb5b52744dfda7115492e4e7d7a4465aa2017" - ) |] + ) + |] ; [| ( f "0x3b59181fdce700a46dde4168fcca457f74ce962cb4904fa49da22649cb1f1a22" , f "0xee78f57dc1ad4c0c8b3b1f94d9e43ecab4b735e38c328a2f1de5167843573d00" - ) |] + ) + |] ; [| ( f "0x23ccebefd9f94941f9c03c4115e610966794bf89406b4ac3ef10dddc49dd9c1a" , f "0x6e0d487c16fa4f3b4f200680b1953b6184f3d6b89f502771bd0fe59029e47622" - ) |] + ) + |] ; [| ( f "0x5820a9b47063c1b05fed2890aba61e92a4383180e8a06260a6bb9ab51b24582f" , f "0x0549b193135c9d852557aef57bf3b09d99398cf314e1a8c99c3f02b98601f709" - ) |] + ) + |] ; [| ( f "0x860ed93f8248a418d9fd12fb88365ea3521116089ef49b80f50179f5ebd06600" , f "0x69127591c343fd0122da970f75cdc319608178255510136eae08624d5ef51510" - ) |] + ) + |] ; [| ( f "0xdabb9695b0bf1e48513d5428f3f4e967478cba7e5ac0d3382a71e3adcf4edd2a" , f "0x7946c248f75ce89e3cb412d9d9edc86786e1e8fa23b8570980d580d26275bf3f" - ) |] + ) + |] ; [| ( f "0x3c05a22ed88b3832ade64266f6b5170ca8718b61d8d215d46be0034818a54905" , f "0x72c62efd05b743bcfad64842b93f2fa126a437d40aecf97e76130cccdfd4873c" - ) |] + ) + |] ; [| ( f "0xca51b33da325d8ba79d6aa9f7e25d55c230faad4b93734eb365390ebc8283818" , f "0xb3a7b35ab8a2ef4eed8e96e2bd1cc226a38565e978d7c11b2be39c25b39a4c29" - ) |] + ) 
+ |] ; [| ( f "0x6c943b0dc00b705e260e394817e79a3feb9ef3b9d0b6638c961f22a421194430" , f "0x345223bc57de887b2abbefb12ecb147311f6bbd80fea313903d29aac1665c63c" - ) |] + ) + |] ; [| ( f "0xa3644cb4321d601bb7bb644eb27fe9a3d9962c315e3ba163327e0dd2fcf6e103" , f "0x3cc3586337209ecd86ff78350c5b41fd98250161c4a45207e5eccbe6d404c807" - ) |] + ) + |] ; [| ( f "0x1395b1f530f5ba834554f6fef6e6e2b8bcb2422d097d01d25b19894edcf99f18" , f "0xe7724507ec50267016c4fda097ca0ba9a4bdd8aebba0048cea6ce928a670650a" - ) |] + ) + |] ; [| ( f "0x7629477b809388941c2aa3cccfe598eb270611175b70f42d9d85e23c8f39633c" , f "0xddad30dc67416b24bf9aca728c3de6ff337f49921afa19b14dd64e35d969303d" - ) |] + ) + |] ; [| ( f "0x9aeb1eb3de3805f123cc3e927bf17cc54ad62d4ce6ab22e11270e8d61fb71d04" , f "0xa25748916ceba3138c8b5550441c6361edfc4aff6c4b189a909d61cb89f3fd15" - ) |] + ) + |] ; [| ( f "0xd5c360285d9c9e6d7b415ff76730b49cadfb01f3f09dd3d2f2d1e5156a069f13" , f "0xdabd724b55aa363b2cc5d800767a1f65e3c4fc4d8507edf10e74af63af337523" - ) |] + ) + |] ; [| ( f "0xe7503c0a388f4e920e423cda126ad0d06ccbdb1af9ad1541d5b8d013458a3637" , f "0xc388dda32bf61de852d0838a57e37087e37c6b2046d64b0dd10666d58c471b1b" - ) |] + ) + |] ; [| ( f "0x65231a23921bc05f6a88ea1e84595df9cdbf5d7394927d6756e05b98cb551731" , f "0x26e1b51cf2d5af3ff5f0fa94a6edb317629b424c1a9b6ca1ab74e5adc97e773c" - ) |] + ) + |] ; [| ( f "0xf8d47db4fe6c26340c36933f6754323118ed8fb1cb42842f8d422e9704154e3b" , f "0xcf6189c388bee4f7c61100cab938597ca66c6fa732e46f644e26c21deb606f10" - ) |] + ) + |] ; [| ( f "0x13a010d662b9c3cb87986acf3d69b14390818cea028bf74277d46112e86f0b37" , f "0x5c6f1e125e21f2baec3357e27ec3d29aaf0cab367bce7ccf8172b44bcbe2bd33" - ) |] + ) + |] ; [| ( f "0x20ff1f00039edee2fc4534ee525a7e478969fb68bb37cfd860c10c899305cd34" , f "0xbab2bc920949cadbb90296c881189cb37f5ac7636505df0b8113cb338d024930" - ) |] + ) + |] ; [| ( f "0x6d63b708d8c068c180db9562a1fce26d3978fe56b5589c65225e9a773794d135" , f "0xf3be381eecf51b9a6fd78a2214d83507edb38e3206f206a781bdcb69fc1e453f" - ) |] 
+ ) + |] ; [| ( f "0xfba4067bdbcccfece0765bdd9a076733406b7880c6eab196b0690e6fac75f81a" , f "0x76fbea90c38ab257f3af9d6e0498abfdc4e4533a0af6dfa08117cd02b5f4eb11" - ) |] + ) + |] ; [| ( f "0xf27bb99de81fe459732e5defc31be05651851eea55f9b33893e30506585d6739" , f "0x649d6408d154320eb99445da4932f8486e5a1a9a42ccdd6a6292718fd96a0508" - ) |] + ) + |] ; [| ( f "0x6bfde122c01f1fa12591e5ae06055f7d3c1442cf95ff0ea9a7d6d35773914018" , f "0x70490cc916628a1792807f18adde4cc8081989a81b03a04c02db212e03bfc300" - ) |] + ) + |] ; [| ( f "0x43880eb8ae8f75aa3c8149589a263ad10570bc438b4f31f2c935895d2c406d08" , f "0xe92b17c2d9d8b55a5e919dd9d9ca9cf0dca03f936cfb56261b2d476a7e6bd13d" - ) |] + ) + |] ; [| ( f "0xa3a9fe30b5eec26dfd0d15e5f93e1532ea751348d3fbc4492a2601ba57380110" , f "0xb5104d75d235dcb638f4240edf860bbbb3c07b382219ad74365645b4c2de2e34" - ) |] + ) + |] ; [| ( f "0x00fbde74f80521ced87845f363d2f974d639f094a6b7f96911bcd91056ff7d05" , f "0xb8013922b0a385fdd500c5dfc0099ca9886695b8c2ba78fb3fb2afdaf69b6719" - ) |] + ) + |] ; [| ( f "0xa379955b7e40e4723895b8e8b6da772041d8ca7eeabb8eed6a24c55457d1f817" , f "0x348d39db4ea9ca854fceddd490e6764023cec093dd63446f826504fcb45ab810" - ) |] + ) + |] ; [| ( f "0x29e6952819888cd6359bf07eddff7598e3343bc62e9030131017c010c858be1a" , f "0x2f004b9b2ae0c3af6316c885879a15733580b1c3a70b92529718cb8fc5719a01" - ) |] + ) + |] ; [| ( f "0x4f23b759c9547680fde193b14194d19c53c58d6fa888dde1a98669a21d40ef1c" , f "0x89b03289c85fca5945602ce3056d62f1b4bbadf3ae9a5612cc8bebcd5c53593b" - ) |] + ) + |] ; [| ( f "0x213f408487a02bfd6894887e06a57536e82c94a5f45a2620cf170d2a28360a22" , f "0x92daf1504490c0ac46d1a828f3fcb77e727c46e99fad2a54be60fb09bab55305" - ) |] + ) + |] ; [| ( f "0x5e766777e70f939a6a9e64048f39f2d6e2a19c48328a297e45b244c3240f062d" , f "0x3636fe129a2d4a1194fb562c09b9c1f331df427dc86bbbbecb8071e013239425" - ) |] + ) + |] ; [| ( f "0x56d6dafb2238ab2e9b5ed5be5b4df6db4243ce933ce2984c3ce64f3b5833ab03" , f "0x57228fdacb4f9e0abd2bc9907f00c6c75dbad2cc40324abb4cafdca9d4988f16" - 
) |] + ) + |] ; [| ( f "0x8568877e63093f9258a940c90964ca3d09ee621ae6c5d9087bdc843bd81ddc0a" , f "0xc55e825bca9ff96b7cf7f79928c1651bcde66988f44a3a09fa2a9a004d03d926" - ) |] + ) + |] ; [| ( f "0x7952a0b3d5003377b7fa813a83363316683355db680b6131a6c1c805b006a524" , f "0x35584c1ad9a385a196b4afc35fd05ad9e1cec700c6288667cbb88bce09024c0d" - ) |] + ) + |] ; [| ( f "0xd577e81db95105ef775f42154e683746aeb81f23d26ff9c152547bbd4b568f07" , f "0x2a20252b8b70fb8f6bce0ffaf3f2ad4589dd2568f70cf6c06fadf1ebf7b1301e" - ) |] + ) + |] ; [| ( f "0x46e06cc38c3cb835f194cd33464a4f90016d2436af68250900e48d04bf046727" , f "0xf3fd14d7c9779f8b703f171d70dca6c224c127c044de3e2ec9fe49f244223d02" - ) |] + ) + |] ; [| ( f "0xbd7881647cc402a805d434abcd7777209164bc2816c31cd4c564d391d4827f0b" , f "0x1d213b5fe1920e23f097dea28e52cdaf1217e150aec4f7bf507aa1c602d75a30" - ) |] + ) + |] ; [| ( f "0xcd7180d53b64c40a9abe1dc044b3afad518f4a41c927db1f05af34bc5335c934" , f "0xe77e9a71afbdacff2b8376b7dcbbc0f3337f15844c5761ae68b5af3e998d9116" - ) |] + ) + |] ; [| ( f "0x235f0e840e9072317a992e282c1c48d32948860cd2cf049a7507540f074a3036" , f "0x69b323cc73b52f593558b04272e55504ff28c6b69cd3506dd154267b421deb0a" - ) |] + ) + |] ; [| ( f "0xbe963bfe9fd9ceef04d43bab3453ae0b9059dfa0893c536295a2df41ea98a11e" , f "0x6a7a4dbd5732c33e96eb7186146179f3509cd8a4fa30a69bd090ce044748321c" - ) |] + ) + |] ; [| ( f "0x9d29730190d3d387d80e74eb5d2762e9edaa8b456890ea1729bf2d4c1f584f15" , f "0xfdc46096b997586d0aa44e1845f989bbf966a9acdc5243c0b175c070ad9a3e0d" - ) |] + ) + |] ; [| ( f "0x3b34db7e67ef9f009e1f2016a4edf19ad31c180a3f3322e31a9d484e8b29b11c" , f "0x2f1f8c32ff8e52a08ed246a228648c55095b2ec53492806d5ae77dfce45a1a20" - ) |] + ) + |] ; [| ( f "0xa001048a9f86537fcb13c241e1ac97376b6b75fdc8f60c684e642eb90fb44201" , f "0xc71bc32db8f7679889890e5bd7139be3fee15808f16984fa9a55bb302288c033" - ) |] + ) + |] ; [| ( f "0xb572a0f4588979b29a193ac3bd239d624c91e71efdf5db04ebd53181673b403b" , f 
"0xff3e42018c3a05427af30cfea99d31ef809504221bc7e4cd5ffcacec07f2ee09" - ) |] + ) + |] ; [| ( f "0x1fec81cf84e1174e28c6a2e4effba70ec38617b3abbd3924d5124335e64a683e" , f "0x05a543bcc9aac2a09ff0417b38208a3adef1a533425687fe91b3db173248b41b" - ) |] + ) + |] ; [| ( f "0xa3cf32d0afb2b7388862822e4e32945d6ed0014ac8d7a504afedc3e38db25e22" , f "0xdbdcc85bad47cf696f74df1a25ea238bd3d62762a9ea83dfb083ba3ca6cef829" - ) |] + ) + |] ; [| ( f "0x4392943faf4b94fe79cd98ee692f029dcf2177d7353568a6285f9a5d333c6638" , f "0x84b3a884ad5d017eb2db7404de8797b1d15f1571fc87ab906928eb041712ac09" - ) |] + ) + |] ; [| ( f "0xa102c7ed07bdf4a607580051b1d00e9738d0c8907577ed88588c398c71cdaf25" , f "0xeee43fe81dea4905a12ad63ac280332fbd7300de1ca6bd2d39ff51b43687ac1b" - ) |] + ) + |] ; [| ( f "0x59f39e959be1d6fd0131fa4574c5184e2de7d9991af9e3db447235986f98b106" , f "0xf1a0ac33dd758bfa5d591ff249e11d012ecef7cfd006fffdead6f63c3d14161e" - ) |] + ) + |] ; [| ( f "0x22a2e99ec7df2980dbed9d8eed348ba48cb09a91283f0998347210918d470327" , f "0x20f90165ce2488a5260d18ae176ce680aca3c8bea931dda9f0e3a1aeb0581306" - ) |] + ) + |] ; [| ( f "0xdd259d6f641c509014c7c769fe5870ab5c1ca63057c5b911524fd82c16de1934" , f "0x6085508fd52f4b6d75e54c9ca698acf5bc8a7b5fe6f67615585878ad74b5df30" - ) |] + ) + |] ; [| ( f "0xb2de97862ad25d82b1245ca207e881b94acd79cafc837aa731a762f887beb61c" , f "0xf35b99647a6b71ec190083e73a7ccb468f71b59c5499981b4c98b6f0b76c5018" - ) |] + ) + |] ; [| ( f "0xb714652f90c3f3c6056edc67cdbb0a05295827a2ea9b4478619f874654e37722" , f "0x9b29f93e9a68fea251735aa636c5d4d8f7afcf8816f2e6de2b26d9cf0feec615" - ) |] + ) + |] ; [| ( f "0x4852091ff57e8bb5a4bd02a17053e350c7338325bce33e36f59981f285db2125" , f "0xb792053722d1c858b1b62d85ee0bc0d0973a377bf6601c9585195be2d5b52a10" - ) |] + ) + |] ; [| ( f "0x662c3cd760e05523601232f7deb1f22d12b928c21e3bf8650a1a2180574d9208" , f "0xfbfd445587b3909b8d5e7fdf313d64bdba127419ef12e8fc3723f00b3486e023" - ) |] + ) + |] ; [| ( f "0xa0a482dcb3fb5b31ecf8c5d21249a4d5c26d953f753e1145e979b85ee1791a3c" 
, f "0x026d702c09ac20604bd7d31ddd51af47dd9a245c17cd258f570c09510fb3c738" - ) |] |] + ) + |] + |] ; [| [| ( f "0xe5130174726a6fc737e7e78cd0089469cb1d2e6d1d2b71f53efef51f6125f91d" , f "0x9cfd5933b2d354f095b1d579eab477967b7587feb39da961a0159c896404fd17" - ) |] + ) + |] ; [| ( f "0x46338a3443584861bba86f3f7594807f1050a59f9539a3b0a7f3f695a0146c26" , f "0x7366b13bf9a111a3250c45617e4ed373d1dbcdc6bd72866416d92ba349136332" - ) |] + ) + |] ; [| ( f "0x542cb666935a2848586ca15a86a1cd44cdc4c8518e188f6195927c35cda0850b" , f "0x8130049bb242cb5dd01d4174e38ff13716c7e047222e64d304a8734652c49014" - ) |] + ) + |] ; [| ( f "0x96624b87eca225ee580f4bd8882b9931a8bdb89db111a73a892f769fd567910d" , f "0xe57d53831d06d6a64fc8311df323c65ea0a57e02cf0d56dfa784f3278022b13b" - ) |] + ) + |] ; [| ( f "0xa48629cfefaa0f1f2033a173ef2ee219a816908ef31186564bb82bc1edf12d1f" , f "0xf259a1cb6e5d067acafc68d5aa8b704f2132aca1ec043928fbc01c0cd6537a04" - ) |] + ) + |] ; [| ( f "0xb06f680531814eda8cfd0fcfbe45c74d4a0aef616bd632d0c3ed04eb7e9b212d" , f "0xccae1879e110d70be92a8bc5ab5d3f883d3bd99891503f3fbbc45daabea1f20f" - ) |] + ) + |] ; [| ( f "0x3a4a35069283baf8798da790d19e28c7b66a01b6bff9c16e02ba8c3626d8781f" , f "0xcf2a15653448e3f6a42c62d840f2fb2b3cf73e55bb633f17b6be13def61c3c36" - ) |] + ) + |] ; [| ( f "0xeb6a93f3d9e5b88d4d74204c124d95d4e601ed21692fdaca218dc06030e52534" , f "0xd094442f4cff8624c5bcd9bd7d553e256518c5d46baf3114f203ff378f5a0f22" - ) |] + ) + |] ; [| ( f "0x7ebc15abaa6214ac62c6fca96c4fa0ac249fa58d9c084fef06a8dc01cc445312" , f "0x6a156ba59476fec89e943a7beb39a50b64993e5eeac258057bcedd4300e39d1e" - ) |] + ) + |] ; [| ( f "0xc70fd851214655d04354d1e7c443796f3fba02a05e4ccf70ed107757b6551a14" , f "0xc4fe49fa2efa0e920ff9f3475a45bb6c6878f2d1c29ab9571ed41ce44d2f901e" - ) |] + ) + |] ; [| ( f "0xc78eace45b54398f2f789278c8f2ffc99266ac914d410abfe9ffee5a5af71620" , f "0x8794e544041886028e5589f70085b8909086208768c8ac11b52f4f70d8769110" - ) |] + ) + |] ; [| ( f 
"0xbc2f297fbc75952c4e7ef37b881cefa6fc9a8703e5c3d59b733c34b05773d802" , f "0x66e3ea443badd6c09d97afcea5c714623304b15e6bf912f204baba1cc333482d" - ) |] + ) + |] ; [| ( f "0x7374c52990b89399a05f325b8f3328b153ba03273c9b36c8d43931b7c26aa33d" , f "0x634c248496e41cd0882276d955a3d6c5f43e1e170eeca078e03558450d9b8904" - ) |] + ) + |] ; [| ( f "0xd1c3990c567eb2b65a4bdba917de7aa05180797075408bd851aa13645b57e61a" , f "0xdf46a73709caf4bc591fd4c7cd075a78bb03e6bdd20853bc55452445048a9429" - ) |] + ) + |] ; [| ( f "0x4c8aaccd9e730d19780a752dc7a1e71674f04a7f5997f5c48934cc454373b91d" , f "0x24409d5138fb3df54b7d5cd7c08c32b3858f6dc5e80cc22c9f4a6b36b7c5fb18" - ) |] + ) + |] ; [| ( f "0x792f8d0da3edf2d668c9a09621aa97fd0ecb82156c59b1ba7b0483bdd7db1b02" , f "0xf6435b72f11e163cfdb58fd9de230cb96bfbdb59cbcf0c585c29d1f419601a36" - ) |] + ) + |] ; [| ( f "0xb6a43fb3bbd8331176677d0c0ca2d7e1349c85630a492236aa6139cc27d06c22" , f "0x9bfef29ab4481160b581bc2d7106f8a7ffdf011c0054f5dad9c2bb9adc3a1009" - ) |] + ) + |] ; [| ( f "0xc6060027d83b55e70c4642f989d46ba541e5e07515bf3fa59ed54edf66b3b703" , f "0x17c9990667a43266cdc9826efed852d025ee622266f331df595b9c9b264a7600" - ) |] + ) + |] ; [| ( f "0x145416760d922e01c8254ce625d34be66736025c9b9898fc4f4032e39563c806" , f "0x0ecf76ee131b179d9f43fd42dec8f0f1bc65a39ae1e31ddc3eec3f565aa1040e" - ) |] + ) + |] ; [| ( f "0x4d51cb4dc958837b5836c2c5e89b1f08824d9dafde7be3d3f7a0317c9b436b2b" , f "0x9c95d4890b15e9159f863e324efa58eeaa9480a86b96825d24935bc7b920db3a" - ) |] + ) + |] ; [| ( f "0x299b354fad46b77515f41dce2d133b70e3eb43fec1f9435e4a422d1b25102739" , f "0x483bc9ca3f1dbecee3a8e405d48fcb7479ab1c4b590da4ada2a5c435c5c9121a" - ) |] + ) + |] ; [| ( f "0x840cfc672eb15a7389ab9c24ec9917dacd69f200e9f343ed2582b11f038ee224" , f "0x1e08c450f3b6894b14352e2f7488c2e5b2f49395e2f6bdccbb970be4cb497813" - ) |] + ) + |] ; [| ( f "0x102f3b155d8a6ba3a7b78b57f957f633efbec235cee4b2f21983c9855e433a3e" , f "0x7114195212be500e670e1c0e383b92e38eeea0df2465689b18ac68ff6c6d763a" - ) |] + ) + |] ; [| 
( f "0x7b8234006b520aa397637936ccb21c0d8015f26541ca8ed9b70c9df5a56dfd18" , f "0x623d02fa49a68cd228c89dd6e3c7f183c76aa1ed2f6a82c13abc040096188129" - ) |] + ) + |] ; [| ( f "0xed8df3ea9229dee65250846c80c14ed948c02f1aa151ae297ebf4af8387a5408" , f "0xfa1ae52e5659ef1f6ac0fe9072e028ae53bbc04cc452b0415d28df3292c4d904" - ) |] + ) + |] ; [| ( f "0x7b00814f097b67797c707ce2e8dee3a022b66b9cb79b4d285ab17c24fcc18832" , f "0x83e715480e9e6875348e58f92b3d7a4b42b04db92050a6ab3e5db1dc1874f23f" - ) |] + ) + |] ; [| ( f "0x296cd3c2195a337c81682457c1729fe2446f5f11dd849a864d7f9accba14ef26" , f "0x8eee30a554f0a5557d180600322d965d5257abc616e9841546486d3437cca503" - ) |] + ) + |] ; [| ( f "0x2aa69daef9840595dbcdff80a9d345694e84774679f9ed7464147be59509b025" , f "0xb54620a5f495bd58a368d01ef6dfc156e84032656aab22ad04cad3fe0fe42532" - ) |] + ) + |] ; [| ( f "0x795d097e0849c44ea00b1d6c99664772868aa16ce28187cfd2123d9b98ffa50d" , f "0xd3b6890082ca1c7e0ff49f60a5f692f607c273636551bb359da868909ad42906" - ) |] + ) + |] ; [| ( f "0xa754ce0e204978d3453bdcbac805a1f7382e974198c1cd138650f7ebe2a1d807" , f "0x96b294c97bbec40dd20620b66a18e74ee0fdb7ecc9747a020a8f339c28b73a03" - ) |] + ) + |] ; [| ( f "0x00f82df633ec75f8b84446d1d603089d8f80e4f0aa392378b1324c4faefd3227" , f "0x17f4061c2a016005bad6e6a4b2e93b06551b8407b6b9f73da78aff581a8d3c10" - ) |] + ) + |] ; [| ( f "0x88aed3bd5f0fa9113220594c68d7e15dc80b88db501ce61998fb1ea3d52e4d36" , f "0x4dbe3904498a069f34beeafdfb77663e888ee83c46e1cf02e341c61ae0efb229" - ) |] + ) + |] ; [| ( f "0x4fb21d66acb233a6e4d880458a139f33e48259bf37f2f1aed25a1c3cc246b032" , f "0x95a9416944e9ce86302673924472cfdf88f0e03fea3b0c94fee3df9fbe99c410" - ) |] + ) + |] ; [| ( f "0xfce34ef9c63e6d09de53ab02bffba1f8759bde7300120acb24eda275659e1f3a" , f "0x4784919724a63d8d1cd8c1d957ed38e1dd17fb797a34d020e5993d08b41d8608" - ) |] + ) + |] ; [| ( f "0x5909868c031f36fdfb4696ce2b95f41c3b10af30f558114c58b95b3d15c76334" , f "0x7cc9cbe64fb1b4155021047c92a932ab454e7cb29d0375a8d201e953797ed30a" - ) |] + ) + |] 
; [| ( f "0xca5d27e70043b97af4fb89c64d0451dc6a13176f33cf421210435ab666344227" , f "0x35de9dd73f06b6acf18f24a550b1dda8f96bf29cb05987c0c8d4fa8f30865407" - ) |] + ) + |] ; [| ( f "0xdb46f67ac8961325881d43d075b1b38ad105c6f9ec9c77f71eacb27f01864d2a" , f "0xc3983ffff832a764c63e8c1644dc5e71e191aa64e690f008f248f44cd64e9a10" - ) |] + ) + |] ; [| ( f "0x472005cf4496d33e4ff6bff2b207eca20f438a962e297653110cfd082966043b" , f "0xc303a72350e77cb1733beaafc1fd9e91372ee65caeadd432ae2d053a8c8c751f" - ) |] + ) + |] ; [| ( f "0x7ab226f53f7eb7d34dd8106b4c6c08569a567111e72b8a72a0c27cd55b4b8b33" , f "0xc307d2868646f0adade8ed659cfbe97a3b8cb02296ff8514f351e10026980115" - ) |] + ) + |] ; [| ( f "0x5fbe6585d05ecac7169ddabceb4b86f6b7fda115e4fe23f0c840987b938d4603" , f "0xf901495811fd4e552ac426f152071a8a8aa1fc059cd5c24fc80762b52983413b" - ) |] + ) + |] ; [| ( f "0x5c591e98a30d7908f88ce013efe209d364c79a045efbc54ace23bc13b7daf125" , f "0x2793e9dfd14319cd35077f4c1f82fb6181d1109d7510f6f97a894ab67d63f10b" - ) |] + ) + |] ; [| ( f "0x2d0ed898fe32f8853609c1a945c24277204bdb0da3d7ee20de65e598a396132c" , f "0x24863ad7138163d10cab8e78e5c09dff8a1c3a46726102f52c4b95eb83b1461a" - ) |] + ) + |] ; [| ( f "0xb0e26a61c3ac805f20b581096605d090b89dab7a51a8dbfc7ed62366c00cf10a" , f "0x91edadeaa83a51e99dbffdf2c9d1481c8dc0b02489efb0b46a6390e1401b7b2c" - ) |] + ) + |] ; [| ( f "0xdc6e49bea410d0a40560b7484c16318b26e49d1ef6326ca5de01b9b314779f3f" , f "0x9d3b860617aba0c5937cb9661b7d331bccb269c33fcddbadf3ad67b757a45e2a" - ) |] + ) + |] ; [| ( f "0xf5ed4b250a7e573a9c84b55c8e9731eea10e5c466c66d6e7671e99e8d1ee5408" , f "0xf00d954576a1f9c1b36551d11ccbbdc8794b85db751482a46de1f3c48c944f31" - ) |] + ) + |] ; [| ( f "0xc7b04b38c7143a45c312c45f1af167c724bb5374ad07edb762137957fc9a763b" , f "0xe34ec97737827774c289358a4836539a8d116aa7e3e71052b0d12613fb47e537" - ) |] + ) + |] ; [| ( f "0x1ae60834b040796360d8b9b43387cc08448f98753b094df59eb2710971a6d535" , f "0x1709cdabe0e29aa3c834fa5b13d7ff61cd8fa03bf3d01efd7435acf35667442f" - ) |] + ) 
+ |] ; [| ( f "0xc364b1cfdbf1f5fae607b70b50760586e4ca1b2122a3ff5de3ed57f811a83e27" , f "0x77e4dec770a576f85cd2f6f2bcc592e1083527bf6b14a1c344dde09ef1dadb14" - ) |] + ) + |] ; [| ( f "0x9855ae5944a983b7e64e8ac13584100a5c99730cb8120b5cb02e2d07e19a8004" , f "0x7d0bfc4f35708e0ff405d08c947b3278e933f6b3435a79bc72ccf1e685719003" - ) |] + ) + |] ; [| ( f "0xc42e1b1d5b7d9af95928141f74960b8c4dd5f9696e72cab815e38cc963b3ab2c" , f "0x6b332102c896ede20dc32524d0c529a886423528d22135fe05d7b062e67bf136" - ) |] + ) + |] ; [| ( f "0x4a9b870e5b2b4f6cec2f731e604a0a4d7c592436225f0fab26cd03997da2b63f" , f "0x171c2f4f00d51fa1601a56536e26173fbab4258583fa602213c1c613b065ee3b" - ) |] + ) + |] ; [| ( f "0xbbd226c2ec5756cef8e19d37c10b2c3d05f012c8b9b49a433bc4434443158717" , f "0xd0d080aef480fcc4c77c86f6d3c30e23b39a19d089df2c6d9f355e94aa27d404" - ) |] + ) + |] ; [| ( f "0x4c1602a1d42f345c7869f942a0f4acfca17750f5b0d948c384b4f5dc25874f2e" , f "0x913dd034740daf99eccef88f5c191c0082f3ff04e68ff56abf85980c1f33701a" - ) |] + ) + |] ; [| ( f "0xfce98acc0ce486408eef3dd51dccd8a9fd9c1ba947dfcf96dd6e444aa7d5ed09" , f "0xd26d8bacd12d9223a45a45ab2e96cb89b05ee000fd51334c3c7ba89bab5eee1a" - ) |] + ) + |] ; [| ( f "0xc7884e260e4e2554e77837e489fd480fa7db26f4e3e70d879dcfbf02e54f1634" , f "0x66dfb83a73fa6ad66d482de98c903b094aaeeea5506a391f5c628b9441ad2c27" - ) |] + ) + |] ; [| ( f "0xf36aa949ea8e3e48887d49ca6e2cfdd3625b368feaa29c67f73f2648c7f0fb20" , f "0x9d07250eb56d84447a342e4ba5bc9781132c75afac597a5c5fd1cff2e224ce37" - ) |] + ) + |] ; [| ( f "0x32a4db7138748d172d38167e1ba9fd3683281d22003fb8851f75cf6df352ed2d" , f "0x70d652d91d2a4afb8b06b554c2315d42de899436411343ef3f351c324739093b" - ) |] + ) + |] ; [| ( f "0x4acb838052c3c92225725519a60f5ed12801bf2842cf9407ef8469eb77ab7328" , f "0x151c9caa8f943c759ec6fbe3ab2dea2a86ee3f8859d04735bfc2ecbf67dc9d1d" - ) |] + ) + |] ; [| ( f "0x8430b6d11a35975cc4973d17f836d59d5c6dddb0da965b17429f1d1f364f4e1d" , f "0x773f904d3932f22b3932b4f8c89c275b2ac7a1236e7aff6eaa2a1863bbbc2c1f" - ) |] 
+ ) + |] ; [| ( f "0xbca66477d4ad8ee323cc4bf93edcb8bcedbf6cc9c3eca98db996ddcc0fa6ae0d" , f "0xed805917bab4c8dbfaf7a1f1e2fd7fedad7e6bb2c10e116e3a2a81ece61d0917" - ) |] + ) + |] ; [| ( f "0xc1bd4c678cfa3e1fc7cbe2b4c5aea1e6239c387ce91b3bc3c829664316ca8e1a" , f "0xac9bba07531f361e064b8ea072433dcd88da00e16afcec3e20eb21b738fecf12" - ) |] + ) + |] ; [| ( f "0x4726773e1fff90fe3fb8d30ea6c6d67c650d330b7470d9456597204e036ac313" , f "0x2807b5854a99275fd3a36fd0dcb6eea5ebd498d23994bc7a40e98f1ed82cf72d" - ) |] + ) + |] ; [| ( f "0xdef0db8247d47852ef51eb516bda98385455f2aed39bcd36e3d222659a35442a" , f "0xe9f7a99c0cc1710526f2056422c8a09e44a82b565c5b5daf3fd618bcd83d9e0a" - ) |] + ) + |] ; [| ( f "0x2d1b81b0389f40075601d496b7b4647ba089dbf9462f89be4abd097173cf491d" , f "0xa610738dc76cad287f9b079e645a024de4dcf4829a9a706c90dddaa6b328ab0a" - ) |] + ) + |] ; [| ( f "0x4152796f568cf745c4356b832300ebf78b28eb42c2393bf625c596fdd34ba913" , f "0x1275ccc1cc6f18fa4ad236c22da365577ba5bb8ed411e55cd7d9bad2b57e7d24" - ) |] + ) + |] ; [| ( f "0x17e163b9a75066e7dd5f29b72ed90a2ad59e6b09f06f678e9eeaeb32a64b2f0d" , f "0xf0930a7c45e57d0fc0fac37490b2ff856085a6e433aded5d72956985d9e5a115" - ) |] + ) + |] ; [| ( f "0x4b1a988c03ab459942cf7fcbf22ba77c6ac6672d9f7e6fd9ecc4ff585a5be802" , f "0x7e2885ba3811081b0967662b48c4c1cdc44a1088684022038159833fc38d0434" - ) |] + ) + |] ; [| ( f "0xcca88ca95f6320db2e0230f721511e884ebe54e31ca98882a7c720c618817d13" , f "0xe356ea2a17316ac098cc487627c6b59f3f6758f552c361f4fe83df40f11c3c2b" - ) |] + ) + |] ; [| ( f "0xe68f9b4d1cf28b204cf61ea8ac596c463fc37d9429413d5a1daaff627d6de11d" , f "0x967c04c240e4be473f120abe20d7184f5ee32255390a7f2acfed928f16c29d3c" - ) |] + ) + |] ; [| ( f "0x1a7dab05e2c4913bb864ac98ae20dc1be248e257a1913b2178c9628a12e5c63c" , f "0x2957dde2561fb12fe643af456c05bd82dda6c4dcb31ad5fcb662e26b3cdc723f" - ) |] + ) + |] ; [| ( f "0x2fb25448ab0cdbe6040675f75c92e83118756fd7d5201d167c7644bf460c8223" , f "0x56753c6f62b6944f4bed6ad828434d554cdf05f7723877b655a9c885312e120e" - 
) |] + ) + |] ; [| ( f "0x17024b511e914dbeb7f949401e53e02c746fa8c31324a037ef4a616b12b83811" , f "0x76c26957cee578a3410366770cc733417e0126ff2d26e9ed63e5901bfaa25912" - ) |] + ) + |] ; [| ( f "0x5af495420b163523d94e9752985e762c0a0d21addf243156a0dbdf5d5c49a038" , f "0xaa14eec7e196e52753e371b0a47244811856f0cd78c4634d4f06d9bbd187f93f" - ) |] + ) + |] ; [| ( f "0x73b670cfe0093bceac70b90b706dd4452fd8cbefe8e6e3d9e623755211a1521a" , f "0xe9582051abe50ab69a44e1ac1532a1e2453af0904072ebf18675ac2912ca5018" - ) |] + ) + |] ; [| ( f "0x8e3ef4e6499716088f80d686e10c239cf27352ba08c153184186fda8864c0c38" , f "0xaaefc1cb5d315bc62e6f55acbce231e7161173ff3fad1980dee067be8a9ade1d" - ) |] + ) + |] ; [| ( f "0xa5af9818e12d6d40ec976fffc3e370b6a07e76b2ec97055e84918b23cb2c1b05" , f "0x8e27cc8d23d3b1dea075c68eb325ebfdf142afec1be32cdd59f4e37ae343cf18" - ) |] + ) + |] ; [| ( f "0xe77b3f80ab30378ed399b72cb965b47d82f24e2754ab9da39f8ea92aba164f30" , f "0x6dcd63855176a14be585e7dd6ddc2a8c315643b4f8164b4ddb44d3737b6fe301" - ) |] + ) + |] ; [| ( f "0x5823d166d6e9be708711c6bd530747d65e9a9f83d221a325a99414c27f16aa1e" , f "0xefdd9521f131fc3a4beb2013b05fc50fed9e6fb65c693dae0f5702b1393d0603" - ) |] + ) + |] ; [| ( f "0xb333d7111d0abb9405cb7b3c765bf154e3822ba2e0118f897adf6bbda8f74d1b" , f "0x7b73d17d836763d1742e539eef9750e2116879f2b4b2659d936a6e3741714414" - ) |] + ) + |] ; [| ( f "0xd9ffd664bdb4fa9fcf87bf234db694bc87788ebd4d5dc909a708766ba42ed03a" , f "0x83cf0b3a84ec9282e37e701ed8f8439c337e3bf86c99748537c580799c070c09" - ) |] + ) + |] ; [| ( f "0x6ebce34071fd73467831eaa92c5fd6ffd681fa95842cc2199b9035899aeb3e2d" , f "0xc7f4c7a91a456ead6b8b519eccc3e0ac2d92c445b5143678e466e9efbc712606" - ) |] + ) + |] ; [| ( f "0x82c8f19e18a740800df0204ed4b4f93531c56f27b22222a12ae4c0154d51d022" , f "0x19f5698e807b79da345a6f6d3b217e1cea7e09aa02988ac72a661cd3aa09cb04" - ) |] + ) + |] ; [| ( f "0xd70de35843dd7e251329330df249cb4e860b0160329189c0950abdeb27928b07" , f 
"0xf111aadb012abde7bb53d33fdcb5738cf211e637aa438c23606dcaaeb1de712b" - ) |] + ) + |] ; [| ( f "0x447bc4b7748323f46cb8b03f808ff6347aab1f077bf7e707378d957845215d0c" , f "0xbb4df04300bc1291f180e182102561841b63016bf58df97e12e27b88c0163a17" - ) |] + ) + |] ; [| ( f "0xbc0edaaf25d6a498996534006062cb2a00db591eae98cfc6ee78f8b489640c3a" , f "0x14e3c8c2dc747e95707dd4a5a56a404adb0dd08aff7370b3aff290720eade42a" - ) |] + ) + |] ; [| ( f "0x79f51426207ace1283ddb4bb747b5b20132289bebe1941ab7644bb38de94081a" , f "0x2f628594049422ff658e54e6d0e89c8ebe55cc2301238c89d9b80e6102affe3b" - ) |] + ) + |] ; [| ( f "0x2c0d7494c429ae3fffb7fe1f6ab3be1edcffafc4599b0d07cbfd291f28bbe12d" , f "0x0725b1214ef0e6e22ff2975efcd61297102c16104f0327e841a8ca96aeca7e10" - ) |] + ) + |] ; [| ( f "0x4b5396ea09574ed2fd55b16ecfd14fee4b1e1ba60db7aec6c65a16e2c9ac3f2e" , f "0x98f1ce12b8ae6890472e27702c72b25a90b7ced3dfa8aa24a81816a229022b2d" - ) |] + ) + |] ; [| ( f "0x5079d85d95cb2d0de0c4ccf97006175a39209930007a77fdadb4593d835e740e" , f "0x83975640ee72a8c300d9817007f2df816fa1cd6cd9655fd63a016d127f4f9607" - ) |] + ) + |] ; [| ( f "0x3f77222e9fbe9ac0a5b62671730cb24503a099c770dcdb2c94f2a4b43823722e" , f "0x1e9a25e5d730f2ef3eb1973f4aab47895b6ebe2dbd10f5bab552f2c0ce26c10c" - ) |] + ) + |] ; [| ( f "0x5ab80db7415e0c9329ae0082e2fa907b0e9cc48c9578783df13c7a6019c14e22" , f "0x554c1c961c19c250fd21a5d98b2a44a10b54a4c388397035aab5313a1e462403" - ) |] + ) + |] ; [| ( f "0x14d575063848161485e7430a178d786ff526f83e7634bc8f9610f7b245cd5332" , f "0x5ba3e94039a5ee34648e90a50b162becbb6a9310d1fbcc6f1927b1195574e012" - ) |] + ) + |] ; [| ( f "0xfc4b724baa6d5a11abcec0ac0cd32e407383ff089b107abdd4d7a52e3e9a753e" , f "0x9bb0f11f37fb72b7a3e76c1bbe105e41face6bc9605c1616136dbd94160a2b36" - ) |] + ) + |] ; [| ( f "0x15d82e527e95c735cdb2c83ddb74ae205ec8b274b3f5d9d035c395f9b631b92a" , f "0x2cc572b7ab015407b903c905d4101bde3c20f8a3de0a461a66e82a00d80e5834" - ) |] + ) + |] ; [| ( f "0xc5ab3c2646fbbea3a62afa57c6236e0295bbbe434c4fc138a1558b7da7be0b2b" 
, f "0xc19f64343d0b4fb138f759953958447936635242d2654f70a44f3e3939b8cc09" - ) |] + ) + |] ; [| ( f "0x1d08a9e57dae3b6ead48d50d858b2279f7b3026e68dee4920d6f3723d8bac338" , f "0x21bf211738c15b03543e8fe5d36d5e7b3a584d54daf5623b1d26e011aaeff034" - ) |] + ) + |] ; [| ( f "0x85fdcdf73107af852e9a4a9a8a4d673f9a1f35a17e6706216933560a115da02f" , f "0x7dd190f1f64b1f1896cf83df6e418264e7255c11b22975d56f278669c6e2a019" - ) |] + ) + |] ; [| ( f "0xbeed13d0fb2ed79109acaaffca404b176a66880b1beca4bd0f901499670e6526" , f "0xa6348667f6ea9f8d3eb6579b0865a5f0e6f906b802b95adc9d63f9d634042f1d" - ) |] + ) + |] ; [| ( f "0x114ff430fbbdcdffacd15f24119a8ad5223d39c764f74da00e546c8edccae91e" , f "0x2dc8a5d6008db2531f254289e27af000cc8122cc2c64d790947e0fa7c7f89a12" - ) |] + ) + |] ; [| ( f "0xe1f257dfb0c263ee25ee8fb9f072b21534396afbda98ea7e0112678a590c8c34" , f "0x48e5104bc6a98ba4c0791154cbffbabe21edf6732f6ad04989b0fd76a2f14407" - ) |] + ) + |] ; [| ( f "0x0012979b0926f3a04503d79ba29b9c34926b66203d5c18f7337c382ddee12529" , f "0xb5f19150609e670339ea1a7a46010b898b3cc8785d3ab88defb1eca3168aee08" - ) |] + ) + |] ; [| ( f "0x2f4091a705db1db5d85786877713d9964e909d1a726c7606a4507d4025d0973d" , f "0x7413e8bc43b5f82446f9cb919ee583fd950079c0942f901188b8c60b7375c11c" - ) |] + ) + |] ; [| ( f "0x720fd6f779a9ef73d106afd0fbe6920f6f09f14911e4acbf5b69259c6381ff1a" , f "0x782262f0e6885dcc9c8cca8e11b20a7e1693c867480f43f97344017d3d5af11e" - ) |] + ) + |] ; [| ( f "0x6528597e2453d90dbcf581852373aa238e536ce8a836df20797d02461dea1c38" , f "0x45644e1546282edef326891ef96000981135d7ce187595ff1481d5d368ef2d37" - ) |] + ) + |] ; [| ( f "0xf86c77677159f2bf364111ac4d47d0074ff524f7f9c9811433862c3917e7af04" , f "0xd9cd515adbb27852d4192a9b169ca9c65bf65213dd974fb92a65089bef4a5803" - ) |] + ) + |] ; [| ( f "0xc5ab74fbfba9c8baf3a49644fad6ce2c501d97946be5dd0f0b4183beef53a73c" , f "0x0c3581e172d68bc390aca754609bc58dde6b032954fe6bc2fb6738fac273e026" - ) |] + ) + |] ; [| ( f 
"0x4e6b3fe7103e65a02f457e62c6c27e6fc2369fc4a3cffc7566b04549a9ae4127" , f "0x167cb697b2cbb579081f0d048799b139c73012162b6bcd1147c26f59085a4024" - ) |] + ) + |] ; [| ( f "0xe27496215e534602f1c4b302eaf95b231862257d745418c071c104875b73e233" , f "0x929d49b8b7ab12974965f11f1b3b188fd509db1abc673217777a707288d8c831" - ) |] + ) + |] ; [| ( f "0xe8d19436f816e39a479c868ede7fe30ff3115d795721b70322fde29ce69f143f" , f "0x8b1b29c16b711719ef8a24dbdc30560208d72457050013d8241c2fd9f5a1eb1c" - ) |] + ) + |] ; [| ( f "0x16432c4a6bd3184d7be888dabb95f40ee285484b54cc553c42e653b2d816cd03" , f "0x1cfee4f650092d091a5d3124d7b9e09d78763e066e180f25c143e51284bb6402" - ) |] + ) + |] ; [| ( f "0xf47f22ad2ac7d6d9e63849a0ea298d0ebb92e7a6bb866c0f3d9711846fa8730a" , f "0xf1e9cfdb34edf4cd57dfaf21edbccbc1742be772a261036ac501efcbf5edbb2e" - ) |] + ) + |] ; [| ( f "0x740e569297b6a00303afce80bf80c8ba709180d1cdc067b38bc9bc9668e4ec31" , f "0x3a4b3bded5a19a144a21531edf986a9d5680191f5c8767dc4082a0732396120a" - ) |] + ) + |] ; [| ( f "0x3bd07e8ce71fed77a589bd1686a6850e85137904394bbb7c798a2e1f686fb83f" , f "0x7056483d9331e277875288c72f53c16913802ba7cefba6c0415a002b61728508" - ) |] + ) + |] ; [| ( f "0xaf26880882cd6d177a55d1364cad6d40a841774eb1250d214680121f8cdf6230" , f "0xcd8543abe87ebcc71a26a43f792ee772f63012bdced20ec64f28b3e68fd5bd18" - ) |] + ) + |] ; [| ( f "0xd3a3dac379d4946795161d61553fe5a1affdb038b982292230180f00823ad53a" , f "0x3d351995cde73179c48c7ef709d2dbd205ffaa4444c8ed2eff61f1c322c2c53e" - ) |] + ) + |] ; [| ( f "0x83e02d9543c029ab45ad558875fa4701338f18ffe1995043fd3a7305d22c622c" , f "0x43179d1596c8705f5bb1c37043985a63de165b38df0dad6ac37cd643e5c6b907" - ) |] + ) + |] ; [| ( f "0x9e9ec49ea863c6b94512e44601775026732bcde7ede153d36bf16e5da021b337" , f "0x57b88141f477a1a11619a5ad044c6b5b2f39cd8f61185bf82277a1712ecaae37" - ) |] + ) + |] ; [| ( f "0x3773b83340a990fd5f4a99129eb478e4e46aff22a161dc1325df0bf913d4db18" , f "0x3580bf9485fc9d6c3433b929b39a215cec2ddd2651073f7d5702536f3ed7fb04" - ) |] + ) + |] ; [| 
( f "0xba69ad0a3cb37e7ea875aeac9e2feb52f3c4bab18d4941152df51bb448c33533" , f "0x8ab233142bb900a101ea3365b54acf98adc962ca6758075c086c89be5de94619" - ) |] + ) + |] ; [| ( f "0x64209cf3824bffbaa6ca10a2b5f54552fb639aefaf067cad9553c417204eff11" , f "0xf4fbb8170c2b5d104ade060ec1ef088b051f4e8daff153dc8c6d2bc2717ed10c" - ) |] + ) + |] ; [| ( f "0x376d6805dfab92c5a73d281a23acc3e8b04ebe4ec76a255458d952ee9b5d6435" , f "0x13f0c5968e64ac68fce32aabe71b84c20e73d53595d30c079dc38471bdfe0a0a" - ) |] + ) + |] ; [| ( f "0x79c8a0f9ebaa8c5d9cbbc5a5d0ac0680da2a2d87ab61ead61bda994bef2dae31" , f "0xf36126500bc7a4ee5b0f5dd8348b4e92a41c0502c024d582a3f2bd93886e161e" - ) |] + ) + |] ; [| ( f "0x4e9768ba8ebbd95e469eb9253b4962485d005319a546b9a0409558ac19fae90b" , f "0x2ad6474edc1656ed788fb1eb6a176925deb9b6c16dfea6a76eb233126823cb24" - ) |] + ) + |] ; [| ( f "0x9d7908c46df66abb76e7be57687706d23aea226e8277e6dc972a8fcce2e16a37" , f "0x7f47fdccadc7f3dccc8d15989bd74df56f31d6a7c5d610e1371d5c05e57c5513" - ) |] + ) + |] ; [| ( f "0xa3661f9690b52add210e682ce09e2bd34839692837b6b4447de1fd8a2bf4521d" , f "0xc726efaa343aa91654248c70af4cbca5c3040d9bb884d0a4d7d063aacca61825" - ) |] + ) + |] ; [| ( f "0x805f7b6cd025ee29722d4430919ad7f2cef92ad7222bbd37ad97c41c663e4536" , f "0x8ff96ec7b19672dab74878e27a1025374776def36345f9a5cf3987d87446f73c" - ) |] + ) + |] ; [| ( f "0x013180f90f6ec53fafe5c0b6a7a10e3c9e660f66bf88e4be31d830cfeae85f30" , f "0xf0927f6411e85bf1b3a069b6af0de3d984606d00b930617559abeebec9ed6119" - ) |] + ) + |] ; [| ( f "0x3a92c9453422c930f9e7179f62bf883841e951c042c75c020b3e0fa8bdf06618" , f "0xc25565b3b0a9315bca01f50c1f0ce88422d7145ca45029df52fac697e638d801" - ) |] |] + ) + |] + |] ; [| [| ( f "0x474f3e3a21fedaeb2d29b45977a1f1986f52fd51571a52653c78d23eb621da3f" , f "0x3ebfdb9cf45ce0acbd682e89141719e9323cd8fdb5eb0f81c2076de7bdb6f72b" - ) |] + ) + |] ; [| ( f "0xd20da6bca3af78052cf9f597cbe48a6fddf35449eb7d1c87b8ed5d8a23f3db08" , f "0xdddafe1ca9f1f98cc18a9256f841beb913ff29363a67192b3a9cc220b31e5f36" - ) 
|] + ) + |] ; [| ( f "0x542b0d14dac58452bcd54333e899ef3d2fa468ebf6112f3f004f9beaede4ad1a" , f "0x962518e292247d66ad766572d6accea2806ccc94aff61e08b30f090a22eb3535" - ) |] + ) + |] ; [| ( f "0xf3dc6cc0096825fda752a75e4a42613b38197ff5c8a0f40935159bf9a332f115" , f "0x685f677ed18ef89b2c22a88cade1b23fa1da910203f18142cd7b919f83b58238" - ) |] + ) + |] ; [| ( f "0x4ef3740e876cbf56921dcff5d81f44c684e64722968cc5797122012a1305fb2c" , f "0x09921bfcb1b6e015c4a65281e2ae23d065270f5b7e65f1be5c15c8d3e3ee5d1c" - ) |] + ) + |] ; [| ( f "0xd6c7b785bcbc90935733b4eda87252495843f04e0026cdeab4ef0687214f2802" , f "0x1f8f0138c4f08b5b857b12b75466d4b408cf2de5b0856169a61167bffcad4019" - ) |] + ) + |] ; [| ( f "0x3379b09ab77d70e298b736bbfb90fa9f4563d411dc85f4afeae8b341f5632a2d" , f "0x52f15f7ad5c20ee2becebf09ad4cd9ef8f67c958dabb2201c606c6d0bbbfc23f" - ) |] + ) + |] ; [| ( f "0x2567b3612c8ba544200b7b1b363e2c99f17849756064c577990d9c5871a9543c" , f "0x8ab84647cd0c81a789ef1fab052fd50c50330d002e2c42c2af060eda9a056e27" - ) |] + ) + |] ; [| ( f "0x58285ed62c87dfa519196d4001cd166dbedf66e51fdba2e3f84756cb99302e13" , f "0x79e1bced294d6bd85e26241c9e96063d8abfeda6831fdd9bc285b7c01c68d012" - ) |] + ) + |] ; [| ( f "0xbb76cbf61edc0aa8b4052df8d542ede38da9db18d2ae9583e1500d5e21a7ef2c" , f "0xd3dfb5f95c12098bbdd454f30fe695b69e0c87d425b468d19c6628503b440b01" - ) |] + ) + |] ; [| ( f "0x5bb997ae5bcb12e174b3c3bf38be2ec84e9b25004082ca528514e444a669bd13" , f "0x7e8a444ed74947e10cf488882e938e25dbd08433143917777683bae3f5b5b725" - ) |] + ) + |] ; [| ( f "0x0cc48e3dd9d14c2550144e5bc1ffec3b56eed930a40b90da4abb4c8d4d3f8524" , f "0xadfd9e193f306c1f7cbbb1c7631c75e78df2bf6c93860ff4293032fc3f6afc37" - ) |] + ) + |] ; [| ( f "0xf10e2d43c2032b59af3818e9f2d8d371fdf926b050466cb9c1ff29a572a93d0b" , f "0xd2a5ee15aab0c15fedc7b9bd46826094d913af3f314cb15878e306f0a9ed6d3a" - ) |] + ) + |] ; [| ( f "0x51472e73a5a4ce4bff3baa6cddde562b2ac69566d0110ce4ed68a0779e802c13" , f "0xb0306aaba89dd455dec78e3c50bae6b4f9e365dcdc4558352e32cf17b5e25224" 
- ) |] + ) + |] ; [| ( f "0x05c137fe59a332a8e439ea25e326c33db2d1d09396bdc640e178514b338dc236" , f "0x7dc2832349cc1fe54b6528412748da7d31a9b13d0a6f6104e4113d0b41d1ff31" - ) |] + ) + |] ; [| ( f "0x03512f8daecd5c08cb61604d9c51f691c3392ac6c380fbd505594af3205f0719" , f "0xf8a45dccc8dabe8f69cad5962a666b85009aed037b92746874f21b8d2800fb05" - ) |] + ) + |] ; [| ( f "0x9168a4f9aa4e620781c9cd17d16fd27b251635cc00b4430d5d40536177523704" , f "0x3783ab4ab94f6d84c5e4e116537c61ef50906e4f29cca58d0ec6ff3c28dd7c33" - ) |] + ) + |] ; [| ( f "0xb839aa2eb64bda388ca841d01ec7bc541318567ae7cbdf2fcdbcdd3b34ec7921" , f "0x975ceb325fcd4e0ddb4f760ec8c7c8cc06baaa7a0e84df9acd5211f9276afc1b" - ) |] + ) + |] ; [| ( f "0x40c05b0bdba34364d8b68dfe5dcb2555c51c84b1612cd4c6bba88a601ef5c505" , f "0xb6f603d798a42754911b99c26db6a6a8098895cfbf1780a49800ffb8044b9229" - ) |] + ) + |] ; [| ( f "0xa1d63efc6c3b544af1fd8bf9cc0854862163540e21d4e36822106dbedb134d36" , f "0xa7ae77c0e7c7b501e3aec4009de765a1ea3ce93eb0311ff6a311d4589a1bc204" - ) |] + ) + |] ; [| ( f "0xe193b51fd104ef4afbc7505b284b78153c3ebb6a4bc4c2110dba0b3b4e884115" , f "0x7082bcca9aba4e205f1c023d7bdea8fe636cf65f1aee9e3a9614de2ce5916735" - ) |] + ) + |] ; [| ( f "0xe0dd12cdf976de793dadca8254509ca4c443b5f8ab3a4400b590065b1f737212" , f "0xb0936604fe611f895a708326cc3fd9bdd86eadaf6317fd755fe23ef0c7f09f10" - ) |] + ) + |] ; [| ( f "0x22764762743b97d6b52a30552690ce55ac89762a5b6506f52f7b1ddc57b53714" , f "0xc746b253311a44fcfc3c61a005ea30c2d225bd5626eb8751e8286d6c63c2111d" - ) |] + ) + |] ; [| ( f "0x35bd965f9f17e259653c826428a210b7ef9103191bc9560d138582833786bb37" , f "0x7510691e0e6fc58a2a63010c5e28b1e15ad02cfc37a3b97ae2baea5575f8bb20" - ) |] + ) + |] ; [| ( f "0x93bf38f2b388aa538f42ab75dd32ca469f31603ded885b2ed7d6a4b08abe6229" , f "0xec91f54db9c900619e1731bda4dc39e65188b30cfecb3f4a6769cfdb828e7e35" - ) |] + ) + |] ; [| ( f "0x87a32f17dcb42055d5f1612e1ccc0f8d4ec7db636107e97bc12a9f8c7afd1a18" , f 
"0x7310af75c478f763d4c917a2e53980016c84a8bd462099895106d8308070b624" - ) |] + ) + |] ; [| ( f "0x2e102488e3829f140b243f35a5c404caccd602c44679a51f98c64af110918625" , f "0x8366f431acd9677456a3afd53121ddf3d1d89f929c1924299c68a894dd00ad0d" - ) |] + ) + |] ; [| ( f "0xdcbd981b269894aa2bb36afccdb4e5469ead4237c2bf5b309029b62fc970c42a" , f "0x76a9dc25130be06a3ab032655fab367fcd1228c9656b3f2d628cb7e1e5b82c06" - ) |] + ) + |] ; [| ( f "0xb832df9d63e24aa23bf0bc35f8dbd766ab1f346165a739c1315f1fb63ecc4d29" , f "0x796a70977ef423d1f0a5e18ecea0a0f0a5b2c78b068b4308b3336bd27e9bbd09" - ) |] + ) + |] ; [| ( f "0xf64d728b8bdd51ad3e9d1ee07a2b7902ab8b1bba8794d711c860732deb66333e" , f "0x13c72b84952b579f63077e58ea2ee326cce1bfdb0a0219051694895fb95c0c1b" - ) |] + ) + |] ; [| ( f "0x1e981c4047d5a7e9195d383cb81efc67891cc47e390ddf1e80204052774be135" , f "0x700cd462588cd268d11a87fc76ebcbb39fc5fdddf14a5161c5335699f1466529" - ) |] + ) + |] ; [| ( f "0x45ed9e77b834f808c98f57da57c442ef017a33633a218396fbba7b6d9aa68516" , f "0x20c6bc0e6330c897546e13e0fe256dea30090649165478895ab5bee25554b320" - ) |] + ) + |] ; [| ( f "0xd2204a4a15e31f1cbc527779016ee640b85fc42ae209db3e843d90c6d18ea41c" , f "0x6bb16a38cc6caba61dc56897730ae907f572b8253262c58ea126dc8e8d20501f" - ) |] + ) + |] ; [| ( f "0xd6b9a9ca37170ecdcd42c8369533f4548ff48307a1345d6829ff65fced20ae06" , f "0xd42b0500db965eaf15817bb93fb8411bec65e158c2f9a51eaeee5aa871c4c434" - ) |] + ) + |] ; [| ( f "0x2c12e59e650085b8de80449211038c0f324ddaf630a924d517a899456a2d5739" , f "0x2a54d942fc48ddaf5ecd0fa4f46f5d46670f3a0d3463123d485b204057ded23c" - ) |] + ) + |] ; [| ( f "0xaf5e99aad9862ba1ae8aa07752a4231acd79a7f0f5f58fc9bf5e581db4f58616" , f "0x8671ddf8b16a18433aa878d3c3a80b5963b3200ba8ca7512189667f13609c030" - ) |] + ) + |] ; [| ( f "0x180b7444e6f03d656dba31b7be294fcd07df4f2bef86d063741ddb4f04710930" , f "0x74538e59e9a41809ac90036f1f6be34b441baad881d5bc833f0e3e8c0e33f835" - ) |] + ) + |] ; [| ( f "0x62116efbb6fb1926d05206de29ddf7d44ad0402a11e4d0e7b01e5ea57953113e" 
, f "0x26789b97d381f503153da6b1b89e2980f9655cf126361468eb49e959bba2383f" - ) |] + ) + |] ; [| ( f "0x5b16ce9c484bb27fb4037744e0b337e8fb858f6989a910d05f63385f354baf0a" , f "0x9dd2b3986e2fe4f5b91083748ded807932d046b3e61bbd6fa2cc171e5bb78330" - ) |] + ) + |] ; [| ( f "0x4aa024e7de24054c053af47ed5cd09c8ee7a99f8b8b9150b8d582fd90a8ba816" , f "0xef5b6f9a3a5718de7f9a2ce1401f790ece7b474707291800f3b9b42f326a040a" - ) |] + ) + |] ; [| ( f "0x0b50a9282d228d5fdf737dfc9b160e8439dc618172916d7bf8e9372768a42a00" , f "0xc6841156837613bef85b63a2b6fc58c799aebe7fde62dc3e0c49efd31e97d236" - ) |] + ) + |] ; [| ( f "0x5d0efa3a9c1aabeccacdaccbc746db76ed3235deb7a8809accd79e06cba00a3a" , f "0x8277380b3e88b7456c17d27e67dcd7b24b9b1a2dcb0dc94f6f9f69d3e048d139" - ) |] + ) + |] ; [| ( f "0xa3375c698eb2045928fe1e4dea23de457cf3c16b83384cf5c0c8d5d83df85400" , f "0xcde3c2613ea83e4270850792f14b0cf91d09e896d9727a79d122f19db6768838" - ) |] + ) + |] ; [| ( f "0xe2222a8987f0f57bb60d92d3def7f1ebb698045eb65236d3df03ec7bb64b6a32" , f "0x7699ad94a8b017e902f51d38fdfd88e951cf307af5aba20ead359c0119bb223a" - ) |] + ) + |] ; [| ( f "0xa3b34b8eea2567f90136b22b2aa1296fe39f653b76b76ed2ebd932e375d9c217" , f "0xf98253214cff8b775a08cca924817c8952a7e0f04f719cb53d052351849f5037" - ) |] + ) + |] ; [| ( f "0xa26dbd64bdab881ec85ae7136b8767a45b1702eb54203a4896c0c51fa129db20" , f "0xbac453c4e682ab7ea148d6d07193698d1bd17b43c1fd532491b678db57b4ed11" - ) |] + ) + |] ; [| ( f "0xd95eb76d0406149f24ea9571bbbc4e4ff7350a18f2898022c0ceeb827240213c" , f "0x69af46d43d1f3724c997eb72e4d25d9ff4de8fc7c68a80665f901c3dc7ad7334" - ) |] + ) + |] ; [| ( f "0xbc451996d23c0dbf4cf6a9cb108eb2f7954a5179d99c07d8efb23f961cb37418" , f "0xabe3d153ee9a73ad520f92805cc238e3ae9d0b8622878ec6f855de5e6bca0b1b" - ) |] + ) + |] ; [| ( f "0x2c195ad9f9194d0b8039bcf5ea911cbb14782bbe2ebea61f72cbd9b8c6aa0005" , f "0x41e35efa90cabaf74d38c97eec20110ed8af67278de234b018fafd65c69e4812" - ) |] + ) + |] ; [| ( f 
"0x7e6d606254d884115c4c3ffdc49eae296ad40db05805a4e7b4a4766e51d2980e" , f "0x4990fb024970821126575f150195b6dea90b178d871d05a69c24ae1bb7881904" - ) |] + ) + |] ; [| ( f "0x27467f042fb0978e4ea66d1898d51df2ac8311fc7a489c437c0e47575fbd9920" , f "0x974ec1c435fee03b89901a3d5d3514462c3eed7d22b31305af05ddc0e8d3633d" - ) |] + ) + |] ; [| ( f "0x9a0f493d986ab098e7d3184812c6ceda75829d91b0adf311638de37dbd552f12" , f "0xa87a099543bcaa76f0219a63d030151e85ba764bf9c3d4020bbdb09158688e3c" - ) |] + ) + |] ; [| ( f "0x135206babcbc53090a1ea296263180a1b8184ba001326b9949220934a9f7b933" , f "0xcaf508d9f3abe3f6186be62bc1823c8f41a7197fda5d43695561da1a2d335b37" - ) |] + ) + |] ; [| ( f "0x09461d4c2e54bc6d2d2f1a4e3d5f3161f691d0d56cc19e3b890f588bc142fb2a" , f "0x697b0ca63343e7fb0975d72952f2e627fcc000f003be6ffadd39ffa65a7c2921" - ) |] + ) + |] ; [| ( f "0x4115dc962d0dc370d3770932bc6e8b244da5564db136828800c9396c3ab95b22" , f "0x8d7cde8822f0f767a09601a4d6d8607a7999d6893aa2c7fdca5d436e36a05c10" - ) |] + ) + |] ; [| ( f "0x3a5aaadbff2de283ada9750eb67eec321274ec1859ac2767472bfd719b254635" , f "0xc4d7a98a0a32db48990d924e97da01b22740fd3fdce05cb3512ff73a5df5bf15" - ) |] + ) + |] ; [| ( f "0x0cbb7aed3248c7335fcc5950836da3ee93ef8e1c0c6babae3d21720025aa5326" , f "0x00f28cd0857e46d4091b7e76227311b06f4db61dbf81c849e7705bb791cea532" - ) |] + ) + |] ; [| ( f "0x088f36c01bf12359423814e0501a7bfb45a97213893608b19ccf7cb94abdb11d" , f "0x647533a452f8b816979d7214b8a370c958065333b9ddb7b798a485fb2bf4623a" - ) |] + ) + |] ; [| ( f "0xedaa063715dff6a7304d47631ae46f0c4c219c00624f387b5550f882ef9f9f1d" , f "0x0918ff51184817bbe76fa704cad21fd13fa35a6a4c7d6ddf1f74ee5969f7d20a" - ) |] + ) + |] ; [| ( f "0x0ac47257145d5e7ea23c77d163ef760dab792a3973ff5af2a64e72c89a64160f" , f "0xadf947721265c5943b57e2289b588ec6a228501203717287d554d5542182d33f" - ) |] + ) + |] ; [| ( f "0x2d573d29d0a7d98314fde0b30bf36bdcc2d1f4beb667ef7979ee37b5f729873e" , f "0xaca0660cf27feb16e43d1923acd9496f8314514ca976d6ae3cd101a997578801" - ) |] + ) + |] ; [| 
( f "0x8fcf32aabbf169b0f64e9b445f4a2676b972c74c6824f2077a093b764522b002" , f "0x4cdeb2c76ea1b7a30670b337f8b031ce096603c71eb76e62e8dff9d061724e1e" - ) |] + ) + |] ; [| ( f "0x7a661c4c06142d3e63e1dc7bda64b9b2cc24bef908edbe7b8fbfd7a66ffe4721" , f "0xf56515505c97deb039648ba5cee45a5bf5b888bef0a645b2b62f6f2ab4876c1c" - ) |] + ) + |] ; [| ( f "0xd049e8cfe00a71cbd0313c9916a6cb43985608088ac2913f1f4841ff6647ae2c" , f "0x8829c2232c485958597778fba667fd85c206404e267a1f853a628f8debb20113" - ) |] + ) + |] ; [| ( f "0xfeb9529980a4f82ede55b1192fbe479dfdb38b899402d89cfebc717c4696343b" , f "0xd1214e6e90d5b90e49c0f146439825afb6aa5d7da437c256ea3794ca0a17233a" - ) |] + ) + |] ; [| ( f "0x1e443dc4f9c19d0bc2b3cbf1fd56be470ec7215603c8042aed4a9d885e8a0728" , f "0x8a0d655930c5bbf6d41c897fe8c2b8e1dc82917dd17cfeaf0e4e286ab91e413c" - ) |] + ) + |] ; [| ( f "0x62a1acce89da61f08de5969b8e84344ebe56e42583d947842d4f3b1bbe2f0a03" , f "0xf139c53651e8750a9d88133e3ee1b34608976c409b1652fa75974bed1581a224" - ) |] + ) + |] ; [| ( f "0x41a73eee03b38da9d675635f0ae09e16194d42a9dd2448f0653f971764827405" , f "0xf3837d3ea81c8431611f854c3a4e4b10e3b15ca5f73fdf9425eea4ab380b4f08" - ) |] + ) + |] ; [| ( f "0xa19f03fa9a21c96ceaa333b055e052edf945957e93271368c22e45973961321e" , f "0xd24ada27a47680ca978292ec7e4fa37c48966e35f58cfdc5169f30a45bd28e15" - ) |] + ) + |] ; [| ( f "0xd5b6b59dfe5f483cfb17c8f6a4a0e020e1d75ff5e370bf9a92c52e1badd4d913" , f "0xcce3347725b7dfaa652d6cbacee1bb0a4c4f50685eb52cd9eba60e8681739037" - ) |] + ) + |] ; [| ( f "0xf3504618d412e4723ca71714ff32d22beb460383e5700c24cfed5d6e28c7cf06" , f "0xdc6cb57759b6ad9c3097f58da1013e1c440d0704f6ebd16a88906b03e7b68e3f" - ) |] + ) + |] ; [| ( f "0x95cb7d0149ea2a26ced241273e4ce87451730527ee37597f3ed7615e5b31d921" , f "0x7e7099146813d8bca50cb683cb8ca1e3440c8935c3709a99dce3531460682d0f" - ) |] + ) + |] ; [| ( f "0x0de7cc30eda07808c4c2371defd4cb65addd096112355bf3d3c770d94f05fe03" , f "0x97a313be045d4c26d3e5249f9362c6badf9a0e3876de7145d2ea9a2f740e8111" - ) |] + ) + |] 
; [| ( f "0x862e60705f048d2c448e2ccdfa5b793317149efa832153725ff742907e18563b" , f "0x5b3583180404d3f00671be51199f9acbc5f6fc5dd4ae36fdbd03a1574254050f" - ) |] + ) + |] ; [| ( f "0x22e41043850880e8761307b6106a33ab4e469f69e2f9953929834e4c01819d13" , f "0x8af9cf4560ba5f1da32c8d070725b1a3cce294f0c99a244f4acc0621dc8e9921" - ) |] + ) + |] ; [| ( f "0x0afa71edb5ad972af077b59b50a1ba4340ef7a32106430b45f848f2a627f0d2e" , f "0x1cadf566d4d1ff18ecc8ece10d955045a1c8bbab3318acbe095e4d363a7d0238" - ) |] + ) + |] ; [| ( f "0x6695aef718447a16f59dd7747f85a854bdc03be58f7c0c1d58163329ff7fff05" , f "0x3e17047bd6b8cb7a59a47daf1fc0b3adff3b7de657f5ae379f1e5e7fb1239033" - ) |] + ) + |] ; [| ( f "0x0c9953ba6030093f10afca0f9b6dc28613b45c21004a3d11464e48b43c6c7e25" , f "0xc4a8a661d171dbd2d136bf23b6ea503b9c5b45421cf8bb6dfbd9e9dae3418a0b" - ) |] + ) + |] ; [| ( f "0x99816eebd0435dc78cc5582fc47bdae55ed29710b290774b3f6add79b07f753f" , f "0x78ec3de2368e158dbcb4148a865e9a179a297457e59edb6dd0fadb722bc29307" - ) |] + ) + |] ; [| ( f "0xaa705f4a8945412f2c9f34fae9d40ea703a14332f02fab320324134846450210" , f "0x29e43fa073fbbd6dfaa75e3b29938a000dd5076591537df9d59d8d429be8693f" - ) |] + ) + |] ; [| ( f "0x2b0bd00ec155a0e3daf92aee9c4265d6cabeec87939653f820efc68dd37ca913" , f "0x08b1c92dbd176b14f937b680a31864dbf0f1f482011cedac25e896be3f75ee00" - ) |] + ) + |] ; [| ( f "0x19b7041930db9ab78b184718d3cb00e728a75b523905d9891b5f99095afeaf13" , f "0x836f8aa298627808d84f625446d1d723f8d18056546eb3626750f3a65100b635" - ) |] + ) + |] ; [| ( f "0xbab01ad77b14b0905e48e76e0e58dd66bf95a524f75707275dd3ac6179238e0c" , f "0x7a35e8f90fbd62516958e30007a59a0a449caced275ff4519a0ce5eb5cb28527" - ) |] + ) + |] ; [| ( f "0xaa155c82cf931459c1676cd09c8d1e53fa8a1d12d6e71c4166ed93b4cf0f7b3c" , f "0x223702c21480bdf53b2e15b3cf101aed8bdd9cd478291675c66931b011d2f303" - ) |] + ) + |] ; [| ( f "0x092a770653a2333815eb398d08d4abb346645775202c1b8ba9fcf29d0acac024" , f "0x68f400bb9ef6ff20571593576c0c05ea0322493e9738bd1c279c025ba68d3c37" - ) |] + ) 
+ |] ; [| ( f "0x7650f1414ca10b47928185345618733f1adca89ba18a1253b3a72fafa5ab0930" , f "0xffec62ffa18b6c3405dfed2b43227be54d06ec1bb2f16ed9a3ad8cc3a4465013" - ) |] + ) + |] ; [| ( f "0x71dd895ca2c0bb271645b7cd65df2c17dafc9054ab2f624ab8b452655e23a73f" , f "0xe396a9285785cde195fddaf847f9e443638ab38b95df5a7dfd2594f89bc3933e" - ) |] + ) + |] ; [| ( f "0x5e9e0cea9617f480b87611952ab4fad0d38c9b20f1bc4fb00406b191ec300a04" , f "0x4160705ceadd45b6b15e11d232d31146048c024ceb003b57c71e3f6afbea4523" - ) |] + ) + |] ; [| ( f "0x62e77aed13d291eeedbd0e5b2ecbf5d0e8dee6d57353fe00fa1b3da07a6bd01d" , f "0xb1189c6ef2b3c9c5b9cb792e75b6864997446d58544fc10b32f46e632df9a235" - ) |] + ) + |] ; [| ( f "0x4a4ab1213ce7d848f2f3f3dec503efb9c5fccf562eba1720cccd1c5f9f26c012" , f "0x2296e596da299901e731572690f3b52328c66d2b31dcdb9a40839bb0dc345f07" - ) |] + ) + |] ; [| ( f "0x1e043e6d08000f931a18c3cd45a35e89fc502d98dd447c4ed20a874aa71a6f01" , f "0xffbb5b9251450e70549fa70beb97665410810d9e1f068f1bd6e5384556c78d32" - ) |] + ) + |] ; [| ( f "0xa939bab896fae4420a18e3fe64b168391213e4cc6e2b717600344ab58804d12f" , f "0x23739b2f614d5537e4e1bc8d1de6197be6c622663faaed4fd86b321461f12a16" - ) |] + ) + |] ; [| ( f "0x8b622705935a1590d6b42d4064cb301ec272f9871dbf1f01cbb996cd4699b70d" , f "0x226c1ee20f3edb18da5f910876d2cc2fb1babb04fa3da8b226fcfc18d9111520" - ) |] + ) + |] ; [| ( f "0xb3fbade665cf9dd23c2cf17acc4df59a1f77b4b048e13ecd5f1f5abe4292120d" , f "0x899a81682cf5c777a62201e0d6ce157abb855c4c8e109d7d25029f2684e10726" - ) |] + ) + |] ; [| ( f "0x347ed869d0c609b3ddcad8f19c7b05ee7e613ea089cc704daec60cea863bd404" , f "0xd96e2cb07fc8fe456f0cb876b873b0ac8a84bc159a5a4354bc44c794f4b1c63e" - ) |] + ) + |] ; [| ( f "0x641af92f22984008501686ae39f6905e809cdf317b966380e570a1bc89dd6c32" , f "0xa48307cc0c0009e03dd7db4f53b287fdc6ad7e39116215792d668b934a791d24" - ) |] + ) + |] ; [| ( f "0xc314d5efd034e7faf3ffeab140366c83e196cc32a8f01dc30ac520ffa4db090e" , f "0x178881a45010998d7ee5a482533672787b7c4ac89c65a53f60d8c5a2b06c7030" - ) |] 
+ ) + |] ; [| ( f "0xdc5d3e360007e43d2d5d7a0ca191ec7f69bb2777bc44b9e956b6f3e53fcdc434" , f "0x67bc9c8c44092245a99f06b2ee40930e9c8c359667cad2f553fca02ac3b9f61a" - ) |] + ) + |] ; [| ( f "0xac8e1ef6d6e6f2432cf7931d45c5f28fb64165000e0b8d0f5e4795bb225d5c12" , f "0xa7d24fe284ca505767d6801323aafb15a874ff9e131910d339d692989b1dd90f" - ) |] + ) + |] ; [| ( f "0x08dbb9092cc1cdfe142d5917123e93e2de3488518aee2a44658133bf6f2f6236" , f "0xdbc75bb6a8e2a235996a4168db348e1c3ee148adf85968569045db058311892e" - ) |] + ) + |] ; [| ( f "0xdac6c46974e874979451f7e97cd5a8d98f8a3732432da7f99a483e395c06bb02" , f "0xb7569387480d733bf8dd9c2c0511cb8f56cc659db1a758b5794dff61f8a9c900" - ) |] + ) + |] ; [| ( f "0xa431f0bfbd4b237105ac1b6fd9295125d3dc1f83310012824f9f2e45fac4b627" , f "0x5d02da06391f69c37a631a96f57989d87500146131e2a7e0e3a0472eb4b3d20e" - ) |] + ) + |] ; [| ( f "0x9cabefe02ddce2244d8266dd9d135e4b75d321a3b1e5c753717d725f2480ae25" , f "0xcf957282f64b8e5b4ca31cbd4a6849736d71372a2af16f86c124c3903b6f8b25" - ) |] + ) + |] ; [| ( f "0x2ac304305d10e728d00eb645764255e76233b658f63c0af2b5017778038ae03d" , f "0xf0e9cd816bfabf14d4a4d421bebcc1023cb8ee7009385825f42b3ba0c873cc34" - ) |] + ) + |] ; [| ( f "0x9b85e2147032651ba21c9e35534db110210a4183be9f9777fd2a7009a55db609" , f "0x81768eba73262314710c09843d3db3f9080d93219a90abd08fef0ff9abeaed10" - ) |] + ) + |] ; [| ( f "0xbbdb2675bb15568e4eac17da585329ce8a9eed34dda6cab8dd1b03f6a26f9233" , f "0x07b08d220cafd057f8d2e5bab3df97a37d87061bc5db16c7ff22897d1098ca2b" - ) |] + ) + |] ; [| ( f "0x1e8191f878d8020d512e6b9c467a003d703281f9e20ab59119454d3fdbf2422f" , f "0x71d00a381473c604da24a0de0639be0603115df3cfc9769f81c3140c43eed306" - ) |] + ) + |] ; [| ( f "0xc0f95d369dc7636fe89aff5504c330cd0cef2e29efe377c4345e0e452d7e9f1a" , f "0x603efa393390ede20612899d14032a096f8f74b0e12723c89e799f626c7dc923" - ) |] + ) + |] ; [| ( f "0xd01c3e3bcb16fdc689a3ec979937da2a9331abe17a6be200d866b71e16dbf014" , f "0x69ae71773c2b1d950487367e97192803e48daf159e8ac2c50e6a366d00e40a39" - 
) |] + ) + |] ; [| ( f "0x32cb52829cda35fc3c1fcddffee15b606fc06e985742d7bd7664e613c2e0dd12" , f "0x228ecc25db8b9a11f9a775f8c8e1b08a5a5b0d6626a594b4c218c395e67b0730" - ) |] + ) + |] ; [| ( f "0x0afa06acac76c052d45efdbdc724f0577cff521f45c705d2b2e68e3554573134" , f "0x05fb7dd9712c4503a6db3683ac4ea1d007476eada30dc6842a3e88447e8fcd26" - ) |] + ) + |] ; [| ( f "0x4f0cef2ea70445f0abd137b15b34fd87943f98408f9766424e885120b763ec04" , f "0xbd31985cbc3a363fdc4197add5894c4bad77d85bdbae75a564442bb1defa140d" - ) |] + ) + |] ; [| ( f "0xc48959eab2c2682d799f0f4fc466e46ee1d4201cf4a7c6630a33ef0d5afa330b" , f "0x2519b35d491b339dae5eeb5ba4979be1aca14dfc887e971d3ef36b145610aa14" - ) |] + ) + |] ; [| ( f "0xe4fabc6a6fec1eb65460ce857962109dc5fae1dfddf01f316c609df441c79b16" , f "0xc1492d18ca18585b16d0a5d1f397da4e6bce2dd324670564131e83e391dcfb27" - ) |] + ) + |] ; [| ( f "0xe13bedcee97b51fadddc0def69780453fba0a3d57061f090ca16aa840d506f3e" , f "0xda4a3ec80eda3151f87559d6cf901235472af94e693b6aa8d3a284aa5548c032" - ) |] + ) + |] ; [| ( f "0xfd719b0a7ade73662214b67a9c6be6d47573b9bee81641115d9f510dccdd3025" , f "0x1eaa76376d236c26962ce1de3660853597a9bebab616aeca80303cb85c48691a" - ) |] + ) + |] ; [| ( f "0x3b69d55750999ccadc9ee05376e40382e30696e82c2a0a570906c3442c6ee32e" , f "0xfe124b705e490c12086fd42eb5b13b17fca38f710f4cadc205cd5d37937d1803" - ) |] + ) + |] ; [| ( f "0xcda4bcc0f3419759077b62b291b3c3147bfe390fdaf53242840792538fdade2a" , f "0x2bd882a34544c8cc68160917c1caa79362f1e52671d936f7efa6ae2f7de1a613" - ) |] + ) + |] ; [| ( f "0xee57ab20f37e69874191eee315596c6b19d81f0ed6089f71a0f7c1deba04701b" , f "0xf3bef589fda93501cf9d668867600c5e44dcdac8166804b684482db8a6915e0b" - ) |] + ) + |] ; [| ( f "0xa3b70c92c9ef527ab3b077b86b2c26e4c52196434f9928176fafc7aed6adf919" , f "0x598b13b7a0a024a4a603cf77e2b72bc45a0ee86c6655b4cd219fa08e1127e41e" - ) |] + ) + |] ; [| ( f "0xe53b7e2977cd713a211827bd2d2f3e509f554d2a5c05da285b4b5c359ccdf610" , f 
"0x4a86b22a434020dde3503417439ee7d90b7456075257f2f0dd0bd227a1407128" - ) |] + ) + |] ; [| ( f "0x64f9deef233c7b9f55d126a263792993b9dd25a691f861e895b9a4116efd2b28" , f "0xdaabc823648d48523b8fe4217344eb8a32f611840c8fa41b32d861c18e12a238" - ) |] + ) + |] ; [| ( f "0x094966653f555b416a7fdffe1f6b4ae582349c2f85609a3f5c62b153b2cd2132" , f "0xa0df7389769976ad94550e4e169d89a951344146258db22716e1d92ccbc10e1a" - ) |] + ) + |] ; [| ( f "0x5a9e9f3b824b568d157b7ab06949a08b057fe2aa49d4ff64a614d27566383c15" , f "0x2fa1374f2b2c14194e16fcd1131ab2ee3a37e6519d41fec153f30619afc8820c" - ) |] + ) + |] ; [| ( f "0x3d9d353af045117e746fe93232c1c3167765f77d7ae4db0e1ddda3b2d0375e0f" , f "0x4513b41593bc457af4df52a9ac887c3933d751681596cc78eeb113e86fbc4d04" - ) |] + ) + |] ; [| ( f "0x456af14067e8813ed79b1d6052eaa7407ef24840f94a94214c3e7bb9108dce2f" , f "0x80759159d654ab09e5b46392c1a964db6be8e47732d758f1ecfbd10e2e4c490a" - ) |] + ) + |] ; [| ( f "0x9268aa734e3ba75c242ee18da3888ec0401985ba4ce11d4f2ab1af3943e34d32" , f "0x72b7839e6266edd567524fe996bb118618d03562cd7801e75df2df2d29fbd72b" - ) |] + ) + |] ; [| ( f "0xd8ed416056eb2d81666341e7ae034d058aaaf9c4e2510b91b02a809457c2f235" , f "0xe4f9b363d19b097769ba23c11eeaec196a8e534061b466cd8889a6ce3e761a06" - ) |] |] + ) + |] + |] ; [| [| ( f "0xcbdfe1e691e494caa49b14caea9510545666c4369d8175900561af2bd03b910f" , f "0xa4bd223b57dd151ba875d2cd483ba0173dba37874e0d0171a9557292a5f20037" - ) |] + ) + |] ; [| ( f "0x30320127be5f7e423598387a8421d285379d220118e8a7f503dcbc90c21e5d1c" , f "0x49187dfa73b46ae8e7410854c4bf97a2698bfcd4f68156c7ac4287a27e6c8715" - ) |] + ) + |] ; [| ( f "0xed8c443494c508d04d29970770756956a64179dcbb48ca507d95c8c78d15fa05" , f "0x67686ddef76408746e5d9fd3ac8615d3d4f87c1cd9a449a91a3ccf70c6f43624" - ) |] + ) + |] ; [| ( f "0x1146b33ddd3ae87f3ef5e3c6fbb466b100c453d66180c33ab9debd33e3004438" , f "0xbbbde11513bf4916f3e083af911ffe8603ba37658091478fa8b568b45c4c782c" - ) |] + ) + |] ; [| ( f 
"0x9837bb422655a76f787c1bde8200af02885c1e32905dbfb7daf653a9d92a6627" , f "0x2fb33a69ccb6c2e49ddb9357a58343ce280aad299ec660398106dfbd23d1943e" - ) |] + ) + |] ; [| ( f "0xafd8ad3f51bcc3805ab3e1d703974a21a53b4682851f96bf19e32515e676283b" , f "0xddc67e22d7287b4fbbebf500abdac86d51689954d2cecb3e9c0c67f7c535c826" - ) |] + ) + |] ; [| ( f "0x778c19033b733c79ec841fd4b615b4e1019e76638840cac4ac8a5212f8229d38" , f "0xbdf372b57a04b54bc28166322d25b653e53e917d548695d0624561d35b0ffc2e" - ) |] + ) + |] ; [| ( f "0xf3c7971a479a934d007d651ca9060248b34681192eb4e5fd7a48371bded48a11" , f "0xb38d31f0db9f13da21f0d53b99bccb2b9d3fb2cd25f0f4abc3fb2dd60dd8ab2a" - ) |] + ) + |] ; [| ( f "0x40198b18662a5879c263b2385ec76ecfce0944f57c5f7b8e4172968d453edf30" , f "0x6f853e9e396e18a1a6d0d4fbad92b254440a5f91c200abe5b9e55eec4a05312a" - ) |] + ) + |] ; [| ( f "0xf2cf347289185815c476bf1927b1848ea44fe13a01f1b4d5f41b0b755c792c14" , f "0xe1c2af731785cbfa791e77868361b724c235342326b2a6f678d10c2c30fd3831" - ) |] + ) + |] ; [| ( f "0x86068f4f508b9f1fb9e0d209a0d3817649fc8fba060c7e6d5e1e3409f82d4b21" , f "0x68eb3e0f3c142d4583293fac24b48cef0333e38cd9d7434f049accc601caae00" - ) |] + ) + |] ; [| ( f "0x616369428032886614a49d260040d0a53eb5544b3386dba4b56fb3011bf3152a" , f "0x7b25325fb2a098052755152a5882bdaa42103127a8f52505b0a38577df55252f" - ) |] + ) + |] ; [| ( f "0x17b25445743c07400c384574c47658c3164d2e810c6c89615706a4cd9c185204" , f "0x5585e42c3f52497f6d18eabb8ba83681636ded9fc61e73b1e5a0102d07993b3c" - ) |] + ) + |] ; [| ( f "0xe5a11271b1ff5dbef04d6de97139ea7b114f3cfc75a57327c9855b5e34aa1e30" , f "0x4eab933ecea0b9abb9e04bab81accae56c3059a570ff8f4f5cff0d84059aba16" - ) |] + ) + |] ; [| ( f "0x0ea64cbbf3103a9e4308dd0f8acbb105fcde78d693fe11d6c2c8c75b35efa73b" , f "0xfca45bf3094fe8cea56c90ea1853f0d79ea5eab97762a153c91f079ea9eab434" - ) |] + ) + |] ; [| ( f "0x43769b089370808b3e777965715f38be94dfd023c7950c5323dd50fe3f98ee00" , f "0x136f9c7a600c7af8f3bf974a5d32349830954af8e4d261b2675669608a6a831f" - ) |] + ) + |] ; [| 
( f "0xe22c5c3787e47112c774c1a9e890d915619350d7e34d5092f6abe92d7ed2b328" , f "0x6a71c037da37ede0348ef1230f933184cfd5beb0d34ad490efbd59a4560c0a26" - ) |] + ) + |] ; [| ( f "0xd4f40b80373e42ba5e87942195cd29b01c77a68bdf0ee2e7a92ee49619b6ff25" , f "0x023e9563b26577161b87a12dc7e53f073e01be8b4d881dd48f3397fe238f8535" - ) |] + ) + |] ; [| ( f "0xce544c6ed67371ca8087948a8513fa23fb0999ba89fe4035552317fcbfc3320c" , f "0x2bee492232ec84ff2862458d48cf3485172af88804269062f93d6e142807882f" - ) |] + ) + |] ; [| ( f "0x78cc807fcf9a3c23df699804d28614ac6746898356f411ba54d6493f0346ea26" , f "0x397aa36dc65134ed2b07a88c2412c9cf6164f24712d0874174c097ef1635dd05" - ) |] + ) + |] ; [| ( f "0xf84107f4a521ed2a6f86c424a2dce329eb05c5a5a0341038496becf9ecacf83d" , f "0xa82ce84515d012e045925abe45fb33e07e3e524c1d67e7384e256db9e0dd7d36" - ) |] + ) + |] ; [| ( f "0x646def2d6db3e1ea995fe546e17fc941804ca1aebc1128090c30c191a0579039" , f "0x339b87063bfb9faed3b73c1d7e8d6296c9716a132d9781e325f6a05aadd8f42d" - ) |] + ) + |] ; [| ( f "0xe0669a33814e1181b453d85c2918ddc1326a30933ab53a4121b822b2bbbd0c19" , f "0x91a8717a3ef90dd9f86efc21b5a7ddd43ecec23e94c39e4f0096cdec0eeec120" - ) |] + ) + |] ; [| ( f "0xd080cee83149aa3e5fc69e875bf9bfd8c15334d7e0c5d308940c2f0e6f2d090d" , f "0x5d3465ead734ed66d0def6fb005ea4e58e7e4f04bd1125c0f31c7377d17dc425" - ) |] + ) + |] ; [| ( f "0x1a5316ffea4be67893ce45e53fd5c3ee8fad2ba2f2c8fac2259832c6e87cfe12" , f "0xe920cc63fa58bd72e61a6da073b30a1b3a2fcd772bd6ca39d7848cfe30436517" - ) |] + ) + |] ; [| ( f "0x57ff9f7a0e9fb238d4d1fbbdd74053e89ada8d56862980e5200d461a37f25b21" , f "0xd73e023ecd9496d83c092f7982a0a5f849407fe8b0962fd62bbe6d500b44e412" - ) |] + ) + |] ; [| ( f "0xac183f05e35057f420d6e7e2a9e675d9edd8cc3b7be8445d4a3c6df7d6eaff16" , f "0x7d14de6647ad8d611b56368ea85778dfc443e55225b8974a481e4d873b552112" - ) |] + ) + |] ; [| ( f "0x060b8a4fbf1a3f608084f7cd2c65e3ce00172c00f51a72439f8e8517c3ecad30" , f "0xdd38c3b723370821797a40d5add7d3c2f9a648af1d00146ced3773891de7102e" - ) |] + ) + |] 
; [| ( f "0x0de0a321dc5789d08f19919181f116ca8427875a73cb77c668cf1d6cbf44d804" , f "0x3f536dde6502dc65a03c5981cb043057e633a7662608d7dc23f2df09e9cf7d21" - ) |] + ) + |] ; [| ( f "0xba0f7151a8f05bda8091c2771b54ff5a311aca50ac5b79f81d4c9c5bb5c5ab25" , f "0xdfd4f77f666dbf1ec919526833e99bf199aab940aa61dad7d0b6a0c62de40e3b" - ) |] + ) + |] ; [| ( f "0xb2ed0d111da32cb9c842baf726668f3a2864bb89ff79d98a79ebaf1a3542da3f" , f "0x9fbcf25f24948a2693593921e24906b9b65dac5fb8e2fd235ce21110fab3cc1c" - ) |] + ) + |] ; [| ( f "0xb4f968afed7b8a209d05f978e1f5d892868caa5733b4e75625c8c8fbc560a417" , f "0xde58b7caea1b0d60628cf2ea64676fb14705629cd749efaeff92ba4ff98a9a1f" - ) |] + ) + |] ; [| ( f "0x726155face366bff00828a1a553b85d36d9c4ca9e5ad5280ac7105c7740f2329" , f "0xa1c2f2221af7c06e15cdee9562fd7e4dccc2825295f5b09f90d3684693175f12" - ) |] + ) + |] ; [| ( f "0xa8509841822e1901878c1ae02ad1d9189f6032da91eb731d9661470908932f07" , f "0x5c5a15e2e7c37ed4f2ea76c7ec4a8904bd6bd3bb7efd5fa0e9da3998b666aa22" - ) |] + ) + |] ; [| ( f "0xedc26c1fb01dd39ebce4aaf6dc5ee0fe10a80a5912b4b581cbd9937d50f4dd15" , f "0x4c4d67a55351d90ec5dadb49f7fc6a228db4d309dcc8995c4c07e47d3158182d" - ) |] + ) + |] ; [| ( f "0x657d456dbe93f268b032377b88a4aace6770c3be86e231c921b6f4249e4f283b" , f "0x6a4788f95d24bff6f7d0dc5d65938a5741191d0dffec1d0f3e1f181906059a00" - ) |] + ) + |] ; [| ( f "0x55167f5a201996a5bf6b8c609b2a1fd6affd469a89cbb042cd3f6ad133e59f0b" , f "0x4010389691d85d897f61cd2e3b8302e0a2eb617f08a37604360dfed3b8a97607" - ) |] + ) + |] ; [| ( f "0x1f03321831dc03a2e74f73aa4e89ff4f7bc423f64622b430c344bf41bec05c1f" , f "0x5e99ba20956fad72768cc0b3d2e6f6d20c80ea8853d6a17877f6750b12be0b1b" - ) |] + ) + |] ; [| ( f "0x17b9138b240911044de8a8874634870a3f751178eb239d44746c08d6c380a000" , f "0xf0dae5de33dbec2f60ee9b89673a14773d6c8f8e7e9e1b14a13025eec788a716" - ) |] + ) + |] ; [| ( f "0xfea458ec4ae8caccd0b0fc78f98d695f99adf4bd26bda52f81ca31c0b233f53b" , f "0x811b8e964057d71d38e33550c2039000ed268dc521aa91ca8fbc377e0a4f0907" - ) |] + ) 
+ |] ; [| ( f "0x1cd53cc50ad7fda9ebd6320ccef3a33f3dad32408bdb5bfd10cdab1f4baa3437" , f "0x00c3e2b4c0ad653e37b087218183a011e6ceb5446b5ba73292f0525412319714" - ) |] + ) + |] ; [| ( f "0x489d6c3b1367c88fe79fe727fc51220420f052c740ed7707b83753df2cb0882f" , f "0x8d6dfd50d147cb2170093e85ab2e18f4d456f90a9c77f33807648f152639410c" - ) |] + ) + |] ; [| ( f "0x9aa477b1539640765490ba3308ba7cde36e101ff7faf82a6aa08639fe9ace404" , f "0xcc0e22a278f85adc16c8432411e3f9edef65a175b6c2e9351662b86e6e98e412" - ) |] + ) + |] ; [| ( f "0x1b48eaa0fd75fde91fddd197f25f969341864e058e909cb74a5b5f5c44e10117" , f "0xf633d64862a9f13d7f7fc61e5913f2cb3f24cb2f91247d3d145b485d0d4f7e2f" - ) |] + ) + |] ; [| ( f "0x4bf97af126d61d12f111f006a6c4bf9b81be05b9dad1d2e387a1870495153600" , f "0x7c223c32d9287ac037eb010376bb080954937d07409f8c97a346de20f9156301" - ) |] + ) + |] ; [| ( f "0xf228c0a2c3d98b863f77fb716959f0c8e6b4b0e846c7b9ead928ac4f2b267215" , f "0xd8e31843dd001dc646e3c9a646d2dfa598f9d020c0ab02de742e990858d7b938" - ) |] + ) + |] ; [| ( f "0xf0e4e62d74da298f8cea9a5a1e94ff86e0ddf840689d9fad8a7dc974c5041918" , f "0x8797c88195106a018059ea8e771a11e213a6de5192538307fc2c68efbac49f39" - ) |] + ) + |] ; [| ( f "0x4a3468c6b1d46c9b4e9a5b53ced1a87fabc6ba679577fc09056648b2fdc53817" , f "0xea0e0788f7c849d051f16ea00fbf04acd0a7d637057d131d493f744a61fa960a" - ) |] + ) + |] ; [| ( f "0x1b6504ffb330db6a1ec0999d176e244c6cefb2d9f17e281fea74e87b78adda07" , f "0x7cbf53a1dc8054c5837cad06510c90d56496143d522eb187ce1cbea7a7d0393c" - ) |] + ) + |] ; [| ( f "0xea3ccb53e5afa26105c056112ff6adced9ac9ea7b61bc42b9cc2f43ecb639026" , f "0x663c5c006ba08c78fe2f59ba771d8799bd1fb9dbf5eb157e6ad470850e97df28" - ) |] + ) + |] ; [| ( f "0x6f7d8a0333b032b316e116a239e7c53979eb5d5d9dfe4dc499ce3106f6cd4f16" , f "0x4996670118d1f0d1c991a0fd1d7e33c5d58f03fabf67064a22a99d59db024908" - ) |] + ) + |] ; [| ( f "0x79abc74451200b1aaaa5725ff2f416480d92abb40b8f7f59e53fb9db15faea32" , f "0xa34073ba4cd975bc3800d4b9c7139f0bbdfc8715a41664a513849541315e9936" - ) |] 
+ ) + |] ; [| ( f "0xb72aa089b70501c2d7b5ef56ea264401e635426c8904cb7e4538d960b58f6c23" , f "0xdef234765f4b784f63928c4c025ab6694a62774b30de534ebf74a0628824961b" - ) |] + ) + |] ; [| ( f "0xae52dd94d341aa4ffc5f97379675fd6cf415ff1a3125b0622a6070d22f7dd82e" , f "0x77cddae5051d6d3511c607559d4db9656a5a928c7ee3c35d25cc71a6cec81918" - ) |] + ) + |] ; [| ( f "0xcd44ca847a87309f84ebc324acc0bbe60e7ac2c993b4203f3f3089ff2f0be536" , f "0x41ab3da668512b6c8fe274b103eecf92b82d89f5ced0beb2442e22dafcb72a3f" - ) |] + ) + |] ; [| ( f "0x520acd137274401aea13baaecb732400db8b53bd1108aa94445b63de069fc70f" , f "0x5db5f10377d5d58b248e81cd26e59a796a4205f53b20005b4d36bf3878668127" - ) |] + ) + |] ; [| ( f "0xadad1b43ff681986c4380f7d2b89282bc45a5b0a98e58ba1acc02e7e49a16f3b" , f "0xfda3c9551da3d98379f69ca1b13934ca1acb62f4f819b60f233935477ccd562a" - ) |] + ) + |] ; [| ( f "0xd13d7555f84777261bc046c960fc72f47af47c0d76e688e61ef6e590cb538d2c" , f "0xf9526397a08a5dbf0fc2d71024496f3951ea1cedb7c0423166a964b55a4a6912" - ) |] + ) + |] ; [| ( f "0x2d2462dbb08a01099b9a03436b02391faebe25de593894c130d9f036dcd7e427" , f "0x273dd13ebdae7281f7b66cc310608f0ee4cd3ef41d7501018bf1e1ed8dab5601" - ) |] + ) + |] ; [| ( f "0x9f8e321b2c8715717b39e265b4fe6b7532ab45c04c813abeca6b13382ba8b520" , f "0x768f120b7ab8453c1e2d159702b70fbd54d27d1d089e5aa82cb7e10a1d82470e" - ) |] + ) + |] ; [| ( f "0x1f4a40c207ba77d4cf0a1ea7542b5116174e871b0fa75356734dfc55d133573c" , f "0x80774fdf0852071824c4ab18ca35b0527dcaeb3c85b85259bd3810471ad2db11" - ) |] + ) + |] ; [| ( f "0xd01626f4d89388ae8e47599465be627eda3da5e505818503e864055579f92b3a" , f "0xf345111bf8c552e1664a0c327fb316c4a617b5fdf40aae19e47d4645483b0625" - ) |] + ) + |] ; [| ( f "0x3c65254e2031ff03ba0545290be0cf4656cb94d97d9f5a2e064a07f4551c5205" , f "0x43a73c35867be90b18add1378aca1818c9bba0a9a4b77946c1445b1565476333" - ) |] + ) + |] ; [| ( f "0xf0e05c0722fcd7f231609c8b6331d23de00c60ad7c15a89a124a524dbfb30437" , f "0x0863c5d112772750c35de939e2af300a74079ef53d6ed8301cef12acca9c801b" - 
) |] + ) + |] ; [| ( f "0x106f9fe149f7f8535d7c6d71f2a7448eff9db38a7c4d6d1ffaa5cc179c89ee1a" , f "0x567cabde991abdf4cf26642fe78ddaeab84f2f3552618681a060b14185a1d12a" - ) |] + ) + |] ; [| ( f "0x48fe64feb847e416d6ddf6ea10a4e0c2de06a625756469f2de473f2c69eb2a24" , f "0x052eb76a509aa851b849b48770102078796352d0ba6e55a6f66537da9ac6911e" - ) |] + ) + |] ; [| ( f "0x4975d48843fdc774f20da3379e46303d40a7a32e621ee4e1ddc8a1930dae5018" , f "0x056098fe9cca0f0db221a1cdc792448ee2150281c0d64531b908be747df04719" - ) |] + ) + |] ; [| ( f "0xb48422793d859a9d31b4dbeba37920eebb6e0696fe50947b33bcba9598a3e026" , f "0x7c5953172a48b7d3a57d2bea3aa3494c34525b46fc6ef6c06f795b226550471c" - ) |] + ) + |] ; [| ( f "0x64d6eb09240cf818b4f5af44309708cd8534eed8f32cc1ea8f62a2f50965570f" , f "0x92a0644200e727472f954935468d799ecce5054035ca20d86c0d9d12d7aa4b17" - ) |] + ) + |] ; [| ( f "0x2618c58586da02511e804659c9f8645eb6899625dac6e019b95874edb4177707" , f "0xe1069a98e4f28f081b0f30660e1e2447e8f7882053d5169a7665a5100ef73812" - ) |] + ) + |] ; [| ( f "0xa7fe87fd3ada6fabd2da3a19ab015616bc42afcffd157def6928c59d6a7d7d1d" , f "0x41dda00b14ad219b575fb26d1cd014ccea3a65661596881a7defe5280a949605" - ) |] + ) + |] ; [| ( f "0xa94a1a1421e3efc39edc2727f3b984b39433377b855e9cd7af4d9e8459058f1c" , f "0x78437ea0df64c2633d48e66be240aeb8ea2234d0c1de956726feddb6382d9120" - ) |] + ) + |] ; [| ( f "0x15e29c2e1e4047d5e659c133d0e1e462bc79890004180ce07e8b42d028bbe70e" , f "0x2bf5737fb667b6969a966947e9563c4f6636ebfa1499d67d8c33b843d8af1802" - ) |] + ) + |] ; [| ( f "0x31528e137c5f1d158fe435c86e9ba7eab70399284927e448bef0a32eca921c3c" , f "0x0bf70ce3d8ebb9699448d298502ec3acffe08777026088206ae76eb06f5f4a2d" - ) |] + ) + |] ; [| ( f "0xd798e178b49057ce7c1a8a2f57aabf97caf9bbd3cc094635d775c7f824d58827" , f "0xc82467456c4ae3004919354a994cd9f85f0cc0ff28d709f2e0adebdc356c922b" - ) |] + ) + |] ; [| ( f "0xdcd64c3b84b0f8da0faea08d764980374f43e2512c881f2ede86232f2bdc5612" , f 
"0x58fdca06c08f0e474735cd9be81d7d69bb38bd78de4de0e604758bf4cb550b25" - ) |] + ) + |] ; [| ( f "0x26b7d983024c37431302b261d3ae5a5f8fdf74599f9853614e2538a22f96fc12" , f "0xaf475615bd7db2024b48ecb84470e816fc0f2b0a93f00925be3059af04ec6618" - ) |] + ) + |] ; [| ( f "0xc5144324745810673387d56cfe9dcc51a93bbcee7b3d0e045fa3c25046384827" , f "0xe9d96c836a04c72caeb951095d724fbcb824ca3c0dbd155bc71b956d6034a62e" - ) |] + ) + |] ; [| ( f "0x5ae8c8e778ef948d95556decfea299d86cb2793c719580bfa10ac5526480472e" , f "0xd426b298b4ef24e98586da32e523d63114ff99018f69b67a0391484d884b9331" - ) |] + ) + |] ; [| ( f "0x6482bd9fc3bee34b1ce97180b4e6cce700703f765e03d2469e057371dda3e104" , f "0x599cb7f8c7319f926e299be6200f3fdc90b089bcac483e610be67601423eda1a" - ) |] + ) + |] ; [| ( f "0x43f057de7f2391ddf46f7a93bf12e04e76c39500a391bf45e9ff0974b8934437" , f "0x5eeec4eb2671f870417af0a2c9bc5e3fdea51be55432a3b8a0856abc461d073d" - ) |] + ) + |] ; [| ( f "0x3d56bb47cc5f9542ce3351bc970d2c9b5d9381be4c28fdb9ff43c194d9e70d18" , f "0xb861eb28238bedf6634d0530af6f6eb1081fa89b7388d7bba31bd2c0fd636208" - ) |] + ) + |] ; [| ( f "0x42f071f4b4c1baae1f17ecb65dfa5046a75293c38a0820a19db49b6efdc0fb0c" , f "0x2dc1df40363cb433426d5cb759cc6b99bc5e73b07dd9cc6a4c16cdc66495f60b" - ) |] + ) + |] ; [| ( f "0x1db548004c146170dd240decebf3233f854e39e26d7aefd2f971c239eec1800d" , f "0xa588a02f73f997bd7ff55a4a0440f969d4995aabb68b806b18a127b3bd6d4f2c" - ) |] + ) + |] ; [| ( f "0x8d535be4456fd57e09dc7b2da985d0b8979600e877dd40e9245f52aa11a41c1a" , f "0xcaf1eb59bfd880b717a77b2c07264d0c2f3bcf50c0ddfaa8ab106bba9a85111e" - ) |] + ) + |] ; [| ( f "0xc33449410fbca06d43d3f47fa56c75312b6a038eafeabdf7e61f8581b3c5683f" , f "0x93e88029188c047c6f953f16d219f3c1850519bbd78bde30f0dc87d7ddf7332c" - ) |] + ) + |] ; [| ( f "0x3d024ad5adfa9d3afa0503e4a4de4c7db32284c610c2297f48a3b5773837982d" , f "0x5a522c7169ff16d19c4cda3c842d4ea92ac7ce2e248ea3cf9d928b8cb379870f" - ) |] + ) + |] ; [| ( f "0x3d9ba7a9202cf63d8046e096bf4d0c8ea92d999a549dda95f34f739fc5d8120d" 
, f "0xc153fe66d688ff69382d6fc14a521c6d90e70ec459e7ba5a85807264c97d0016" - ) |] + ) + |] ; [| ( f "0x54a843d069eb734bcbab35afd2c2c69fe14903bb6b5942ab8ca173f8c688d934" , f "0x4a5975e1f8eed567a8df1bed0f1d543452c3231710056296cd5557cbf3a2e014" - ) |] + ) + |] ; [| ( f "0xf79e66fc36dfbc573b44656b191380ae47304948e996d2172e34528fe58c4d24" , f "0x1cf06deb6190892f998aaeb56c0105cbd4dec58ae7e877d4be8facca9aa50f33" - ) |] + ) + |] ; [| ( f "0xb1bdbf342d7bec5ce09a4e0afb9ce77f658e8ca22a8e0646bc6577e5a4ad0e2b" , f "0x9f68db64d6b1c096c57336207585d1fc43359da971a26d52edaa078eb7e42001" - ) |] + ) + |] ; [| ( f "0x92519ab9ed4e9b269ab41d6d50222b03b487c079e757a2028a351774c6a2c134" , f "0x52c05d63e7e774fdcb3be7538f422527fe63455801e7be4e907aaab50f20a938" - ) |] + ) + |] ; [| ( f "0x13fef409519c284df1745331ea84be42eb294513a123cd2844e081b86916781e" , f "0x2ac75ac8caae7a0e59597b799973631d8ca995a7bc9863eb4336fbefe2f83c25" - ) |] + ) + |] ; [| ( f "0x723c37c09e8931f8c9d77c53c4d8fea9e2cfce15e2776052d880e766571b7109" , f "0xa514e6374a772978d94abad01ee26710a94f766fb47909802206fc74d5fd9d37" - ) |] + ) + |] ; [| ( f "0xe9ec763d85a89a2b5482f2f2a8360bef6689f9096cc5259afd9bf82936d8c108" , f "0x85f0e187c36e23ec089ce93035e950e97af159f7fb38bb10db25a5d2c2be7c03" - ) |] + ) + |] ; [| ( f "0xf5aaf9c07f74cf08e3009a43371ec86699bb80df29f3f59146a7fd776a49eb3f" , f "0xc846450b6d7864a582c779b0cb74cad6e1b0602fd81323078aaf5ea9040cf612" - ) |] + ) + |] ; [| ( f "0x20735b665e09206cb36533608385b67812e0d13b1b90cc215f85ffbb1893a40f" , f "0xbaf792d98fc18c2193a1ad7798e0b00573d38c4d707c0d2c5faa5cc7a9a8553d" - ) |] + ) + |] ; [| ( f "0x7ab9c7878ebb8cc42bb823510b8a870a96819089c23ce3e6bc4079cb2983752a" , f "0xfb5a9d14a8536203a5f0a0fcf829ec7df6982fb6d80a7bbd460e2b8e78c39129" - ) |] + ) + |] ; [| ( f "0x89799b07203bb95d95515689eff8a7ad30524092d98ddf97a93b41779b86e43f" , f "0x4fc229d45f2640dc15e2e58ce24b7c964b7f3a29501358773d5a1152f788cd24" - ) |] + ) + |] ; [| ( f 
"0xcbc0a35740d5e952dadc0dbdb07eaf6a93d95b929ea605fb9708f21c495f4d39" , f "0x11f63b62d59bcdbdeabae7114167ce312aa307909c320c01cc0fae062146d81c" - ) |] + ) + |] ; [| ( f "0x0b13c4b16e1200dd123d9ada99db58e5933cde019a57b2777e2dde4949cc580b" , f "0xf2fcc1101fa5effab784e70b84c6b0d709f97d4268a568f81ddbd9663e051c15" - ) |] + ) + |] ; [| ( f "0xe5854dbf9ae168efaa6d458083e4eba2bc1817a21ff9e69a60ca7d7fdb37871b" , f "0x91b19f0d0e05f8895f051b4d41d87281af7f49c41352ca731af2a9e6dfc92323" - ) |] + ) + |] ; [| ( f "0xa80e5c291444f6c098900867ed7f7e46bc4707ff5badb0613231c563da6b1700" , f "0x877ff2b4618658770453919b82ac502cb8cb58be65c910f2ee195f5552ffe729" - ) |] + ) + |] ; [| ( f "0x2953998f9ed6c134b65f709627a0a37b9e702d42b9ecffc63cd6361a8f2a0b3e" , f "0x546c16d1b43332cb680c28dfcd360a1c223fcad72f070870e27df8e3e26fe40b" - ) |] + ) + |] ; [| ( f "0x982c1dd7144c212e6b1d2589b43746e72f2f84499a43ad678605d7762aee270e" , f "0x464a10d151b1b1a6c6f4528b5fd0d65af1e4657084c7bdd9579bce675e431d35" - ) |] + ) + |] ; [| ( f "0xc9bb03f5681746872a0fa0a148d9322ab86c94d2379179745c79c4f187b41c35" , f "0xd179d749561cebc6d853c0e4bc428ec8b684bdaa3bff0c71909eadeda23b9606" - ) |] + ) + |] ; [| ( f "0x2519ed47d2b46764a3824d306373b67efdbb31f4ac8e1915b1bbea72799a4e38" , f "0x6eca114b8c1e27ec5e2a4e19bb7cc1cc899a4d47f97664abd8cd6e638bceaf0a" - ) |] + ) + |] ; [| ( f "0x4f5be250b9da35693529f3dbe74d2c8600ae3522afc4b6983ddc8e06286a2625" , f "0x3214451b6eab28f52bb1e5cf6ab0f55d30b8f4c3e22998a849486d20e4259611" - ) |] + ) + |] ; [| ( f "0xc7d59c1a5d8c69854486bfa3e2b8e8961510ec896bb3c586de5d1540d0a54b30" , f "0xb2de7de35ffcae3f36c7dca5c89d8f5fc41264d0207e3257411345794a0d293d" - ) |] + ) + |] ; [| ( f "0x5745c0c28850bb3dda8150c00867cb007b454da80ee95c317ca68e7dcfd48300" , f "0xc8b0e897a5e861d30fb5f0488fd0e7e37f985f35963f45b1f390bfe4f6b0362f" - ) |] + ) + |] ; [| ( f "0x527cfc7319853dba49c3a8bfda4b36e11907a59dea77200ccb2bc17dba94e705" , f "0xb331f38c6a301e26391c21f68feaea1af386f43a94d7cb5c9d6bc19682d72f05" - ) |] + ) + |] ; [| 
( f "0x93ee1045ef36fde56297022b56d0d324b080d248a168aa4bc43ccce63868ff36" , f "0x0728139eb724e2993eb13cb59b51b60916a33378d661961a3edf4925e99f2b39" - ) |] + ) + |] ; [| ( f "0xc68b450268bab13b1e8b0eb416bc4b489bf907fd12ed1908b9464485a57bfb3c" , f "0x290d7f3902d96fd219b8f98177b0278fd9d41b8144ce11015a8b14f11c48ac3b" - ) |] + ) + |] ; [| ( f "0x0b8912d8110e97c68d12f6eb127bddde654d6db7f22fb38e41b2b8bc2aee7824" , f "0x2fcdf9299be59123252c2ab33163e456544ec27017e9aeef0a15e097e6d08c16" - ) |] + ) + |] ; [| ( f "0xe36ffaf093eda9a2c90edacc059fd0b8ede4e466f87389b8598c5230098c3b37" , f "0x994ffd3c7670a120298cf60c2f54a62cbd4ec8fe1ed4a7a26f572472728c0010" - ) |] + ) + |] ; [| ( f "0x4b360500ad1dda66bb29614eba59f78a3d1520c3d9738b0ac85257ea60cf801a" , f "0x06b63c639861d85ccdfcc4ea3de51b54ebf97fcc1c884eafbb6bfe37fde43324" - ) |] + ) + |] ; [| ( f "0xcebc431f9f51864a778b1e74e72868c83c463ff97cb9871c0f0bfc70f2df4600" , f "0xf53daa22480b0b74fceffe55e41c7c90f2e2aff9f139326e91fe9b8247030a0a" - ) |] + ) + |] ; [| ( f "0x09d8c4905884800394f6c7cf34480579887ac8fc34dc6642cfaf8d5deca28932" , f "0xd36289b220036b5fa81f469edcae6684becfbbf86780b9715447f62bf6626c19" - ) |] + ) + |] ; [| ( f "0x8f3d6510f8e83aaaa7237b087b974d10683a611c17735163780812dcfca02b22" , f "0xcf82068321d9ad614555e4f76982c0daa294148470bb22b7e96f95c97e9b2f07" - ) |] + ) + |] ; [| ( f "0xdc0287a4a57f24d81f080c8110f8a12f892bd21bffd2b0cf79b7d6a12f753a0b" , f "0x3730557df1cece6793fd79ccb035d53d38075afa13ae5e311034e1e47cb50f2b" - ) |] + ) + |] ; [| ( f "0x89835075b3a267ac75147ed8eafd2158e19d98aa55b19ae0166a7a8169a26712" , f "0x1eb4ecdb0c371e03323cdeb0c51607dd462dbbcb4d2dc5307e3221f51b53b337" - ) |] + ) + |] ; [| ( f "0x8bf082a5150f046d302e170e1588c6936bb78acc85d53605b839d6511f09212f" , f "0xafbd66a7494020d60bb908eb3d37aa06c441d6ea55d7986e21fa697f350ce51c" - ) |] + ) + |] ; [| ( f "0x78ded9a9420d270536b2b0d66e3d8c27adf32e34d08e3ea5e4b0fedb25a39e01" , f "0xda751ff359c6fd4f91455573f5a2016bb9ba91e27832607dd79ef7ce9623701c" - ) |] + ) + |] 
; [| ( f "0x127882ed9845f135a76519d0220a92d925d35425e10fcfd7aa3975393fa10d26" , f "0x4ac63dea6dd5823a3277e7bfaba005230263e11362c672e1bda7931a9201a327" - ) |] + ) + |] ; [| ( f "0x03d44bbdcf10ddd340b75c36336d67060e7fce9ff1ac29d12dd35b62e8475413" , f "0xaa7ef51810a54c49f9acce6208fe0c9e85f0879d9f9ecbe4aa8b9d17057e790d" - ) |] + ) + |] ; [| ( f "0x990eb472722896696d1c2f79f1c08b4afadb1404c375b5d7ab285345378eba22" , f "0x8a482d96eaeeb24b7cf09c25dbd040df66f39b5dec1dcb249ee4727d7c2f2403" - ) |] + ) + |] ; [| ( f "0xd43ffe12537e3ee138c1fce7688b946f1829660a008f3b5537fe2523f8078412" , f "0xfd0d1e401d5b208d674d8333e5fae11e7c907a67e480a1b4ac63129e957fbd0f" - ) |] + ) + |] ; [| ( f "0xc56505fb5bd7219e5ad0505693b95831742d5e3d64caf6b9508422e3964f4415" , f "0x9a0492e2f2f329b1aa9923cd7ebe39bf51751673f46a3f82e54911ea2d6c6421" - ) |] |] |] + ) + |] + |] + |] let pallas = let f s = Fp.of_bigint (Bigint256.of_hex_string s) in @@ -8324,7670 +10005,9222 @@ module Lagrange_precomputations = struct "0xf3ea7359f0d7b7ebc106234ed8dd59d753a344fe432d455c00bf9792c2fe1834" , f "0x2bdbb0fb56646ec2814c65907e82d089eefd0435b558b747d2806fcbbd5be304" - ) |] + ) + |] ; [| ( f "0x7b67dc7a650ed63b15e4518bbeddc400ffbadde09780a8f2836200bf5bc3a505" , f "0x38d109ac93dd66c4eaff3646857af1df7b131648767b2e96674f708831579507" - ) |] |] + ) + |] + |] ; [| [| ( f "0xc7d1952d66997de3b81578156d3303c951fe2982082a9ebf252f430106b66d2f" , f "0x92187b611245c50944ee57e8423f458c345853d0efa58654cdba177c737d5c0c" - ) |] + ) + |] ; [| ( f "0x4408f5e70fb9cb0611977adffee8a164a513f9e6f8ca57b2d52b363e576bac2b" , f "0xc55a920996523ed0ba1d88e31e7e9090d7d5ae023f6eb617e35deb9dbc8ded0e" - ) |] + ) + |] ; [| ( f "0x51d190a5d897ea65c8113862aa5f5ddf6e556b594fc93658d590dcabc8aa6b21" , f "0xf428459946e17ee56b2ff55de480f4f4cb9aaa1e2a36c65110ab1ea332fdb313" - ) |] + ) + |] ; [| ( f "0x4a84d527752ed2b5e677ca8ab2bc70c0435c4822a673a59f3a08cf6ff3cef926" , f "0x6b3a104ceb2c885e92ab8813292ffc87656ff732f789d51825c6d31c5ba01e14" - ) |] |] + ) + |] + |] 
; [| [| ( f "0xa1628b4eebaa4a82fc8bdbfc228abcc03460c4cda8b90748f885816b0324c808" , f "0x278bae326bdda88959279b1883374757977530af716f9318efa4db851adab10b" - ) |] + ) + |] ; [| ( f "0xe479cd1ef1cc4676a65a3b3d33270bcf3f117335ed028fe1f2a2f03c702a6305" , f "0xd46c82dce8b25395f8d9d0f910c9d72facb08a610ecb267207ce52747a863206" - ) |] + ) + |] ; [| ( f "0x1d386b7bc5d5016dbb4b2b6b7604fb5341b9b97d6e98066f67c291c9fb81100a" , f "0xe46afef41e087ba871012dd88db8cd438cdb6c8b12ee6896dc1a2774ee23171e" - ) |] + ) + |] ; [| ( f "0xc0f76253f38dafe4a09f70e9017c217df234a163d7c20a1abd06512b96a5cc2f" , f "0x7f37efeba5a149125fea076df2ccd9078795d157a5f488601b81cc5d033b8d0a" - ) |] + ) + |] ; [| ( f "0x72021c3596d32b0541fff669ecaaffdd87d042cc029293f76536144f459c9116" , f "0x8d627611f33287a582bf16b69512e3df901e298876be14a390efb4e43f773f3b" - ) |] + ) + |] ; [| ( f "0xa361516a64f7b1d662a110bc3ee6b1dcebf7cc7c6f7c3d54c8100f49d435a10f" , f "0xe4827eb63230b07f19c3d9073046e73428aa0c8a86d06dfd19230c5057e36b24" - ) |] + ) + |] ; [| ( f "0x4f99b60ede47f4298579d8ab4eb511ee75ea27d9ae24537d3f4126aa0c9af12d" , f "0xc98c1ee0288f00a15eb7e39ca90fa1bd2939a6b2b347b74b0be2f155e219ab0d" - ) |] + ) + |] ; [| ( f "0x0f9d5e8de37f51076956e8f7751c6432df9c39a275511daa123c903ca0b0bf30" , f "0xb339189e97c50d6f299a50cb71c4865f4063516990e1bd38f3700cb4a9d0e033" - ) |] |] + ) + |] + |] ; [| [| ( f "0x7d66ac866a82f7b795f34e7a1ae39ad76dc2bc860ed212548010764dd48a7927" , f "0x44ce4e94fd67a8b02825d40267edc38913a2a868c7e094eb49efd3606141d133" - ) |] + ) + |] ; [| ( f "0x30cf4478d00239489ee81113491eacc41beea9e3629587b462fd998305b3fc3a" , f "0xa3155ccebab7205e3eb56f2f23b61741cc3b51bc8cc25a8342175f59929b1a2d" - ) |] + ) + |] ; [| ( f "0x3231219ebb31a2014a132a6ab621fa247f5625195a1989ff79dc91d29f50820e" , f "0xb2f04ee19f916097715c273b8d30332219fba9c2d86c6135b09a9f65efbcc629" - ) |] + ) + |] ; [| ( f "0x2a97d2d238bd3d353c4307731103ca365368c39b5ca20abe95de969eddf1f101" , f 
"0xa64297a1afe94dfecfcc8879e4b8e199c65ec5328d49a6a984723f66826b410a" - ) |] + ) + |] ; [| ( f "0xc5ad5dc176f3c03048e76f0fdbd31bb92a105e1ab83a5c098e054746307f1529" , f "0xa44d44eabb34a3c54b5f8f25ea94fca61c85732bc505df74628a0223ea44c33a" - ) |] + ) + |] ; [| ( f "0x3ea43d2663bc08c7ec6c1d73c3f480b8826cba71869dd046d123c35db44a9a16" , f "0x4b46927552ed148b975ce6d2d416f5e5277dbcb7d4c0dd8ef32206281fde631b" - ) |] + ) + |] ; [| ( f "0xd1c47acb90b95c0d2209d2aaa3225a9030d7a2d0520e3a01c504a3bff9690210" , f "0xfa1212ecee5278accd8ffdbfb3f810bae65d0a5fd31cdad7f230d2f157aec936" - ) |] + ) + |] ; [| ( f "0xb8977182d4bc25275c999a047917a8f2d959f44924a84f2e81692d9d9414332d" , f "0x6a2c5072e14ce294c392c7d5a8c010f08c678bf4445c8797424707a5a53f0c38" - ) |] + ) + |] ; [| ( f "0xb80afb6e436796f2e3c4867db70b71bb3efc002c5c8be5f0ca4e951c237dac0b" , f "0xdc2e3e3fbb9e0db1be644321dec3c63338a2ef6c8c4cb8b9ee157958d40cc70d" - ) |] + ) + |] ; [| ( f "0x80b02557409e1ff102f2479574fb2769fb15a2956e4bb14f92ef50260e45b926" , f "0xd8bf584c165be8188e631c13ae90a58169a053502025903d96b8cc9779d0123e" - ) |] + ) + |] ; [| ( f "0xb2a280d03e6829f66feb66b29eaa5f19d20af27eeee4acce80b0605f1170ef15" , f "0xfca2a3b4d37a386a5c3bfcfc74c1403f9aa1b41765ffdf080b986bc74e97cf0f" - ) |] + ) + |] ; [| ( f "0x13ab3a2cafd6ae4a4de5f995a21ead8babd6cfc78736ac4efbd48c3f4a564327" , f "0xf72ff93a8b629ca36a7436990cd6c0742b77e017f3d78fb122093c978154b803" - ) |] + ) + |] ; [| ( f "0x48f58c19346f5d9e09058562c57a049d40100aa6758de5fa6dd968bbb3975730" , f "0xa60340136d06c542162af36edc693a7297948993066186334430c6216f4a5a0a" - ) |] + ) + |] ; [| ( f "0x62ab21d29e846942438a5080ad5846ccd8500034088c1f3dc0ce207cc25cd521" , f "0x544b314fdb544e396d77122ab39c93dcfcdbaa66e1aeaa8b75aaf37bf7aa953f" - ) |] + ) + |] ; [| ( f "0x4871af6bf6aaf941ef94eec7ed5ccfef7c18c6466fc8786a5e3c448fd1ab3318" , f "0xf376a25401309d785118b367e0e28e1edb8dda3c50ad6a3d0a21f864f2678a28" - ) |] + ) + |] ; [| ( f "0x1d59c2555a9a43eaa4dc0f5942b675083f9c74b95c1e38317deb90f0d9769e09" 
, f "0x0d6bfdab82dbf035f845d784b2630664d3be15105860cd0da0c0dc589e67bf19" - ) |] |] + ) + |] + |] ; [| [| ( f "0x1cec639c7ceee7e482960c39037fc3f859a5c58a6dedfd0b6d08037c6cc17904" , f "0xd9c8633531e8068296f41d71da46ca2431b7a9f7ae4a5782d8ca9ccdbbad0a3d" - ) |] + ) + |] ; [| ( f "0x45f098ae151f90eea274fdf6c8fb4917eaf76d7d1d8dca4ebf15e00e9685a539" , f "0xedee5f16417d5fa592db8adbaadf71585e7d911a50109d0e185e9e5d7420ac13" - ) |] + ) + |] ; [| ( f "0xa8d9372f8316ac361ffd7796665b949ade77965577ce1c9ebbad172528d71936" , f "0x75d63acb70f44eb45a4ee660ff6448aac0e9a384a3848bdbd3678099ff77a425" - ) |] + ) + |] ; [| ( f "0x21fb6297e9b0cb47f839531aefb48ed626bccee08cd86f42719a73b81f81aa33" , f "0xa95646080fced780749c1a8bac7591665bbf13af0bb6679166040a624fdc2302" - ) |] + ) + |] ; [| ( f "0x4f1f64e9ba97460fb8aabe7831509d83814804887fd538757f3d2a8970072e1d" , f "0x3a81aabab6ba65d0bd2dc535e599ad4ce087880f5becfd191b202f6cb833b130" - ) |] + ) + |] ; [| ( f "0x4ecd38dff0eab15dd1f824a529fc14588cc9214d8288b2316c1e1a30b6f0e400" , f "0xde2a70128efc1bd4f7095c29573189e626af0445680a07f3cdeaea4c9540ca2e" - ) |] + ) + |] ; [| ( f "0x6463957f28b4bad3a9f0d7defe04ea82b8189611d1495551923535aebb761b29" , f "0x76211e2528f3b0b4c354cdbb59dcca77783ce58b89feb5b56a82c33a52062325" - ) |] + ) + |] ; [| ( f "0x8d4fec9000444fd514ba55c19103a02b1ede602e23b28c7bd3a4a8f8c387e330" , f "0x10240d56a260a65d301373057f82ff8ba90d923438425ee13d119dca9163f90c" - ) |] + ) + |] ; [| ( f "0xaeb294f211eebaf51cf7902e433e28102581340867ffbe11698620de4f812109" , f "0x715df733cc486faa6ed4453ff1fa9844c5893fb82afce3b6faed90c1a0f53f0d" - ) |] + ) + |] ; [| ( f "0x634ad01f034c2015d49e03916a084a39f0ef69871b39e879cdcb09bb4b777804" , f "0xfe2e11f53614a345f25fdcf3de900cdaba9c5cc5ecd7f013bbf45093bb9ec90a" - ) |] + ) + |] ; [| ( f "0x95b49a100600aac3267a5f2405d5a01e7d139363c843c8d9bdf154db70578c01" , f "0xf8e9e0084840c5eb1843ade38629c3aa80eb5b726dbb1d724529e7333f97da3b" - ) |] + ) + |] ; [| ( f 
"0x7a9ebfa4e1017a0af49014d555469c7f32ed309cbaf2f03c14d29f582cf10a3d" , f "0x9ec1c563d5b3992124c51e3cf0738827a4f2ae319e533aa6d83faa0b4312d409" - ) |] + ) + |] ; [| ( f "0x9283488c38263e4073aa9e35cdf3fca0b848ee5b479b805174982849b6b34626" , f "0xb6ed172bccbeba1ed59b9c316213da894438800e39f5df7965cec71ea6c66629" - ) |] + ) + |] ; [| ( f "0x413525240f67147b04c122a16e661d4e2149af1a35550f8783b65a740a39361a" , f "0xbfff5a5d5859049dce882989fa3da9f990adb611107f21e8f0877e5a72f9da06" - ) |] + ) + |] ; [| ( f "0xb03ed9d416ca709dafe1f6a0e9d17d551affda14d448ad5f1fcd8b829f90e83e" , f "0x26920db2c665b10a99dc63a276932b50a5c67effdc461393be32e74a261be705" - ) |] + ) + |] ; [| ( f "0xb5b412015e33eda0beb0a050a83fb46f5e34f04a06c53c87ee3045ba2134de00" , f "0xf5121828c5ed806905830c2ba9aaf7ccb37353d4e6c9b64f4a0230dd1932fd16" - ) |] + ) + |] ; [| ( f "0xb841ee64ef4cd364843f6254bba4cc45c42642205765e8baaf15d2fb01283b03" , f "0x6b53bbd8bcaaf64ff350c5cd736f4cae8e6e257580b64a22300d6fa5309eb623" - ) |] + ) + |] ; [| ( f "0xd92c4aae5ee3f6fe9b0d17ec90d39743a0c6e0a4a648f740f17af86873c6001c" , f "0xc460d899e95cab6538ab54b789ee7607eb06681d96addadacfed7b703ae5c302" - ) |] + ) + |] ; [| ( f "0x1add4196f72c54f3eeec646c69b0c215e3606ff2a3b6335fc136dd489c855c29" , f "0xe036d1e629ca5271618e7e0f080e91a5a883ea9700ae98999778c436b288c70e" - ) |] + ) + |] ; [| ( f "0x0da80312e303b95700c1cf2dd5185eabd7a6f2698001f1555a3f6485c1dcfb34" , f "0xf6b15ca7fbfda3ab1a5d41cc53b8fadd66c2c116433b3e46370ba84cc9ed9f36" - ) |] + ) + |] ; [| ( f "0x855eccccae31b9e0099a5be281b37d914bacef40f8a1a6791baa00eb4ad47b1a" , f "0x9593ce3a6d52bbdf0eb928e18a738286beb6a5fefc03120021ed1cc6e63a4d0f" - ) |] + ) + |] ; [| ( f "0x07656ae0dfa699052ae5e8350b433c4559d8827821e834a18ff27486995e9413" , f "0x3d11b9c4f8a5fa88481a741771ac9f2a83f58fb78e381873704943dcf9813435" - ) |] + ) + |] ; [| ( f "0x938292cc3af22106b007a1db9e2812806859671f7b4a06f6e4cba6296179c93b" , f "0x167452a61f4e21589affbc886f7d56f1e5245ef649cc43b2cdb2646f0dd16b3e" - ) |] + ) + |] ; [| 
( f "0x92845c1bd1a753745b0d04a0c5bb175fbd64b6b776b1170577e1794606dcd71f" , f "0xf94278e69406850a570fee52087981773c0f3c32211547e920db08b7dd0f9b35" - ) |] + ) + |] ; [| ( f "0x682bb5b24c75c1fa38a31481419eabf4ad3a252d6f2786be52e24ccfc7f57a26" , f "0x39237c42c20cc83c96be9897b042c3f922e582c38367c37a8add404b9460cb07" - ) |] + ) + |] ; [| ( f "0xcf3a764eeba11dccce9b71e9edc003be2c425c9479b93a85f48d558991117b03" , f "0xd77f9ce58f5778b297c416673f59500eeae0428c83bb78a3f4e174ccffe6a219" - ) |] + ) + |] ; [| ( f "0x6816d259bbd468dda4208445f5cb52fc788f73eb26a7897f0bd54fc1a468072c" , f "0x7147b9aa91fd91b2629e0a2015ff0be9daa1e7484d3c7dc0d5c5eafc01940127" - ) |] + ) + |] ; [| ( f "0x64f82a938a7b83f1660c1d8f864de97f95591c877bf6f38fb5332a2d78a63d14" , f "0x960aff3af3cebd0c81357c80ca4bd2f349576d4246159bd86b8355f6c2578006" - ) |] + ) + |] ; [| ( f "0x945b8ed8df545a1fc133b6bacbc6230d8a72196c7c7581701a5dc47f2c6c0434" , f "0xaa179355c4547ba0b01b0b15f0322ab4a2015912e66b3cec70fb41c036eb2300" - ) |] + ) + |] ; [| ( f "0xa930081d452296990d8b554daffaaf587383a0ff523d8888bd0362b984c2d803" , f "0x233f4298915956181cc2df1d5a4212e8a19f70990bc0874e2630bd0b17904214" - ) |] + ) + |] ; [| ( f "0xb39af72e0e47177aece27e100127ae94a1e9647bbf74a217409997b666ff1c12" , f "0xfa2af9e60d8558e0019c83474fadc755b1adaacc1f4adc4feb36068d0a7d5e2b" - ) |] + ) + |] ; [| ( f "0x4a3b3e1cb2e1af601f1fa88ce8f5aeb634a841fced84fd6fbac30798b1356303" , f "0x60f53ab69c3bf2231b46864490deb9be985ae59d8f19430e55517e6d39d2c015" - ) |] |] + ) + |] + |] ; [| [| ( f "0x248e73feaf40a59db34620870e0821006f064a32d99b84861262d858332cf537" , f "0x556e3aa09496ad0384ea45f1c9173c5b366554d979f60a0adcfc5e3b3624cd2c" - ) |] + ) + |] ; [| ( f "0xd6ee58e8260085fa16175dd707af7812e79b224f0d1484ca17eef52b06b65a32" , f "0x208785dba9bc5553bbeb8f0b0b54a6c5a4b418ddf327c760b4e56a84a0966b17" - ) |] + ) + |] ; [| ( f "0x5377a50715ac5857ad5054a6644a9fd82398d29078ec6d992dfccc401ad0f605" , f "0x15c18df28811406920a37f5a0ac52e5f7a21ffa9d28e4894dde0a1b1c0c4d701" - ) 
|] + ) + |] ; [| ( f "0x6069a26f0b886895a6c4cfb531773419b4af6f99311ff4af0b6d3ba1512e4f3d" , f "0x72442bc2497402606d24751b124ee38ef4d748de7818187feb9c6c6edb647339" - ) |] + ) + |] ; [| ( f "0x9e319de963f9a2b446e208b6a39627527a55c890ebec0ad8c8ceb0a504152f36" , f "0xa11b211b485dac0651fb6838ca096172e5f48f360c932de0b2946f80fd6db525" - ) |] + ) + |] ; [| ( f "0xd668626331a8a47ad4d54f2f33af7e8173eaa8e0b7ae8bd9170b1b909e860732" , f "0x70928735d4fcedc5defd7c001a6cdbb0014f49ace9325fa832908923a959b22f" - ) |] + ) + |] ; [| ( f "0x44814c98915d2b4b4057716e6071e88671f0e75cee72ecced5bcec9a8635851f" , f "0x08f0efdf548eca57c13193d1f1b9335f0aadab1f9c84deb4e7db509faeb82502" - ) |] + ) + |] ; [| ( f "0x4b0a8ced749c4b7e335d3399d5e3c482328752fa3856f86b8c1df3a420340e03" , f "0x7ab08c70d6d0fbb7e51f6dbb0dc4a095726ad95668978218fa04e88117f85e32" - ) |] + ) + |] ; [| ( f "0xbb839927ed986aaf6f357ee0a431248e55c176964f09044c9a70fd1e0a2a4d1a" , f "0xf755daac980b167c7037d793083d70e1d0d94e423db847e67608f77543a2822d" - ) |] + ) + |] ; [| ( f "0x281dd6b63053d92c034fc695ef21bc90e98958bbeb88f68ce5614745df12512a" , f "0x3092a85ddc144dd5093d2aa1227e326948b33ddca7d5361c4ea2151d822cef25" - ) |] + ) + |] ; [| ( f "0x78593188df6d6c11679d572e8b0dc51a1ade809ba5b440a11262b837f5fd001a" , f "0x8a4c7e49cae605c516da42297e6b9da083113986869b7d76144b547b7d86290f" - ) |] + ) + |] ; [| ( f "0x51dbb1806111832b36702908a734b1a1e6269410feaae930c66300c33766de0e" , f "0x04c99a9eda5b4ef75d0fc65ed5b45145b4868f1229587d8862e77d20caa23b06" - ) |] + ) + |] ; [| ( f "0x155443f445d7dbfbae63d0b501d409418363b4f029b6fbcf4462cf44f426ab02" , f "0x3d8c936621f988fd2ea9d5e344754a470c73bc37e17fd46be0674351dbc10c21" - ) |] + ) + |] ; [| ( f "0xc7cedb29c8b09ced2f37c86c8743e12601d26fb6f8c58e08aa364a6898d7a60d" , f "0x5a0b724e55b2df4ea7222452e39ac661cdb06faec2544fe65442d0733a42cd09" - ) |] + ) + |] ; [| ( f "0x919272a17c84717f343419c46d26ba143e255b3ce67c1f915716f660a7173b0d" , f "0x1753f96dd8fb6f748fb571cb9548670e338c74ed2aaf336494d4de869d29b538" 
- ) |] + ) + |] ; [| ( f "0xef3fb9bc64a965214465e7e0b775599d9eb6c9b406d68627d8d471c70c07b800" , f "0x5db0af29756590a2dfbcf651db24546a536003fb75298fa0a7f7241e55bfdb17" - ) |] + ) + |] ; [| ( f "0x8e28500c97e241c64123d0020b1f7464d1f03aa9c65ba01ab74fba2df0602f1b" , f "0xf7d4a45500c77b98c07e769dd8ed5db5d3b09e951b0106ff0e0de82725176f26" - ) |] + ) + |] ; [| ( f "0x87b8fda3b454dd5580ec6715bfd17e4e106f45feda18e20c8b54d36f84aa360d" , f "0x00fe1485dfa799634a1cd8ffedfbff15758c103f4328e5015bc8644104765906" - ) |] + ) + |] ; [| ( f "0x7cb118d81dc68e37e62edc16c38e18d3016596a2061a81759568a219e0b49a0b" , f "0x137cd014cde171e0defd0cd533ba1ac04413c906f32e935f3acd99c5da9c4f02" - ) |] + ) + |] ; [| ( f "0x1f01e3e9c3e44f57975438496bb64f6e34835556ba33dd1358b77824ddbfb02c" , f "0x9247a7ab4807416f436c690baf3d539c1f98fd8c886a9db8c1f6c3fb5c289914" - ) |] + ) + |] ; [| ( f "0xc8c290893df25913b1c236dfe372eded1b0be69e93208b8e799e809b36589507" , f "0x296df20f1711923fe733224c83f4e31f8d339b183658df6a88d76dd719e3760c" - ) |] + ) + |] ; [| ( f "0xfa1d83da396efd181f93a05206907cd170bed017a6bcb6dca9eab76b4ff02b25" , f "0x60bbceb0cdf6167e8161a48df22e814eddff517f37cdd8e862a84b96fbd69f26" - ) |] + ) + |] ; [| ( f "0xe6b90c24a04970c0b4539010bf944f2bbdb7650a08ce1ea8f0c50c160857ee0e" , f "0x9a886ed6ba96528a55e89356970d9f0bac6fb2edcb0a09a3efee5dc67837dc0e" - ) |] + ) + |] ; [| ( f "0xc822b67fcb74fa9ed0a768dc6ac8571a7bc730d0ded01d77183cc5b67d50b109" , f "0xa39673032dc94520d489284c32c41dac1f537644267b5b5dacd505b1e131ad09" - ) |] + ) + |] ; [| ( f "0xf6dbb985cbd343d7f0ff946b6940c794b2afd08f20261adc923fff1d3b73f60a" , f "0x043bc5c90df88020bc086b83bbb0ea9c2f18a5246009227a85d5aa595951dc2c" - ) |] + ) + |] ; [| ( f "0xd18309b21e2e28eada4f07fb68b0a21a0fdc3397b3bc26515e23a0ade3098d12" , f "0xa7bcd5541ab34310c4541854abf1ffbd1dbf410b404230dbcdc52d0d80f93a2c" - ) |] + ) + |] ; [| ( f "0xfc6874a7e852833435054e1f1485a66a8d3b2ac62ab332c5bede0a3585c9d30b" , f 
"0xb3f9c209fda851979329bb3effc24134dba8a118bf5bb8fc87f96d4964c5912f" - ) |] + ) + |] ; [| ( f "0x0fd1716321c8b20ca298d2c521b5254f65d8b9fdbeedc80c7f64cad57590fd04" , f "0x433f2bb74869d4b4d686af2ba661133c8c613bcf3b12eff0167f2ca12fdf1c39" - ) |] + ) + |] ; [| ( f "0xc0a4639bd490b83da031c31f2eaddf9aac59c18f38dc3069a4ba283f63f77f2e" , f "0x93d4574dc62f1788d2af603cefd0e3b0899644c196c19babb730ff8721638500" - ) |] + ) + |] ; [| ( f "0x40a30f22f9b6aef7e0c6134ea365e255cf5c2c15397a97c0ad25fa3d50eff629" , f "0x078bea20a76a702ea85078b00d36018c96b435519b86d4bd31768c134b2db833" - ) |] + ) + |] ; [| ( f "0xd1570c7a9fa5c7e4fbe23423caba507b97847053eb6e359ddaa8fe0ed5557821" , f "0xc58a2083852081d1ddcdb782b9cfc33952d7004f2079eef39540afa93c98f707" - ) |] + ) + |] ; [| ( f "0xf24774725e3e7dbbf9b79510166ebc14b5a8c0bd3eed1a4c1f3a23cadc1a9220" , f "0xa7bd74b7e08058167b0e6a1b5dd5a2bef25ec72d9ed45606decb92c773294a29" - ) |] + ) + |] ; [| ( f "0x6e7ab05ac9515862a618f6dfdc8cddbc586994c6945c5776cf7b5a67adc3461d" , f "0x69055a40e6b48fec8551a7bbeccc694cf715d0d065f640d0f7953936955c763a" - ) |] + ) + |] ; [| ( f "0xad0a76d3e1ce115f06360c72e1aae0f091d9df1559e91725fb455c05cc02dc0f" , f "0xe14ef230323ea38bdfe983f7209e9e5d113ab0fec528845d146cfd4f7b5d731b" - ) |] + ) + |] ; [| ( f "0x61ea4595e98037d6aa6f5c1ee9afa4e3d907112c912baa87edf2356069abec29" , f "0x6458c6893ab27f7cdbdf3cfc1f1a5ec40614c07f9de48e91f83f5afe7cd6c32b" - ) |] + ) + |] ; [| ( f "0xd5d9196b74442022d68cfb872a5011494d13f6abc4a908ee555fca621eda4034" , f "0xb17b43126a2d00b6a958d8e975ae11b6d76cf9094bb546f6b480615ca0f2780d" - ) |] + ) + |] ; [| ( f "0xc0d6ae3a0c01ef67b387c9204dda44330dd2030fc2626980e69ada19e5e9b602" , f "0x3d20cf4adbfe2734bebce993e78ff72ad6f4dc199f4254a1cb817573a4ad2f16" - ) |] + ) + |] ; [| ( f "0x74a99bb8770ba52030dfb34ff12bd79c1c75c7ffd3590c2b32bf27885eb6b613" , f "0x24f9598bd1dbed87b4b4c2ea2784e4d2ac26df2d0ff699442441c6d6a44f260d" - ) |] + ) + |] ; [| ( f "0xc2552c5e603fcba0388e213280635f573a18db1ed33e92813616d3c434b79c30" 
, f "0x43459bfebcc01136ec52f7db0b5d743c30479f8bd65a9b9131ee7b8373d3a213" - ) |] + ) + |] ; [| ( f "0x6069b3035c24bcd6f872432af02223dcee230c06deac7269433a395c33fd4100" , f "0xb73917cf19b49d6afccaf9268b7c096311e655acdca9e73fc712be90ecb47a12" - ) |] + ) + |] ; [| ( f "0x3d5136d7cec34feda3a9e69e3f027795dc26459942968ae843d201175896812a" , f "0x4cb3575dc189cf3419fa5684e024f0d18f0fca5cc0ed643652042f8d74d29632" - ) |] + ) + |] ; [| ( f "0x563ccf216e45b58e594bd67d1532f0a3b9e6a0afc5f32be230b595bf85b40b03" , f "0x3d3a1585746d59636326dc32a2208086f34894815111b4268c2376df6bc9061c" - ) |] + ) + |] ; [| ( f "0xf94f7c7a3701fd6c76a13c363498283e2120adb4ff8717b5682cc653cdd4f93d" , f "0x170d15a77369af25fa374fc043527b9fea29850d846a2411c22044ff7c69611a" - ) |] + ) + |] ; [| ( f "0xd7ccd3660ba2693e9814bc7ddaaface278e93352e662437b2f6f2529146bc83f" , f "0xa3043ddba1da17aa2ebb6f87060826a65b88c316bbd7c60a762a460a1479fa18" - ) |] + ) + |] ; [| ( f "0x5fad066781c63bbfd0963591380064af95b03d50aad9eb14f4beb9c3a1b6a924" , f "0x650b5d8b34d083804a3dbbd25a5a9139be7831f92e7d196fd4e2973337c5ff09" - ) |] + ) + |] ; [| ( f "0xf3a4aa08ac76bd3a770ac269e670c07c3cf8c05f6fe4d011cf920257b898640a" , f "0xc51f97b477be30166ded09e9786575a28c6cb94840045968c04eb2aa2e5dad15" - ) |] + ) + |] ; [| ( f "0xa8897cfd828255f6b51af7765324fcc1b17e6ad7209a40873a24a679f34a453e" , f "0x96d74b8190361eba4855305845e72bcbc368ed4c725965a69f6ae77063d5b639" - ) |] + ) + |] ; [| ( f "0x5a41f7224e9b61e44e5bd6375378768d8d4e0ba03a3587f04d80cdd53ee24507" , f "0xa61914676166205e5458f09ac36ec9a739dcb24ec4c99677a3f776c5abb49c3b" - ) |] + ) + |] ; [| ( f "0x3cc9f9de4efa3b0bc2f45b23a9d69fffc935b8d97b40310653f796abb83c7637" , f "0x38ebb475b51d10f60648ac41f797eb3f880a679cad10fc6cf06c1cdbe609ed25" - ) |] + ) + |] ; [| ( f "0xa9b5baf3ddc3d94b3bdf1d40efc9a1e246d99f515bb13ca746c51d7893ba6127" , f "0xc34f2fef690dd1a748ed99339acc341eb915b13772ed61a154b56e1bc870f838" - ) |] + ) + |] ; [| ( f 
"0x3fb1503167708fd667d9cd46ac5334cec7c21ea2e53fcfc14ec647c3cac9451c" , f "0x944687f95b232fc770e3030b1c088464ac12a164f28aaee60f86b6e8022a3410" - ) |] + ) + |] ; [| ( f "0xf941e556dd84945eddead511404d22f4bc7c35298f4b5d4a98a14f203a348936" , f "0x22a0c170b5ec19b67b1865c40b5266ed5147225fe28e7514448f22c1c11f231b" - ) |] + ) + |] ; [| ( f "0x200b2f40340d6df4152d42a4bb1f4c64d30ae8049bc3edf22d5c7f793f5f3a35" , f "0xfff56ff930c50a98cb302bb30d955ef7a4c1d665a6bc58e8a2e95cefc1b84e1d" - ) |] + ) + |] ; [| ( f "0x37ae7ab3ae9ab311959faa442f8cc094d287b748f57af875b704f24ac073f232" , f "0xb9499bc8ff0cd4cb8a9498f9293ac28c4a6e2b19a4439d3008fe3fe400d75e20" - ) |] + ) + |] ; [| ( f "0xb93d768bcc998ae13eb4a2fcea6d2d2ee67ef0d2f93a4bebc846a26b925caf3f" , f "0xca7ad62c29e5b32cd641a9369fe2c8d75523b750d071ebce39945b4bab50ab1d" - ) |] + ) + |] ; [| ( f "0xece39a502cc6d6a8a1d52e20cb052bb9cc1d3614d2b525f2095c8713890c2c0d" , f "0x8b7a924cf86c821545f3d7f67399eefa15ae4605705bbafdd37e1019a5f8cc03" - ) |] + ) + |] ; [| ( f "0x9d1fa7ebd35bbbca5b204c81076a040fffd91caf4c4ad740c9ff62884c73ce16" , f "0x212c7ee300184b20a644f33231502bed5e5799f27125a7f1600017ec30650b1c" - ) |] + ) + |] ; [| ( f "0xab8cdd3cdfaa96f4597da4e51e458ac84fb529d1518d087b7bb8af43693d2b38" , f "0xcb4e51ebaee490dcbcbe5a05247acc1a36b39807f9c5ca7f6926a4267af68e00" - ) |] + ) + |] ; [| ( f "0x18c92b85f3e83a1cfea079cfde7d00c5a1e3a07938c198219781faed20681618" , f "0x21d9ce2a5d777b6e58182fd0a6f0ca2193d0711f2b297349b84c88c77ba0c12b" - ) |] + ) + |] ; [| ( f "0x211b1ef879b043dc817ebcedfa1c8eaa5aa36edf1c4550d6f1df66d7587f1601" , f "0x01243e87d5f2a7d08ec79bc949f938a942990d027a5da06a3e2ba0d3e51d1a3d" - ) |] + ) + |] ; [| ( f "0x233dca039d5e9c042ad6c2e597fdb4066d1bb994351b7726aca3bc73ab38790f" , f "0x52420c39da33707f6314aefb268c4bba31b9fc83541c97cbfce4303feb40950a" - ) |] + ) + |] ; [| ( f "0x19ab9eaafad31bde7ec259f4b2ebddb44b1cf83cb8eeb2351f280ed0aa81100c" , f "0xada38ebefdd0d7e47367e491fa7ac5159b1337ad354ea3c143a91534600cdb37" - ) |] + ) + |] ; [| 
( f "0xeaca88a526bf0a32dc78e81ccbd820ea8018d373850fe70b3063f8ba96e6ba16" , f "0x7ee4d47f5039988e033a33fd848979f8bf17e842f414ab9b0765d59fd02f5f36" - ) |] + ) + |] ; [| ( f "0x7905bab0579aec0f746f126b4b4da2199727c812164fa5e997ffc7c3ddf05201" , f "0x7dfcb78297e745ef94810b5d4086c394a5d8b472073212096acf99202ca4d819" - ) |] |] + ) + |] + |] ; [| [| ( f "0xd83f4197e72111574544a36501d5e1ed7121ea6ea589205b53b9d76a39d1191a" , f "0x5c23d40c28f0249a1db4c74b2f12087f84ae997b20caac0b686539209d8cc239" - ) |] + ) + |] ; [| ( f "0x9b18c90e6a3c4335c7a7711bd6221939732fffd30685fe1bbc7f517338b4f711" , f "0x706bd90a6c06622b087ed5b846a701a1df1077c3d742667586d7af6b9b3de613" - ) |] + ) + |] ; [| ( f "0x032ce3f04c33b21a93996b16e96b41f1aa1be6a53d3e5eb8c5ff637d4b542618" , f "0xc969c59790e52b0aedfbf63d38d803d72c67d040a3647dea9356180c5f3fa52c" - ) |] + ) + |] ; [| ( f "0x8372e823465fda8608b11dea71bed0f870eae6967b6acfa43930427af0cfd320" , f "0x9559a16c297b7865ebc8deaacb4e1289e8719e263622653565257af505939907" - ) |] + ) + |] ; [| ( f "0x145dd850b745b1aed8017e3a6f8b40bc54bc67fc8ecfb5d684c2d74d6951801d" , f "0x94015d7fb65f0b1ca6e66813fc138352aaf2ebaed207899dd4a2fa67790f853e" - ) |] + ) + |] ; [| ( f "0x1ddac6f3565c41e611ee596eb5d94068822fc872ee9201338cb21247d66e581f" , f "0x0aacb096eb665e05f2ed0f4064f1c7e8b8d1dbcf64422ad54ac2dd39c035750a" - ) |] + ) + |] ; [| ( f "0xab3556accd40cfaad6bbb1188172cf6f352e56abf3d069d0e3dd266dc799a520" , f "0x7205c16f71d522ddb7870eeeb4e89b66059a1372d36b7e4bfbbb19870fcd0a0e" - ) |] + ) + |] ; [| ( f "0x97842e86a29548e398051afbcff5615f43fa72e7627194782687a405fe8c4439" , f "0xaf2967e00213c0e6c08cc87c02ffe37e6b5ff27c603d5bd3372df7771bc9dc28" - ) |] + ) + |] ; [| ( f "0x8bb77d9a3e663e7eab3b1e353e6397099b78ec64b2931bb52da39f03cd00a529" , f "0xfebc87c8f9a6fd5b0e334c25cca1c2f5d26da1efb18bf3d006f723d5275f5728" - ) |] + ) + |] ; [| ( f "0x9e0e1fd59ad9e8a6aa9525dfe5878c0dc024143ed9b3540dbb899694adc4cb25" , f "0x06ab13950450c1a274d3e5d64a09b8ccce91cf8e1a80f4f7ba93382834ba8b34" - ) 
|] + ) + |] ; [| ( f "0x967b17898427bedf81976ac1f63378cbcf9d166f279d9a6de888f2b124463e19" , f "0x0f454ce2a792eeb2e9779df16e0896b298b6237e9f4d771c409245ea540f1e2e" - ) |] + ) + |] ; [| ( f "0xe6d49ae2ec8a19eb2a80adabc3e3f28cc305de6375392e645aacd4b8720bf830" , f "0x1e95a8157386f84fba94df8668f67e5568c0ecac2f454c4ece08d5196958570d" - ) |] + ) + |] ; [| ( f "0x29080ed79f672a7dae58847bff52477d219f46c4a0dbe8d2add6cc3c94b67303" , f "0x934b9093da245d8a4198308c5753e916aeb527ca6adacb52129b673521147c17" - ) |] + ) + |] ; [| ( f "0x2e3c3483d83cabcb240346654a52d6347a681d8595b0b9fdf211e5c84f91be1c" , f "0xc12a97993cf3e9c10b8e7cd21b1e4cabbc938673557a5022809d16ff15d2db1e" - ) |] + ) + |] ; [| ( f "0x4bbc8c7470356544917172927343e479348fb976a01ffba9af49781de82ab824" , f "0xb817934be99172e8d30d4a883a69a78664da2457478d2cee201c5bf435653c12" - ) |] + ) + |] ; [| ( f "0xe278e0c9b25eb582ba530d0ef17c594e32a062c0f252cb50ac2df45ca12b320e" , f "0xd492f7d50fc21d31a6337e1dfceca58ff13b1e154b18409d9131235a3b849f20" - ) |] + ) + |] ; [| ( f "0x2c4c92e50380a256df42b4ba46f3f6a4593be9c37c6f39b746e3d70f86b7122c" , f "0x01f402ec80b314cd98a5a53c945f2ac52ad75ac24be229dbddf04ac94dc1fd08" - ) |] + ) + |] ; [| ( f "0x813d55537079d82b260ce9e9cc63e19c3ce6d0f1d6fe7b776eb1339ff9495618" , f "0x159a4a35fb711a068d4f382105aa08872a33132f7712550ffb893c651ac84e16" - ) |] + ) + |] ; [| ( f "0xd45752a4db2c78ca0d6c3493caa75b8f8822e68ed16b02693242bae65ea18a26" , f "0xe168fc926f03a58665e110ac6454426b31008156862a65ea2d1600c1cc204339" - ) |] + ) + |] ; [| ( f "0xbb969da34a106db0e1058ecd5b95f0e3367b4ab24df704f860179e4dcc53a317" , f "0x9a5f4c92cdd642dba24fd92216cbc43c87fcc03f949c4b54f1626162fb4f4d39" - ) |] + ) + |] ; [| ( f "0x5d2d78ac08d08f5f8e8700099e43513ad21e0612c7bce9f4bcc6581c25831531" , f "0x421f9dfc7cee4c1d497ca09672b9ae3dc5eb936b5e9365d6d3ac980bfcb8b518" - ) |] + ) + |] ; [| ( f "0x61d1ac3e26d4105ddbb0770ab69245d0f88d6a454220c735bd16b9e142b9922e" , f "0xe367339495c862e488e069d9fc69c84de3abe83b80b5b40552c7353371278209" 
- ) |] + ) + |] ; [| ( f "0xb3acbe7c4e26fd0cbc3dc40ea6657b0b90eaaf9563fcf6c5de1c06eb40a6e63e" , f "0x0d23f781df9beb21ffd269649b6b02d2628d93afe3beede2c09e8a51901a2b15" - ) |] + ) + |] ; [| ( f "0x90aaa711587dc483ee40dc309a46a9326df491cc36441ca19e34b4f552c7fe25" , f "0xdf841aeedf93c9aa0f99f8c0e83a720da69cc3e8e5890140cc56581e73bd9037" - ) |] + ) + |] ; [| ( f "0x3820d27ad9efd1ee3d400a5bba09738315a175e3ecfa1598e25f5e1a4baea22e" , f "0xec32fc425a21d3f541c6372260d66e0fc445ebf97997492da8303e74cc9ab032" - ) |] + ) + |] ; [| ( f "0x23398a5d83ba29148683e40add82c5e9950b31adfa6d3de734677688ecc8a703" , f "0xe76138146c2f6ba82b095823ada8ba116634359eec8a9a6274f359d050f5ed2d" - ) |] + ) + |] ; [| ( f "0x415750ebc5dad48cfbc6052b0cd015a9fd40595a968828ae0b4dee72a8420c09" , f "0x773c74bc5ff70745582d51516dc8a2272c66bd79ed80a390a0c901369183192f" - ) |] + ) + |] ; [| ( f "0xfd505a3b10ea42a4a14b93eebb365d994d8fdf1519760f7f396e94e75e9d6b37" , f "0xc919a98f322d49d16cb6c2b8143f64c057549cd075fc0c7554726bd340b29e04" - ) |] + ) + |] ; [| ( f "0x7a30d3213382befb17a4971c308fd04247476cae9d0f741352b6ad492253f70b" , f "0x1e2da7732eee609817926296476ecc78668d2a5aac9e43231864d1e51ad53028" - ) |] + ) + |] ; [| ( f "0x941024c9f669845f1e5dce1b5f766334e1e599e94ade7244dc90f9a36a615f31" , f "0xd812e7718bb8db9e95a4ee9741178f597633aadd78559c7edabc36e853c09039" - ) |] + ) + |] ; [| ( f "0x3a6fc17eb38f4b946625dc33979ef17c3352285303cd64462221d3a6aeab8807" , f "0x098a410603dca3442644a6af42b5c20243607799bd2870c958d4fe36a8b9e431" - ) |] + ) + |] ; [| ( f "0x2418ca2d0810f42c025b7ace052fdfd0aa7d56f4d8d382988c2d6837269ea227" , f "0xf804bed370159734f835fb4db15ff9e378df52fa119ff36b7e32247f7db87d37" - ) |] + ) + |] ; [| ( f "0xd23d12e1f3c0c91bb5a1e55fa67797afd1af15653f3064d9c567711f36601a2d" , f "0x2132a9965b4827b6e7ecbb651f99cdaf1571b75618fd093d0d8feb346e5cc50e" - ) |] + ) + |] ; [| ( f "0xfa8fc314360cb06206efc0eaa759121fa06f36f6b9bf23f1ce0a389fe6b1d71a" , f 
"0x4b704e9bf6ce8adb28501c84f399afdf59dad77efba3a386de0b9d3396534b0d" - ) |] + ) + |] ; [| ( f "0x021617372527b0872419aa34357f7bd358adc2557ebd66f65f1136fc9833ce09" , f "0xbf518d425b2b5c6f9391e376fa34fdac1e9c8e398124e96a676da3791a6d440a" - ) |] + ) + |] ; [| ( f "0xfbcf318db23bfa5e9ce5288944c0ae4b9dc6d07aa52ee122dcc75cc37af04417" , f "0x5bd54828e7a448db70e4f154f541f60accc407a7cfaa6936b0d122e4aad62c2d" - ) |] + ) + |] ; [| ( f "0x1d3bf8613ac912bf2e565abbf4bb6aa430c9f668110d7d04ec01ae96a25e341f" , f "0xed000cec881c1b74a57b14e0420a0a7668f3efcd050ff0b04dc18daab5709e1c" - ) |] + ) + |] ; [| ( f "0x52aedd6d20fa83060e20f728a8d85a23255ec2d8a5c2a476445e7dcbaaaadc01" , f "0x8df586d1e6353a407e6db121f4f3208999c048348e37787c914a4186a2bd5424" - ) |] + ) + |] ; [| ( f "0x82532a2759e48e01ddaff68d58d997479f18f1fa027e43d7afc8ec13fad7e700" , f "0xe573b455573550591537e1921bb16652e8df736273ebd8308a37c19d7d7a8924" - ) |] + ) + |] ; [| ( f "0x88c9792410fd08d44ad000e957fa8cceaca474f397d24702e1ecb4bd1cede328" , f "0x37bb756a90e8629410983d7ae81cbb3dabbb6ed9ae3565e50134e5edc8303516" - ) |] + ) + |] ; [| ( f "0x4bfd5f6a6e27d2250e3c44f2605624dd4bec29532004abd219129eb50371992e" , f "0x8fee0cae770364c5c5672066178ed9ac66d4b982e623b357a9366f434cb8ba22" - ) |] + ) + |] ; [| ( f "0x7448790fcbc1a1e4faad3141a37b763cb6cf36a22ee828b30e6fcf0e42705803" , f "0xd70f546a08a6c1a081e7789d8b9162564467c4dd0ee2fa5ee8e12cf59495580c" - ) |] + ) + |] ; [| ( f "0xd5f26d73ef2dca1eabfd32cc7c6ed44b5d62448fbe9330f704b53c8f99f85f27" , f "0xb1db2de8ea7b439ebc7edf3850d275cfd2526d202f0c1cdaeb67f10f27ab9c3c" - ) |] + ) + |] ; [| ( f "0x5a7bbac1b8ebb390f24e8c55cc82acf829ccef0a4414b645fa12b4d9b11a3331" , f "0x8c277b7e12ab24f59feb49a26025d460f9f09d82a090fd167de491298be03737" - ) |] + ) + |] ; [| ( f "0x7bc3fa011f484c8ac28a4b27c962b9cff8022e3f03dcdd777a9852938206fb2a" , f "0xc523feb73afdf95037b9735d36832821613b6d92799bf42f56a0af783b4f3739" - ) |] + ) + |] ; [| ( f "0x4febeb0a84a017442b227bef144ae22272897b529bcd124bcc2e394fa870cd27" 
, f "0x55113be6dd4abff91ccd4de259d4a5d7a997d2bcc1e62712971466109ec1ff03" - ) |] + ) + |] ; [| ( f "0x9f66e443ec7b8fcc86f9278410c71a3b9a52fafe1141426ad310a827adb69e27" , f "0x551374d8accf19eb7a4a192f468d1781727426196e6d85feb92d573a190ca122" - ) |] + ) + |] ; [| ( f "0xda77ff1f0eafd6a497dfebb9eb3c752ba46e8ec4f7ab952b55c79de85a22cf02" , f "0xb253d72cef06bd6068078473c742e4b3f7ba3aae1eaa83913a10d8e31729ff35" - ) |] + ) + |] ; [| ( f "0x83bb32e185390fbacd89b4a2c989186aebaa9621b089d935e595072d37d6932d" , f "0x647f4efbd0c8797e0ea921f1cc25334e6211d2e0f757a87e59447e00a3b1232f" - ) |] + ) + |] ; [| ( f "0x7fb00df454721531da5bf4124d17c39973aa013085dcb1b4058fc0be19faa532" , f "0x4fb6917cc5182df350f8bc2914a27a0094d5ebe3c00c33a87acaeabc0e93290a" - ) |] + ) + |] ; [| ( f "0xfa58279b181c0677c6cc7ce3510fc5a81336f9f2b5e10a866cf984389bb7a805" , f "0xf6e81193677903da92b4965e044f5702ace12c8e8e86963c728938eff9a45b07" - ) |] + ) + |] ; [| ( f "0x0f324e08cc2da07497343d4773883d1f62be7ed256e5e3b4e1d8dce3ed8b5607" , f "0xcb6bf8d2fb72fe7b9bfeef979ed0f245ea4da3512ae510ed8779bf583065c403" - ) |] + ) + |] ; [| ( f "0x063c01c3884f52ce9bb46012af3547397f8a68d073a3ef43bd873789d5e7be10" , f "0xcd10132d9b752e98d32249d6d708ca086cc9e5dccd1f81621e5de96f69f28224" - ) |] + ) + |] ; [| ( f "0x654f6fe079d76bca7b2cfcfddfb9d92de936cf1b50fa394d109d7c0c7469db17" , f "0x101e6645c5b81829b9ed1411fac9ccc88daad89c13abc825d54a2931d1693c1a" - ) |] + ) + |] ; [| ( f "0xa2d816f24e12b559bebeb2bb1348a34f60c9581baac45f144e1fd75f4a825e32" , f "0x9970b882499c3ce39356d7d9e6515cbc7f4994395f449160ac223243c62da406" - ) |] + ) + |] ; [| ( f "0xc82b4e5bee021e79cc051b8192efb8812b3c8777a92fa1b033ac816742484622" , f "0x5c6afa055a3a0bbad639a49aa97956b86fc5bc85a85f331a95a412a799943915" - ) |] + ) + |] ; [| ( f "0xed330d8c6925d69b369224d734b0fe5da4def0806fac48e88f03d78fab776719" , f "0x4c2e7354e77e0892272fe0b8048752c837cc9fa5edaf98fdc78a0285bd5ef104" - ) |] + ) + |] ; [| ( f 
"0x1bf3b8f7aaf1a074489b3ec8f7d4a4dd496ac4d41a6057486e17ed2218bd5b11" , f "0x74e9cba02736f90240de875783b5d27450bdd8760562a5382b01d1ab11552c1d" - ) |] + ) + |] ; [| ( f "0x76943ec154f9f5f207f2e331e026a49420abfabf0f13f3fcbbaec263045f8215" , f "0x904b627ea925fe0936230e8fed2d19d3efd5ca2affadb7e8d1124b109c8a8226" - ) |] + ) + |] ; [| ( f "0x27402f68c2df197714a886ae652f057fd31016289db7d7fe8ee1a48f08ee6013" , f "0x4dd9a90afa859d983c22b01dab6856ed2d95325dd6d5cf6c5b92ba6608cde813" - ) |] + ) + |] ; [| ( f "0xaaa78125189ca1e7bbaa68eee357a1ee3d975a06fccc3c7daec638728829c50c" , f "0xec1341064e249caf8305aeedb3197665655d891c2aaafabf3362a530d66f203f" - ) |] + ) + |] ; [| ( f "0x525bfae074634131fdd1be0c4e056cc660e13c54b047105368326207f589a427" , f "0x1ee02e3f68d1486220cba4e507cc31facc5a6f45e5bee5b69dfe2e245ad9440b" - ) |] + ) + |] ; [| ( f "0x720bfcbb48553a9dfcdb27f49d7549bc69c2c6eaea8bc70598c60c7441010c18" , f "0x44e243592b3a0697fdbf5764fc28e55faea3ef4ae6c2fd78f3461608f259cd0c" - ) |] + ) + |] ; [| ( f "0xad252848e568a762f7f037cd55cee0db473d2940884890b779b7b1cb7d464e17" , f "0xedf301553511f4a40a4ea8b4442482a24253ba4bd5e1c517e56b6f6f9aa48c2d" - ) |] + ) + |] ; [| ( f "0xd2e7dd487f3d0abf211813b72ca5c819035cd7500653fc9b065e51b91bd6272e" , f "0xc64a119a4f9714be0a681ff02864ff4f2be965fbafc5641268ba22b3a418061f" - ) |] + ) + |] ; [| ( f "0x06fcf9f6acf62fea74145cf212f9f81554d1c2744dce86904bd92a5b4fddae25" , f "0x3bd34796b671eee60d4effd0b4b76cef806b72b4e874f97cf229b99c9ba09f06" - ) |] + ) + |] ; [| ( f "0xd687f58bec2fe185399143095ab6c28d58918ad18164918bf846d48ded22f812" , f "0x73be60e581df69e084388fe1b0dfe84589662e4b38659fcf8b212060b485591e" - ) |] + ) + |] ; [| ( f "0x231de7238407160eb906e693defa7eea1a5e87913caf6ac1aa02ef274e77ad0d" , f "0x6541ca2535f68a8d213ec4a23f9a073d7e952cffb5d0c69947a65246cdc24208" - ) |] + ) + |] ; [| ( f "0x6c6292698ea8cdc1359d7bcf5437d4c3b25b93c7476a4b2c57f12920e0cc6719" , f "0x9c79e713c1bb6455e789605cdcdc437bc88559bf7bd3cb1bf008880f5bd9091a" - ) |] + ) + |] ; [| 
( f "0x391c7974fa47f09942dabed984c23dc679e754c9ccfa4b89583513fa4a3f2d03" , f "0x06b7851c5d170f82c1ef5fab55ce45a442665d2d3703dc4326d06061fd1db911" - ) |] + ) + |] ; [| ( f "0x54178f0bd58fb442003935caefa53953c22a1f1321d872b8772822894cb44825" , f "0x5d1e702c1807e9b7cb0de975b5dd16a6cb1d1d32136ff4ea47987d770861d118" - ) |] + ) + |] ; [| ( f "0x5bfc08d8ec1041cd4903d2c6939751061c68969d7fdf35db549c348d99b8c52d" , f "0x323ccc86a472b7f1cbee8dc898f5260c6bdc29fc7aa435cf118a57bcbf47103c" - ) |] + ) + |] ; [| ( f "0x9eaf6704ec39baf6e877dd85aa90c686e7fc37f71cd5c57c2e57fa1d00a29c08" , f "0xc279814507bad6fe03ce52d884014354137ffd4ff1da7663f0455c6c45e69029" - ) |] + ) + |] ; [| ( f "0x6a7d47de2c164fd26d60499cdf3583454e07d2981dab8dcd0be367533ca90a12" , f "0x020a96acde24eab4ca05500786ad4b80557e1c13d07aeca4acfaec2f873f2316" - ) |] + ) + |] ; [| ( f "0x37ee20dfe6e554fc7d35655ce2d6e39a71320dfbd20d3c06abb536b5a3eee605" , f "0x3e38a27d9da448d2a98c8d3e0d4de91563d0a74c630203c3d523b7e6fd17bb2a" - ) |] + ) + |] ; [| ( f "0xaa20c589e4a544b70a8f61360c766ccf3acec39c33c6da9a3d4b0a0f9f00df32" , f "0x2d1ef8e398c0657ad52068361bed90123557f1ef1c61e36b6cab7be63ce66b36" - ) |] + ) + |] ; [| ( f "0x33a61c36ea4fbeb31cb28a17ce9d50b6fc396f15d6fc7dd976e69759c0745638" , f "0xb53433f71a059b64881963c1e3c9a670b06276050822fcaaba834a2b5b7ec822" - ) |] + ) + |] ; [| ( f "0x47d42c5595f217a35ca091bc0f25822025b2d1c5163eced2104576228b20db00" , f "0x00b9dde8ce7f1e98e02aadf01fb4016e4585f9717206c36d94fd9f44238d4702" - ) |] + ) + |] ; [| ( f "0x568e026ff600023abee9250f7b6094593c0a55cbd8c69ba3cf5019d5ac81a329" , f "0x2c3de8f6c6797df9a8f6d9217f86f016b99ed162a942bf99a7f82161758ab43c" - ) |] + ) + |] ; [| ( f "0xb7347c795146684eac1184bcb1bb6645959522f1f4b827c0389f9bf9e805482d" , f "0xce4e75b4b305e5f909ce79ea0c2acff9426553f6fe55e199993c5c4bf87c0724" - ) |] + ) + |] ; [| ( f "0xbd77b8788cbb31a6176f5301bb4153ffed23f2504e018a3d1195cf138ce08e16" , f "0xa057ae23e4c501316868602c490b0cb16e64f69a74b6e74ea5078232179dfe30" - ) |] + ) + |] 
; [| ( f "0xb28311978da822a639bb808d12bec248a65c7197df248d57c480ef75a781722d" , f "0xba31ad53182194b21191dec90d74aaa0e2517bd60e715dc6921453c58f44b92b" - ) |] + ) + |] ; [| ( f "0x1d90a7cbb0b6177f1929cb431f9e8be393fdde5200c80ba11cd0498d3124aa03" , f "0x1852fcc8ce6ad4bd422c636e23316ceb31fec267ba7ddc8f4914c50f0f1d9b29" - ) |] + ) + |] ; [| ( f "0x8e69a1404b3ed94135511b2b5124af4558b99d20bc28cac5629e48d06ecaa717" , f "0x6d2817c33244c62a433eabf4f2ca79c9ef65754c634891749824d3577754dc13" - ) |] + ) + |] ; [| ( f "0x6b9580683d4fbe53dd491901bb471ce68bd51dc6b5b25e932b9325a6065b6e0b" , f "0xa03d5c7f995b1389b0ce2d6c59c729199e226590a820ef679f4a75e77808de15" - ) |] + ) + |] ; [| ( f "0x9d8edfd45e956f2644e311a7a54bd227d4b7a9f0a4a89ade752b71d5f906aa02" , f "0x2f10e429d80f15e36a613562484d7a4925c30ff99551b62b48a31e6e71a87d31" - ) |] + ) + |] ; [| ( f "0x85cb869090b9bee9a8f1734d0b00f61740ba64ec96235d3cd78b403c20ef960f" , f "0xc4b998abd6cbbcb3711cdc2bc3c702ce41c10db4abe993212e940adf541ec91c" - ) |] + ) + |] ; [| ( f "0x10d60259d99c000b6137df62b43bb427ce2750ff6f3554fda40850ad81bf0e1d" , f "0x4159f19741aaeb461c6bf81baf1db273077075bef6ae2eb4900cb6020c3e3404" - ) |] + ) + |] ; [| ( f "0x2ff38b8d053c71a2c711b1edce88ac5b1d7c40096eb4d54be8a142f9dba75f08" , f "0xd60d2bfca86e3785277312d19cb4916ef91eb978ab7a615280889d2200a15032" - ) |] + ) + |] ; [| ( f "0xbec97a5bb4e0931301c3bd5067debbf7be6552795e13d979f0ed992cf99c3614" , f "0xf73b710cb46653bd24650d0bc35b20828fced1447e965511bbc0d9f47d62a63e" - ) |] + ) + |] ; [| ( f "0x240deeafed986cf566dd95a87ae7f398f96cf72a9c1a327c04558da06ee8302d" , f "0x56843c4c6d486ba2f75a9d65e278f86eb96cc268751c564e7a276a4753c2371a" - ) |] + ) + |] ; [| ( f "0x205031674b86385689ab5ed08a2697ebaacc963561459d36f653327244609137" , f "0x31a5d6546578107d2d5fbb435c140907e60529b141c49e036b7e91049c6fe635" - ) |] + ) + |] ; [| ( f "0x6c0c986ebc2dd383e2c844fa7df89e80705de65068757bf77f5bb4ff8f1eac26" , f "0xcde02d96b19105877aade89af219395bcc13b84fc8792cbfa0f1ac1fe21daf2a" - ) |] + ) 
+ |] ; [| ( f "0x7ea819567df6e452b977d2de1466fb2338d7ee00a953a2090d2cdf53d4366011" , f "0xde7ae1862f4c8bb9f2913bb37258ddd0a36d83bfbc5cc3a92f9c2d1b6440c208" - ) |] + ) + |] ; [| ( f "0xfd906e4e77362fb822cefee5438273b1ca8d7f9d754bed4b2e9ee038b6f6fe02" , f "0xdc64eb53aa125310d8492fa91a5693c76b0e9df909936e1cb939caf016e96612" - ) |] + ) + |] ; [| ( f "0xf093e3260b97d7d788b9c834944abcf2f4dffd9e2bf5c628f46e9e58a5f0721d" , f "0x1a487cf71dd39b4af93bc0c9ff1016a25c7ed4da2fd69a468a26ab4f8ba1c112" - ) |] + ) + |] ; [| ( f "0xa92b6e71ad68696553dede5cec2467b0a735bd70d019deb6539b1703c3b93325" , f "0xf1c394f7c096681aa680ed04fd7cd4858bea3f7bea611188b1a9b731f4e47d09" - ) |] + ) + |] ; [| ( f "0x90b4adc38d3676c18797a8868627aad18b40db8b7d95147aa6d387a004f5223a" , f "0x7b3d33e5473fc6fb0ef649c4957906af8b59d2caafed5d09d0239dc558ffbc06" - ) |] + ) + |] ; [| ( f "0x24d19ce2cebfa3eb984f4b8c415a067035ac7a5e8eaa74ac26275151e06c2510" , f "0x6139e483d5d4049b9e19bfc888ef4cbd4e08b669194248f54b9c528c3215b41f" - ) |] + ) + |] ; [| ( f "0xf9cc6c4cea39b72e9f3074a8a7eab09743d4d54d1f22b7c3dd3fcbbee4e90020" , f "0xca8c16e9a47a6d1b269aa3f8faaf8aee8da50ee67e0c43330af01eaba14b862f" - ) |] + ) + |] ; [| ( f "0xf9cd84ce4d16ab44d70c4f17fe74ac16d1f28e1d69c8bfc23dfc0dc29d6f3407" , f "0xae2bd3329af1bda719f642b036a5e6eda21de5341551d111e765860a2ffa5016" - ) |] + ) + |] ; [| ( f "0xd54ecefb717c630520d13d3f4818abfe0dc0131788620752f5e0f5b1f9f84422" , f "0x33b2d4ed0fb6508064947729feaf7ed564ba6322b997215951b840e52be7832f" - ) |] + ) + |] ; [| ( f "0x697c78b5c2aa7ed3b238ddfe1609236f9a38bc49f5a8e29f4190fdc5782b333c" , f "0x4ccb908bdea6e6f9eb10d0f24de52145ca121f75e568e6f5cbb2dd3072928c0b" - ) |] + ) + |] ; [| ( f "0x312bc95cea6c31781e7d62dbc7ec9089d6bbcbfa58e14eb673c4ce6845e89e2c" , f "0x206d8c91c2bc36c37fc4829837c2615c765c8b9c3c0a94a1ac43b44c0c99143d" - ) |] + ) + |] ; [| ( f "0xe0ef015a5f421dc480223d7ebbf8bbf72a848f5adb1390bcb3f0983413ed7a3e" , f "0x65e686d7c76218938d28cf13149111c33cad8d8e49515d9a054fd5e2a67a6a07" - ) |] 
+ ) + |] ; [| ( f "0xced0da7ee3773970349343b387fd8619da99d42fc90c3e83101b371218f90e30" , f "0x2f33f380ac099b9fdad077ff67fe1f87802fd569e89d18c49201846b5e52d014" - ) |] + ) + |] ; [| ( f "0x4081b6ca04f302225c85ffdb20a5beb8089a4e5947144d95f89a51304df08520" , f "0xa6eaa96a4e5ab0e62a77c8a54fb6407e5032dcb0b8f8d2177baab42f42c91709" - ) |] + ) + |] ; [| ( f "0x9d00f1efca4b2f0b918dbd66a1cfa844f28152ecbe1bc775e63e88b59936a838" , f "0x9ce2ac3ac801bbc15176927f4468012723115e04584297a392aa6e3cc5074d1b" - ) |] + ) + |] ; [| ( f "0xdf2caf4a3031faaf8aa8b8def1213e23de770b6a178f63737f24e0aca9de5324" , f "0x8ccd1d0ffef0a2002b6f4d6ea6263c9de3ce23890657c1f905259df93226b42f" - ) |] + ) + |] ; [| ( f "0x5b054bb1f290d6f8de9c50d5ee184d082f8aade18eb57a3d0affc5ebc4fbcc35" , f "0x3f806511d3e5898560f347c40460e9d4068d26f13cd5e0028db551b3148ec109" - ) |] + ) + |] ; [| ( f "0xe7900682288c5bf21587e4e89b6d94c1c8238246b33e543a5ca98e42b42e540b" , f "0x9eca4615b7b2e2a2cdf1223546178e0133ba1c61d05fd7c12e44ac16d0cf3a0e" - ) |] + ) + |] ; [| ( f "0x4e0761c7641dcfd34562d7704898475fc4e95c977855195771a35f55787f1f0c" , f "0x513291ad96db3d66e4b6529722efacd8fe10936c048d93599da36ff53e859f16" - ) |] + ) + |] ; [| ( f "0x798e8691d942c37995268da9072be9cf67a61ff3d532ea45a2af90e848d5931e" , f "0xfea915e6ca753710b69b12c018921df43d8b647905420b000fd37481e243b629" - ) |] + ) + |] ; [| ( f "0xbe7e898b8f479e6d45de82308f31b76b048cec83c2b7801a378d04036574903d" , f "0xbcda9729fb49ea2ce444d965cc7c4bfb25614e88a4c873b66273198937564510" - ) |] + ) + |] ; [| ( f "0xbbd7a3438855d91370ef0af949f694219e25b98976980a710778b11cf1978d0f" , f "0xaabab9b769c2c7a3774052644b3696d64262addfffe657e5228840e72a694f0d" - ) |] + ) + |] ; [| ( f "0xd66f7287d0ff7dab644865fca0d7ea5d1da6264d2c60a163bb2033328152f630" , f "0xcb89a22c5e8a3fe249675119e0b2255e4bbb68b6c78faa1fc85e3ce459177822" - ) |] + ) + |] ; [| ( f "0x4974a7db840af71ddde8749d84e1a94bbe13e2995fce24e8746ab8c6732e7d04" , f "0x259f980ccb1b5d8dc33d45fd09186b5ba50e1a481c8b666b74b64f9bdd03111a" - 
) |] + ) + |] ; [| ( f "0xf003a0ab059c4e5808a0bb52bd3f90552d64345ca18e5471303d1b067cb89f16" , f "0x32b0cad273eb01a6549fecac95bb155108b2ca278d3faa8e0a9c74fde6bbcd30" - ) |] + ) + |] ; [| ( f "0xa85e940bf4e2d91754aaf513e6e09750c46cd67a5953663ee9e6a91f789dcf00" , f "0xe75adf8897fa66d11e53fde20bc133b58582cbd3b51f2360bf513dc5fb510c14" - ) |] + ) + |] ; [| ( f "0x451f109ca2a8f5d67bbd61d56a9e3162ce50b10ac1ccccb5fbbe341372e3913a" , f "0x28e6865046333191c810a2fb126e962af3c8e8a71da22ab4391983866dca0d09" - ) |] + ) + |] ; [| ( f "0x9a4f56926304e0908d04dd175bcb34fdade270c3e8bcf7d8dbbe545761407b30" , f "0xa8e236b0b2aeab4f1303165e1755f062cf8362ddb498c651ccb545c61202d83d" - ) |] + ) + |] ; [| ( f "0x531bb7a82c7489d82f42990e0e3da70c7ac997b6fa1dc7d73801b3168f820a1c" , f "0x6e1a6863cc043c91a371a1bb9e9b0b19184ca459b35ed793d2962cdaff82100d" - ) |] + ) + |] ; [| ( f "0x3a4172a6d5300aaa109b4541020ab654c16d7527321ff18572159d8b87525f2c" , f "0x1fc410b81aff2652fd009c53fb52766fa8546bf394bebeab9a56506f70990e16" - ) |] + ) + |] ; [| ( f "0x909fed4fe77ddab558bdd76a6acd1cc3eb51c7a19f5610231211a2e64b958a1d" , f "0x0e2ce4d0c08c421468a128d492090cdac7a588d60a8bfd1bea136256ad4d9f2d" - ) |] + ) + |] ; [| ( f "0x1473acbc056c30213a092465a7a848bed62768477547cae8b08f22b19a2dbf0e" , f "0x955af594fdc582fad65c6ae26bc2e8aa8d453193a3a7b95e6bfc6f6fd40ad305" - ) |] + ) + |] ; [| ( f "0x67db1fd8d686e34d53a01adaaf8f8ee8b6dabc8d936614e37cccfdfdc497fc27" , f "0x1ac8c9512ff4bf476567e24076cf248096fb6d35f08891ed5faf99d7a5b95c28" - ) |] + ) + |] ; [| ( f "0x316e9172a508387f094dabf0ae9f2f3c0ec975c5b3e685507434861563a06830" , f "0xbc959cda41656d3d2d9acd0880bf7fa901bfa8cb0d306b76e3de3f96ad5b1838" - ) |] + ) + |] ; [| ( f "0xaef50647f5ec8047f5f8bd7f5d6259eabaf782e2f6bc4eddd0f970f31a7e3a2b" , f "0xec23b64d438f8e8c05876576ea4b53f279ce1bdc5d7deeaa3d2e8a3110291a2f" - ) |] |] + ) + |] + |] ; [| [| ( f "0xa4ee87bee8ff09bcf61c6e688eaf69282c5940c94527381570453d2e4ff33619" , f 
"0x366289f7a45ab759451752a8bf0e01f1f7c83ed3a74f250501a0e4a22dc6df37" - ) |] + ) + |] ; [| ( f "0xbd5ee51dc338b6b53380edb4a94fc9d716e03fca10157107cf4d391a3e7d440d" , f "0xef9d66d75be36bdf5fac26d4c9f00034829f2342782d31e8384749b7d4f6331c" - ) |] + ) + |] ; [| ( f "0xf1864b681a06c4399f4afdcae0976ba117951ffdc042f508f02a4310f303f40d" , f "0x30bb53510a069a1037a42ea8ee59074658264446de19c1b1d2d90c2906e67f38" - ) |] + ) + |] ; [| ( f "0xc0e2fdcfea9e8077cf9e1eed3072f600e8be8a7fc482e4232c963b13a9378a29" , f "0x61b8b4bded976772f65cb970e1ed728ba4f955712f88c6f44c709a26bbf40f37" - ) |] + ) + |] ; [| ( f "0xe8b5a34fdc7c92b689fa9ee9b7cac83d21fafc164a5804cdd71ffedd7c212533" , f "0xb696aa6895c094041e817c2eb15995a6e3e71ca5a9045b34e04b0ad278520c17" - ) |] + ) + |] ; [| ( f "0x6af95ceab5bcc06886b50fa791f65817d9b5bc9ebb58e30330977c255bcbdf14" , f "0xf7b5ae4eb7be8112643de309037d68071a518f0e681c6928efc6f680eb555338" - ) |] + ) + |] ; [| ( f "0xe0220753d30a53a3c5c3cfac6190d02e9e16ba81bd5b71ed0b2328623f968018" , f "0xc0cebd37c6a29e2bf6ad598161801791416462b8b62db144a985707680d17033" - ) |] + ) + |] ; [| ( f "0x8b88fceea5d55eeacc66faa8e52ebdacfd6de0b1b5e5fee13686ee083cd9311c" , f "0x152d083e18e92b48ff11dda3336f9f1239f1beaa24907af1a90115f9136aa13c" - ) |] + ) + |] ; [| ( f "0xdeed6df4dba86041278a4894c37c2f382dbe462cca1eb49a6971358071945705" , f "0x28e286c60d3f64cdcc56dc1b10f9e9d62c2f7bc5a527e8b0886f906a76750d30" - ) |] + ) + |] ; [| ( f "0x97a9ced3669804e0009932e066130e52a03b581827c69c51f17c9aae41d43338" , f "0x148753586a8eb9a2a03e79a373ece040294d7ae85df128a0ef25684bf8077f24" - ) |] + ) + |] ; [| ( f "0x0104d5d5b7b9580ac6b4e43c2f7cffacdbc9191f444c496b2a189671962ff011" , f "0xabc52d09eb1fb18a66f46699174701c8f78a1974a206e3995c9848b5f630d30e" - ) |] + ) + |] ; [| ( f "0x5d60a901bafc72316a5411834cbe7abd90b3bc4e4a382de7976a3c9028b3df18" , f "0x18552202b1616a3ff302aa1ea30d07f64a759bf00e6c1729e41afb36ef16890b" - ) |] + ) + |] ; [| ( f "0x2d8b8378a946f4e5c424eec0b1741be336792fe9de059dc7f7ee5c62d7fd040c" 
, f "0x52bdd5335ca23f6495e4a08cacf6d126cf3713de1dd6c3ca7539098238310f3b" - ) |] + ) + |] ; [| ( f "0x29363001bf887c2e81a7c7e9812b172a0ea53e88068b4006fc7086c272ab3a31" , f "0x2f4abc0b1f5bec44dd49660b9a10d2814bc434e7040d74527445ef60c3c0ab3e" - ) |] + ) + |] ; [| ( f "0x0255cf26b4cd5e2c4b02030224c273d90e75bf23b27e7e71260dcc9336a78e2b" , f "0x2d270d12eb224f5f41bc49f24a40a82562dddf22e08ac802edb3a31b0d1d7a2f" - ) |] + ) + |] ; [| ( f "0xd8060b4695528dc3ee1352062d6e7fb56241f01c861c03ef8201dc4984721619" , f "0x9cc818353ba15babc55fd9bad48d953e3f49438bab5b66d7cc58b96109b6c928" - ) |] + ) + |] ; [| ( f "0x923390fe2faef8952fbcce5dcd274d50edbe6af975c847bd1b5351509f74ad2e" , f "0x086d0928118b74cb4d77596f214fda9409ee04fc6830108d86ec0b201530a139" - ) |] + ) + |] ; [| ( f "0xb9bdb63b4e91162fe3035eddd5d55353d3e783273ac95b0e8eca55cd6dbbe620" , f "0xd0e5f5fe5f48a1959d8fddedfa345741b5313478df59177e69f70cd2cf0fcb16" - ) |] + ) + |] ; [| ( f "0x1303f54ceb07c33be8cd1536e34865f6d73ce3a1045455137726511052104637" , f "0xe745ff858c06b4f3d6f0a35d63e38a9b1b411be8d48d1ff1072f74e9c329632c" - ) |] + ) + |] ; [| ( f "0x9f1ae396320198a242f4c0ca5f1eae5a0ba3a45eb4642010bce68556ae6c140d" , f "0xe9c5486141b72c9c38a841176c520af492ba1df227fe7120946c4139fa41e62d" - ) |] + ) + |] ; [| ( f "0x7d0e57ac0fb1ed671654a20d94a5f9434723d9bb07fccf4fcffdf9df9e74ba38" , f "0xdd2ed45afddaa9f92bb5b179cf8f3a622a29b16a363d79d294d81e1b1cd8bd13" - ) |] + ) + |] ; [| ( f "0xc06221aefa6d5bc572ba3a153647b415298179d4d2f5101cacd70a840d1e4501" , f "0x573537a45d66efee5eabf510cf273ca601b107a726a01be5b8f8aa0399b90c3f" - ) |] + ) + |] ; [| ( f "0x536cea748a4da1ef9de10d2d82470053f75b4116107766685fc8e4cd8461010a" , f "0xc409df111c67a2453333479e1bb4300266da6e775281dbee8f51076de22a8721" - ) |] + ) + |] ; [| ( f "0xa907e178a9a05fe862a5625af6fe7491342f271a1122103aea8eafb37b462931" , f "0xb5a2188c397882d3df51a1c2a1c64987bcacd1167ce1b7d2e2324bb4f9b11024" - ) |] + ) + |] ; [| ( f 
"0x779e22aeeff97161d748d73cdcc780efb12408aee6833a7ec40a9d27c2648d2b" , f "0x59b4385300244bee3ec07db7fd4f68906d861a185892bba4a1a3fce7f189ac2f" - ) |] + ) + |] ; [| ( f "0x12cbf79eef45fbc8295e3c288df06ca9acef74f1a410f40b7e04e2ae8e476301" , f "0xf832d2e16df069f9906054eb319dad6c88fb64255d29f52af3ceb67d7d55f437" - ) |] + ) + |] ; [| ( f "0x312fe15b077fa44373b78653c5444c45db2876ab77a7116ebb7cabd43ff30b20" , f "0x6f73d77d9e57feaa891dc0a0adabd4e9a68db1e81d954d184be433f3270f322b" - ) |] + ) + |] ; [| ( f "0xe5504ce6ac1a187d3eb5c839dc3d8fa4650506ad0f475fd5db3b67cc1074c83b" , f "0x333dd012eb208f5c93bac33da2eacc39dc64cef5cfa27dd3e1050bf52515ee10" - ) |] + ) + |] ; [| ( f "0x6e608ccfa3a557d979dd5a317b463fb26b1bcc24370fdf5b6c435349c3aa673d" , f "0x12b4021e4a254a7cba6d4486abcd1512d638680bf8c9608080f6ca5bcc41a51a" - ) |] + ) + |] ; [| ( f "0x35a7be31633e54d5ae1e4608f906bf40d75472a733d1fbb926e828e59455bf01" , f "0xe0170bdb38d04f78b0cb681189627bf7ef8ff15fe813bb0da1ad40538dca3d1e" - ) |] + ) + |] ; [| ( f "0x635d1532f5029a7af32a492985668d5858b14736f8ec2df02e1dda9431229739" , f "0x5894c30975d5d1c3c96f718b793afbf7f3a432c0d938463bf9cb0540ea8c4b20" - ) |] + ) + |] ; [| ( f "0xc2ea3371c0fd21c91baa07aac66a9b0c8cc6efad99e96865fbac6bcba236f228" , f "0x90009f4d8c657e5230917560936fd4bc0dc77db6976f472e4e2e0e37ddf57f04" - ) |] + ) + |] ; [| ( f "0x48837816177e24328e04a052f325fd864d835a6f9bd79afc35347ad630984f29" , f "0x21cab68a6091f78609f1449c188ae8818b7459b114c5e912875d30d985ee1300" - ) |] + ) + |] ; [| ( f "0xc074f1a31037db43e4175a1257cfc6b00b2bd6ed85d5ebbb114e723186f4fc28" , f "0xadd291785cc4e4e4fce9a3c2534dea7bbaadf665d3077eaa8747c82f0a860800" - ) |] + ) + |] ; [| ( f "0x2c02606676e19aa1b2ec8939df8e985bab732ad14a78184a1edf875a2d074b3d" , f "0xc6c03db6de921d9e9c2bbea216926d6c2457458a9bc0f0ad4d88c896889a940d" - ) |] + ) + |] ; [| ( f "0x276267e7399206f94df7cfd53265c8cce8963a04da2b98ca133621e30fcf6223" , f "0xfaa32a8c5dc05e9d05f0007e4dbe253b67a6447720a08a0e6a831b224384852e" - ) |] + ) + |] ; [| 
( f "0x601c3f3d52382a8df88165aa3a85211969b4009e101a2106f3460ffe042ec430" , f "0xe34aa82941915f2812cebc6b3b85fbed770dc7f501347e4f76f2755d3c17370c" - ) |] + ) + |] ; [| ( f "0x41dfa353224983d1c55c4c891d3c98f05da57f85521642e05838802bb4b8d608" , f "0x10b72b421d8186450316b255abbc3a8a57aebdf471114bc4da3e358a3aec2218" - ) |] + ) + |] ; [| ( f "0x925dd537405e64df4d8305b44942c9eb9526e656e28a7fed94696c6a68b74832" , f "0x858e024c3b60564a28b37bce126fda275fee40a0ec26de78b9ce9d5ff722150f" - ) |] + ) + |] ; [| ( f "0xc9c79b550a9440cdc51d7b20d2eca0cc966a15811c023241d791455289abab09" , f "0x116c72a17fc060f2097250252de019cfd4e5e141e10643b34a70e6f002fb7e19" - ) |] + ) + |] ; [| ( f "0x8add7d64b92559b23cb66d6a11978dfa640f0f27c5a04165b8d36adfb4139534" , f "0xf429f958adf68ece76cef5de115885e969d2e6f81bbbcb2b1f4e3767cecaa737" - ) |] + ) + |] ; [| ( f "0xfa1a695ab489ca819c9328cb867ec60892f4d160b1650dafeb7a88905ba60a06" , f "0xa772c0439bfdf030ffe1bfeff2578815fcf083bdfb32ca9264c7b9747c1ba42a" - ) |] + ) + |] ; [| ( f "0x0e7a406f3262bceca6ed63801c1d077bae2a41ab6d99c1822d18f7b51f7ab304" , f "0x2f83e8004472e2f92875557896f630d47738a3461e85be1a52442b5a12d0840e" - ) |] + ) + |] ; [| ( f "0xabfc9c6819a2b3fb00cb993899305412f3386e1d930b499527ed4ff0d5b95413" , f "0x15a0e645f6e6591d4208b78270e9c15af616f5b8e3bf32cbfc53550aad5b383e" - ) |] + ) + |] ; [| ( f "0xe48614ea9789df3d0866e4e7914539335cec58fdf84dc2a9bcce427275867600" , f "0x5e10f115b32631cf204f1c7d2c20eb1241770b21d50aae496cd335b7148da62a" - ) |] + ) + |] ; [| ( f "0xd0e1d92ee887d7398464e76f3e9d736cbfa6d166203ee1f01586d4520d738138" , f "0x0838334eac5d5ee38d8d3c27a368f4afc1f87fe95fc3ba3f8f809b86174ac406" - ) |] + ) + |] ; [| ( f "0xbd3b2ea39b4a6202c88f120636258c11773bd37cab3a358104483f1402fe1132" , f "0x464a1e1c232fc2d165a5f367cd03d37f0a1eec09e776082c308cf3bb16293b1a" - ) |] + ) + |] ; [| ( f "0x0df36f40806f47e4a01a734a021b283173811cafc50af3f645cf1e49b0972713" , f "0xae2da528c8999d21be565a302430c4953d2f3160e0685f5c32c53a5824752f1f" - ) |] + ) + |] 
; [| ( f "0x0fcde464d2942571e0cf4e5389f7b2bcb4f4826276d578ff767fa9097cb44c30" , f "0x5ba3233fb0da621d3bc421c0ad2cd5eb2bb94192c4e066dd87ea703781de3d2d" - ) |] + ) + |] ; [| ( f "0x1112a24cd580ba1c87b09deb96a1fc78b2dc3fa1101aac915594278518ef090f" , f "0x6230ac9d79b0d84246e647905dbda34709108ca6a7057bb50569c721b08e0638" - ) |] + ) + |] ; [| ( f "0xd73d936eb8f6de895ffe29bb5ac98149a1a6937ad3eba9d9ce841c64d2b5193d" , f "0xe0fa02ab3d75638089c7b9d76476f0ef59c563b0349a1dc22979e1ab61c0e519" - ) |] + ) + |] ; [| ( f "0x4128f3489de47889cf9deda46b7b217f7078927c96d7e31c6fa2b9d5c5b68803" , f "0xacff603c6d396bdf2361140a59ea9c92cab7fa9aae5920c01af20c88c0675533" - ) |] + ) + |] ; [| ( f "0xcc13827cf74a9dbe48b068dbcc3851e525c8d76002f91bc0ae0e2547208ff22d" , f "0x25c7d5f3a7903fac6472448fac64a58f8f977f051b25220ff3094233749e943f" - ) |] + ) + |] ; [| ( f "0xf94b50d38ca94ee365837b3778565839268b783a58c2125ad5f28f6152eb900a" , f "0x3f032a78abf24c9384d331b4e16e147319faf4a9e99456e2b8a3daf8cc7bdc1d" - ) |] + ) + |] ; [| ( f "0xf279ba699c28b7be7b54dd8a4c44f93c140b471250213984e8e9e516f56b833e" , f "0x017bb9deb030fe3d3bcdefe6232ecd672c08b729e7c00a2a1c4eeab231126d26" - ) |] + ) + |] ; [| ( f "0x8fd32742f46113b9a8c00e3f655eacc46f92937c25d5c9873263a6be45eb3004" , f "0xde5bf8aa58e975e917896577a89b2e4ea396cdf59b7c0663db96099022e52730" - ) |] + ) + |] ; [| ( f "0x265350a01abcd92ec2cccd0878b6c7df63e92022b6a77dc38a0342fdd04d241a" , f "0xe6255254babb88851ae8993e4fa5f94a93c380dbd8c3fc41dbdf89e6d9478b3d" - ) |] + ) + |] ; [| ( f "0x5a98c254d3284cb803c741bd0fd3dc2e9ebbe397d36e91293cb20c0590e86905" , f "0x4022bad6006510f41916d07f198aebe42d5b5420476de0777969421c3d967722" - ) |] + ) + |] ; [| ( f "0x3930d6b6c1f5467bc7c72f7fcea41d6ffa3abc321a7d43822b0e3228e78cc336" , f "0xff8768e4cab97d413696a7b37a2c6fbc1fe3a582ea1afe96a6c4063cdc2a3002" - ) |] + ) + |] ; [| ( f "0x127c6fe7b1fdd27e0ac7cb00c988c6e2333d31d8f23a4583bd54c5c31eaf5612" , f "0xc9cf2407e4e1e669f0e89582e6696944260811e1bd3bb47a4e0e6ea2a192b22b" - ) |] + ) 
+ |] ; [| ( f "0x5a402ca5c3dffb55b17492d014e9862b374bfec50bd12e56e0d0d12659302321" , f "0x4f58be2135325a639a0c2f39ac04ff57869653e45c236138f1ee90ccff2aec32" - ) |] + ) + |] ; [| ( f "0x8d9a4c957b783f7de7013d72f545e4078390ae0f8b03c93cbf7b3eb449134c27" , f "0x233191cf43adf06359ce5b020abaa51bbeb41b23f1ff9c56f942af35e2947817" - ) |] + ) + |] ; [| ( f "0xb55eac470610ee2d4b283c3e27dbcc85e8473040b8761e658eca2b705d99b315" , f "0xe037e46bbf110eac6143496433a29be6a481ca54151208c64ed5209d5d4b8d32" - ) |] + ) + |] ; [| ( f "0x19bf04a8bb2210391c196ea98e63624d9868f1e3ead7937b62601355dd526611" , f "0x2ce8f08fedc3fdc7c12a07f830cf6eb7e41b77551fa578944bed0070048dec3c" - ) |] + ) + |] ; [| ( f "0xf989f61d8ff46b57c79c00508f377fec97d7060cd05d14901a7d5a8b74a29f3b" , f "0x9bef33f706892d70989a7a76995d2beba4a288b72b4d26adfca7805763f6d036" - ) |] + ) + |] ; [| ( f "0x84c9e1f65bb669e2e327842cca7bd1657011c5f2041256acbf967eae1ba89f17" , f "0xd166d812c0eb6bf27c6075fd84173c26534fdf5b4992243a3769a03537048f3b" - ) |] + ) + |] ; [| ( f "0xd60f6f23cfa5ca0c83a52850c0ed913b1a598532f2af2f78e08703c4ce9a0708" , f "0x949d2acddc3b5b87c56608075241ea9865ac2ecdcea8a2b7614c2b6a45843c21" - ) |] + ) + |] ; [| ( f "0xbc06855f02f958820fed5b37556e7ad9814c9561746cd1724f6d1f84f7bd123f" , f "0x38a0c02e592970362d8ed702d609f857a100ac50a1e17c4cbc80cb9cd7d8252f" - ) |] + ) + |] ; [| ( f "0xfb4306f557d9f7ba35b4f6eb4e43aa3dddba5a5458f02ebca2f10dcef505fe23" , f "0x3ac6a57935bc4e9b851b791ac215f87957968f42e525668436c5576d9f6cc21d" - ) |] + ) + |] ; [| ( f "0xfb493d60605b4953a880b403c58e292bf0d045955565cfa95435edba47c7fc1d" , f "0x639b69805f8662221b43d7e0e9142930542dfeb93443f2f65fdef2137c079715" - ) |] + ) + |] ; [| ( f "0x5971bf4f2d5e28c843880951ee55394fe1be01d9fee8f171c8e6934d2f13c324" , f "0x7bfbe7beae2023ee4f478b0c2921afea2f16d1d3128dbfde3cddc5d74cd8e73f" - ) |] + ) + |] ; [| ( f "0xc19f6d7fe77f9fbb1b51ad23900c2e45af991dba5c0353e3abb27d65de05383b" , f "0x382f23964ceb07abe5b8d6fe495368c713c2815f361c52b103baea4c54106a37" - ) |] 
+ ) + |] ; [| ( f "0x7ce9b3a0ef5fa4bde58b3dbe093bd1cb4f48af43bfebf0b5be54564a7517e337" , f "0x81020031d05613c8d05c7e32770e4baa1814e02946a623e01a3d1a2e5df22e2d" - ) |] + ) + |] ; [| ( f "0x63a3ea760b7e4cf3ac71dcfba204899d79d1a151692cce38ba168468aeb9e53d" , f "0x67783143b5659b0d63d52492f067e29835a4d28e9b1b4b65bc5cf02bebf5bd32" - ) |] + ) + |] ; [| ( f "0x444c6b7dfe48c2d4348e73735226a362d4660d8deef4bb0b49687925dd9b052d" , f "0x0d4b3d667546fce0a214ab3672acfae65bd02ccecad341a690a669c8da587d21" - ) |] + ) + |] ; [| ( f "0x0af2f7f9879549a650e2e02c638ba2d28d98cc89b0c6cbf209e8309357c5d223" , f "0xe8b88207cad40e0957ac6de28534160351995a0acb1c65cad7f6914e6d70893c" - ) |] + ) + |] ; [| ( f "0xcfb6fd520f409297dec06c7cf5faeb2484e1d4212b5ac152e97e8cd40ab21701" , f "0x8976211e8af4e377bb6382b86fd83d18a85d024057c01ffa94620c3023dabb04" - ) |] + ) + |] ; [| ( f "0x76d830ad864666c59db68147d879b0767ef30617ca93594b8ab8f73022ebba0e" , f "0x211e06b878aa7b42d4e94fac47e8aaa85faddf6ace9e573c77fe5f09525ba713" - ) |] + ) + |] ; [| ( f "0x85e6a5e4c6df941f27e3094f240eda2b0c5d9deb9d8dd37fead21cfc84c2703a" , f "0x44a8f294b2d84b0eebe659695909fd492432475834300b125f5592ede386791e" - ) |] + ) + |] ; [| ( f "0xa259508311f22898d158560c99fd0066242be38eb9e8650e2fbdd731bec3161d" , f "0x96562cff3e3a6345d10cc26e7f38464593503dda71f27e97e86a3e309c41832d" - ) |] + ) + |] ; [| ( f "0x0c0e7d8e6ac92a8d8f74f05b630847d20873642aadaa04d060abe3cb78a9b72f" , f "0xc3789c59e9ca91ab643cd44d56699d6e91824775de7a476a52a41fae3a4cfd3e" - ) |] + ) + |] ; [| ( f "0x3d4ca24e95d8d57916f56ac256a8e74ea5ab6f38cad77a0060ffa45f30359133" , f "0xf913ee4bf2dcf9e821ec7ae339ad41ab07ad9b47265052cdf8afbfd7eefff809" - ) |] + ) + |] ; [| ( f "0x3f0a585fabe909a7f6e3e678c4b81d38cb631bbd9abce88e6d6759356f4a0702" , f "0x4ff7293d9a4292bcd8017ef1f859e0e19848e655c63d5e007cefa340c11ff91c" - ) |] + ) + |] ; [| ( f "0xae4232eb70f36a0d6047dbc2cbf760926290c235f87198f61ca364fbdc2b241c" , f "0x29559c810df577fe6c76adcde8300f2e6577ff89d37c5163aa738bc534757232" - 
) |] + ) + |] ; [| ( f "0x6dbc8e54904b0b4062285c3bf22e84641292e40cb12737f56fb27e2259ae7d3b" , f "0x8d909070522589a9eb7bc16b18546941086bf9a5dd25d18d20d9c0de3a4c8f3d" - ) |] + ) + |] ; [| ( f "0xc598cfdfbb0c299ef98385000b91a1abfb0d62de1244425359803dea942b5216" , f "0x18e15766fa77c7df34d88f4d46b097600a7e4d04093b46e91b4171da17165404" - ) |] + ) + |] ; [| ( f "0xbb9fd7b0e572b63071f8efb5fab1468485060f16c151d12f414a83b0281ca102" , f "0x7373465841e750ec1b4d2f68df94470b5bc27957295eef7feda440c3a9b4131b" - ) |] + ) + |] ; [| ( f "0xe80eeb3ee5bda6c370f113ba0838541745ff16498ab125c37bbbb8f881955f3d" , f "0x877b9aa2f35c5468f0c30724a01d88f117c44c852a93268abb24d6542532a238" - ) |] + ) + |] ; [| ( f "0x021b460def1e01e542b68d36bc057c7f6e367cb4dbcdfbbb8573d04e326db83c" , f "0x35bb8fd267e07ed25f1bd2d632af31cc69ee8e86e4dd837771beb3c1ccf81f00" - ) |] + ) + |] ; [| ( f "0x7860ceb9109e56f5ec6e28281b19502e2c7a011cec193e7ad9fa8fb5c266e705" , f "0xa21751bf6499dd3be7dd302728039b5c744a8de8cf5bd05885494a54fcd85607" - ) |] + ) + |] ; [| ( f "0xa8745a96208601457bf8e60a2a5395646a6656dd13ad748e965649e68d4afa02" , f "0xd78da256df5b372534ace87ab5e7dcd1e190dac63846f12e0fbb72a1f6e02339" - ) |] + ) + |] ; [| ( f "0x9b19986f52be1d3abe34049190b001ef7c01a428e247c7f09801d8a4ac9e0610" , f "0x3a4ae5d77d2e960ff6757a4ef26207240b06a7c5f4b5f9f3b37997a3be48aa32" - ) |] + ) + |] ; [| ( f "0xb900079bdcc17e70ff3ec7eb0f66a3353a0342cae0501f09634e46caa9bcaf16" , f "0xc43bdbd53bafb2521447098d99e10a16dcf6af2475c7e82f383daccb374a0236" - ) |] + ) + |] ; [| ( f "0xdb5adf7db44c5f67e899a21bfd6889c3804efe733728b01f0f1765e1920ca507" , f "0xf693aa71fd755467580fa60696b65f6079f0d1d54e8e737a64d32e4f3445c61a" - ) |] + ) + |] ; [| ( f "0x511ee939a172cff99a720ddecaed06b1091e67a764f3c1400e948060b100a929" , f "0xd30136733c124fad453d1c19e36b42ee2471c1a6a4a2ffaa7aba4b0ea693ca1f" - ) |] + ) + |] ; [| ( f "0x3f057fe816139fc80059c914b12c20ad1dd77f0a037f97e316f6b1070dd1310c" , f 
"0x3147c1fc4e4153952b08fb4fa4d43fa4a8ae8d4259e5429ffbe77399ecb6b135" - ) |] + ) + |] ; [| ( f "0x58be340cfac9762e4e3ed4c6aa4f501a3ed385a2c6fa746d86a6aaa9c5c07115" , f "0x6862066b492cc532b0ba7e7deee79ea1f6cf858b675e526d23467d133618dc12" - ) |] + ) + |] ; [| ( f "0xc9fafece7353177d651a2d0fa419b75bbf2b2df3d4b9d8963ddee4df31c0df1a" , f "0xf21c72a71264c1b3b0764b628da09da5676ccf501bc6aa234b669e3c47f52a13" - ) |] + ) + |] ; [| ( f "0xb36f46deb10075cc0e6af9959926e6f0a670eb65f46e6b8e7645f42c11d89822" , f "0x18b9fb8abe4037e917af6ca59e48e616412ae547192092972c5e4e50f5aa5a37" - ) |] + ) + |] ; [| ( f "0xdfe2bf1dfbf4bb11805a502eec3a269e4481275e23256c636de2744c2965c504" , f "0x06a2d259b6a07ec1f910b1cfc4d3e84afb0029ec8a797f0abca7c674d3ce6f18" - ) |] + ) + |] ; [| ( f "0xede5e7b2a149a4874e2c267aeb76fb15e34d65ba2a102ce15b84d8d4bf926e38" , f "0xe3d7fcfeadd5dc417ed719272e7d9ea192f97e2b3f188a12d73e938ccd693727" - ) |] + ) + |] ; [| ( f "0x0f934ee997ba74993cdad04e01f904d64532b158ab154604f82af70801765c0e" , f "0x27a8013a773c3bf3de118fc820e2ebbc52e19319508ed0b8bceeabe5048a6e0b" - ) |] + ) + |] ; [| ( f "0x545dfc392a014db2053ce3c1c42fbf6611e3948fcf74225ca9398cba6540fe2f" , f "0x43241e63cdb2e7b70bce94c421dbfa84b6bbc0110650bc3a1ea34636090ccb37" - ) |] + ) + |] ; [| ( f "0x134dbecd9e8dc6e0f07faf86190aed32d8cbeda65e13f75d10d5aa279bcebc3e" , f "0x9476fb5b909d8c1a19057c00d634622e45a63131a4801ef5f7f0e7de50a89508" - ) |] + ) + |] ; [| ( f "0xf560a35fc657ad8049e34582c6f41e8a5faa11d866ab208b2e180226a78d310c" , f "0x0b1748bd0ec4f3356641abe1e26050d508d01d8b2a4c72d063970221c41f791b" - ) |] + ) + |] ; [| ( f "0xa1717c9390345e1fff3c2ad2ac0890a0607f5a62390a904b3d7fe15359139928" , f "0x2ada4e4fb3b668c1cba02feb3da61784cd9883deabd3cea547587880462d6122" - ) |] + ) + |] ; [| ( f "0xc44d772ac6da625804305b462b94e548bdcb76fb712d754ea9525cbc45124024" , f "0x1737f5d55904c040eba52766d0fdfb59cd85c07b4d913b7e13cd19a65c7d982b" - ) |] + ) + |] ; [| ( f "0x9c9cd8923c9ff7c3902c02be3624f0c360c325dbe14241ee79cc21a2ebced82b" 
, f "0xc0a2af68dc3ce96c4adf7e1fafba1dcf99ab0ff6d87289e88bdce738f380462a" - ) |] + ) + |] ; [| ( f "0x2a7b8f73f4255d663cbf58db4d9eb7fa630d9520977a3da4a3fd7238f36c5117" , f "0x6296283bcaa815dfcdfb40914f7b8117c0e9e53599ee8b5a8d8b7f2dc92c6b2a" - ) |] + ) + |] ; [| ( f "0x929da98904d1c22c718752e3b83a30dafdbcc0d062c515607c466b3eabed082e" , f "0x1c0f0ce4a7687e3111f20a9c0f8a913b41e72a65e86c298f41db3becfffc1031" - ) |] + ) + |] ; [| ( f "0xdd9494771211fc4a661b32d5d11253f2c5502cc46df350c948093aa228a41804" , f "0x36a30b62cf4826ad66f9f96924f45eee45d5362d33132f9e6e71e8abf6e5cd00" - ) |] + ) + |] ; [| ( f "0xa060b08b318523ece150ec0afc0b4fc2ec6cf5c008c0eaacfd3851265ce5de16" , f "0x0641e334a4258cb4255195f24d7b4ab82324378f37fb715bbd1abd6c6ecd752f" - ) |] + ) + |] ; [| ( f "0x9a5b9f22e9546a789666d5b9100535d52a293a13f3de822a6b91e1afc528ce1b" , f "0x346c99a2163399cf2bfd3ce8cb28154c51853427e0c0dfefd1814d7b416cb110" - ) |] + ) + |] ; [| ( f "0x61650d0b0adbd9f81e308353c578dace2781cce134d16da2f735dc7e5c516f1f" , f "0xc88b5c05ce5cf4056f03a0265a4fe76f83c9d87db4e174bcd589833c539bc23b" - ) |] + ) + |] ; [| ( f "0x64e592d39a9efd866667c7c19a8510e17bbff06ee30fc3baeff9d27718003a36" , f "0x9830c10ef084ec1091b18549d1f308239551c2f62e45b05cd7a802427a13b408" - ) |] + ) + |] ; [| ( f "0x8756ab7debe0ea0a078048f91df3ea19d1ce231a2c2b88bad063f74292464c1e" , f "0x966ba0b4c15750371d0b098d1acf40a8a808c500ecc98bbf6e060a36ccbdbe05" - ) |] + ) + |] ; [| ( f "0x3f6815b9fcbcfbb2a388b32d517dc7a85579560645bcf711020b3d0bc389cb02" , f "0x921bb3301967573674421545470f0fbbe67a51d95759e98d9d9275a6816e8f26" - ) |] + ) + |] ; [| ( f "0xdbe4f62c6f0bd4c27999a77a3e920885a9432e70d4994f8397de5fa331350214" , f "0x54495ae5395aa02cbc9b49ab0f9b9550957a1c8277554b9b6e12ac2b6a79ca04" - ) |] + ) + |] ; [| ( f "0x69e8d3758d6444a58cdeb4d84a77a1dc55558280e967ffba0ac0fc68dd4b752c" , f "0x609e1dbff6901f6d905f32566c9b91185c3b9b9227b974af849de827d4c16922" - ) |] + ) + |] ; [| ( f 
"0x7813d38c25893b5aaf96a011d64dc9ba104790d246f58e4927d46959cfeb4d36" , f "0x74905ca75b9e86375f38c5230c2f6f64b3febbd2e448a60f8aa76de6090a6c1d" - ) |] + ) + |] ; [| ( f "0x273f439e005123b65ce05172f153179084addc9233cb5896ea4c9a2264a9c83e" , f "0x64d8df851b7066536724274e63820be4d2962eb06f98b11c9a44175dea22dc25" - ) |] + ) + |] ; [| ( f "0xcfa5db1da077bbd956ce6c45304696ad33fdff76c56ad8e843523b94d301f211" , f "0x4664b66bf0707610c291540920c0e689be5be3cf7d041b49d6f88cc6bdb73a24" - ) |] + ) + |] ; [| ( f "0x87e6250f4fa04def9ca9824113d66f5dee11693d66572bff654d12620166cd1c" , f "0x870590d553abcf7d848e7f4890f1ca00e7bd8da22c8833e3cd661527efa76324" - ) |] + ) + |] ; [| ( f "0x4c8ef85cea495a28cdae9d7c2a668621d8b0d77469d786cb0becb3dfba36e039" , f "0x999345ac8703f3a856619086fd27b661a0990de503112c907474d4a89cb58838" - ) |] + ) + |] ; [| ( f "0xcfd2622611e04888bfa2ea2f8bb781b84b9ece49171021b0725ebe3bbf833e38" , f "0x06db6aafc8f86c6f34dcc9caa3d75e40e2fcc79505b8e9b7446b0f1521532723" - ) |] + ) + |] ; [| ( f "0x61d45d103d15e82cb530b5c055aa3eccd8effba2e8b7b3b5c98f7deda5175531" , f "0xe94320e2455219fad0d0f26043d518e096be4fba990d7775a1091d394c2fdd37" - ) |] + ) + |] ; [| ( f "0x0b846fe9196f2693e65dc0c747f20704f34311637bf390715dab5d2b4c741014" , f "0x166fdda03e631913b5750d1eac6779d780e4ffa09459d40dfb6de246a1195509" - ) |] + ) + |] ; [| ( f "0x95e43d899f840394414dab99c5c9593ed9f8b1c9062448de4fff0fa2be40d628" , f "0x6565362ffa329ab7329de32284ea25bd761cdea4c18001113c386591dd68dd09" - ) |] |] + ) + |] + |] ; [| [| ( f "0x8fff07cc1be007056497a1f12ebb4fd8655c94fdfcf22d0020b654982b87193a" , f "0xd4ebe0a5b60ad7f05dbd2ff86d592fb158646de0a2ba5da5df40766059f56a0f" - ) |] + ) + |] ; [| ( f "0x82c829b5fe86e07a4169c2d9fc38ec218d54a072231c49639ac25cebf63a5d0c" , f "0xd4ca767a66e22628f69c6f35535fbebcdb6496cd9491eeed4b1745d24ef16d05" - ) |] + ) + |] ; [| ( f "0xd8e721a4120e2e60d6cf43807cadf6bf5001ecb77956b7d6b59507e9cc446d0f" , f "0x306a956a59b3eeaf3303c523de311656b0dd4c5f60b830e67718ae7afcc0ed21" - ) |] + 
) + |] ; [| ( f "0xcd71ce1e36a5813b6686972b8ba51ecccff6fd3547168d5859eb8d445f78501b" , f "0x7e377ccda0ddbdead09b850079446b70df8ea914944ddbc0a8c8c09c2747643c" - ) |] + ) + |] ; [| ( f "0x9f1310e358f47bea4ccbf7eb1e6eb38ef7d932c05e2d5068da1936a60a53fb2a" , f "0xe127c1bd500ac5cf02c554b5421a98ae7a5e00a484b8ce77baae29a5cbdd7614" - ) |] + ) + |] ; [| ( f "0x7d4a2d625441f8b95039bf40d1aa7c5edd06c87face8fdb10f2504b97094a802" , f "0xb7c4ef3dbea87d6ee83de900a7b7e2c96da9dd8e4014321c5e6e7c090c49f335" - ) |] + ) + |] ; [| ( f "0x0dae0ad27aa9f409b22174383d7a5ca40ce48eee6a57d51178a1640d1b2c6812" , f "0x92fb82c40d10dc931e6b1dec456c262b94750fbb56b96027cc8fd98cc4969c39" - ) |] + ) + |] ; [| ( f "0xd2b38cabb9251dcea95c7ba04c5fb6b6d14b29e132403603d883ff26fcbfa426" , f "0x50e21e100e20bf37046abd13a65768b8c2840f00d6c18da9b448181b4dfe871c" - ) |] + ) + |] ; [| ( f "0x19132e6c4c214616158b6e8363152e34f73ac50973b84f6e680d13ce90417b1c" , f "0x72f37b56aec06ade6d20f9b59443fc1e4068a2d87491037ca8867a854a106918" - ) |] + ) + |] ; [| ( f "0x4090eb3744ac4d6e8bfeebb6149eb57cff31cc4f4b49bb7a294f55a88bda1b28" , f "0x79a807fd07629eb5441d2ddf6b9e5f1f2e5eab3d738d9debf701fdf5f6915f2c" - ) |] + ) + |] ; [| ( f "0x9d07b59c27d0f5233cdd5b9227d8300a00de3e8d3e1702421d60e606206aa511" , f "0x138b95f42887dcba9c1d17e94befed1394c2d8b4cfe6111910742a0e46cf4b03" - ) |] + ) + |] ; [| ( f "0xe654818c9c3f4398f38219ad0486bfb9787a6b5ad8dc771491fc04a020c74f35" , f "0x2dd3aa3de5c233ed846e3e50f08f260a31ba7c68a41cca80ec741140114fa50f" - ) |] + ) + |] ; [| ( f "0x93ad4107d1946f3fc94c82a67aad10c659b334ee5e9ca9dbc95c98867074df37" , f "0x2397d6801fd5c5875c265b8de0af7653d1da9f7eea9f21221efc0adceb0f013b" - ) |] + ) + |] ; [| ( f "0xcc393b85af25222ae8b843a44f06b6b0d17aa35f78e6d3039331934762495b0b" , f "0x6683c2663982798233702596bd72854366524c2373686e991b6a6e57205a522a" - ) |] + ) + |] ; [| ( f "0x71a15ca36d795223269e356924415a703f70a6d0b75ca5a0d707b42df7ea0e3c" , f "0x0f48e4e4790b15964c02cc790e628e9b9c5a094658ccb64fc1e6d07e9ccec032" - ) 
|] + ) + |] ; [| ( f "0xebad659efb5d90e968ca047a16ed53f22b3ef3d708a4db6cbcbd0e7a6fe6220a" , f "0x807a7d51c787563cc4dd86e57ba4eaefb22b8075223e750d9c9cc84abe811b2a" - ) |] + ) + |] ; [| ( f "0xd9838d914a9b73486c4015d0c33eeb487ce9f9943256c1ce529853db4eb6cb0b" , f "0xe292aff950e947f461d8146c69ece0594aa262346fef8e22c7da3b2fc0dc4512" - ) |] + ) + |] ; [| ( f "0x4c10ab39d56db4ad37deaf3862a39a4484e367faf597ed89f9cf7ee635cf5f04" , f "0x8291a0d3baebb89f6bbf336a1a55ab43a3334ec3ed38d05b80be918e061b9d0e" - ) |] + ) + |] ; [| ( f "0xcef06ef061e9498678fefef3ff6433928e5d1792d64d9a03bb1f4628bf53ac07" , f "0x36787f97127dae435ca4d9961de5d40a8e3b57bec3f4a2e3a4d00ed30171e701" - ) |] + ) + |] ; [| ( f "0xd35e9573718be3fa6d007f16c5e892a0eaa00ea21e399ee70784267c1a49bf3b" , f "0xa1567c111821424f1b67a3647de6859d9ec15c1caa72a9ca2231cdc1d3761f1a" - ) |] + ) + |] ; [| ( f "0x52cd552bb532a423ef9c47931df3d45671d6acc33fedabefffa027be517d482d" , f "0xd97ed5bf163945d9884f92adc35e8436979924c41a1695cce6ea5a7c65a93526" - ) |] + ) + |] ; [| ( f "0x756f75e84538be18534fd25e2404f626371969bea9f8c652b80993118a21f72f" , f "0x858d31db7dd204192c4f32506c014bcee923bc3aba558f9f1a422753b917e618" - ) |] + ) + |] ; [| ( f "0x0b20549b6d4af980259a5a70fc1e3f6f40fece5a1224bf40961cae2894a5b800" , f "0x9605b2c282e4593a09abbf5d0a836c8e88655fd808e8a65d41b63b4fd16e3c02" - ) |] + ) + |] ; [| ( f "0xbf1badeb3766f8171be36a7ec943847727cf5f75a82b3313d58d4ee374bf2337" , f "0x28a2528b5f88eff27bb51ca93151470b4e0423191119d9ca07d3e94c3763b329" - ) |] + ) + |] ; [| ( f "0x8352dc9a00766d3165986b376e93eedb065ef19170c3b71efed5092cbfca9c30" , f "0x5e0a4c187c53534a1bc72b88fb9a098f79a46e62cf81d854519fca8e59b88d2b" - ) |] + ) + |] ; [| ( f "0x031a53b87e4c4060a3ee38a07517393ce0c18323eb88b648e0c98dbcef293236" , f "0xbec2b221c960cbf17dc2025cfb50a17415e6886335a6e2994436585038167b3b" - ) |] + ) + |] ; [| ( f "0x17f1fc060cd137926f07942f28bfd9789af89835cb7b154b2c4bdbb8178d5430" , f "0xa6f20d2a2780f444cc93370272c8562b9c0824f97a70fec7157d398bf02d761b" 
- ) |] + ) + |] ; [| ( f "0xe193941bdda17379afd05fcb4b128a6dfaaeeb1bd26da69ca0c6429a2c23e23b" , f "0xe9797e6cf4862bed99afebfe4fe9b7194d4458476c0368c2260cbbc5db27e932" - ) |] + ) + |] ; [| ( f "0x7e2c405e9004ce6f67aae9b737f04ff9f9b0354c91dd5a4db12bf56d8c830e0e" , f "0x531838e34757f682b7b5e242f7477c68c21cc4438fd50db3601623e97d343d21" - ) |] + ) + |] ; [| ( f "0x1dc156a48cfa35b580fc0f19bc5e028a7a66f341cffe5b11201aa6db7baf9827" , f "0xab20a8e51b2e90e1bdb03c0fcd487271025e5ba61493f2ded88f6375d830c708" - ) |] + ) + |] ; [| ( f "0x1ce882fa21b3c9c85a6e3a83dea4c5d48bc92eef8d4d1c9324d20a738f8ce30f" , f "0xbecc092e5f4d7d224c883f2bd9fd24ff8abaa53c26084477d17db36d08f78118" - ) |] + ) + |] ; [| ( f "0x886662b47d9b4cecd3cb848f934e19558591a1645a8c043c2cd75c7c4cf94e06" , f "0xa5ed4f91d2fb0a990377637c4895a128e837c0d3b8a1cecb6533a9784796b81c" - ) |] + ) + |] ; [| ( f "0x3f44a1811439af6d9469cef56fb7917c9a404084146531967a0134f915f30f03" , f "0xbe0521025082dcfd3bcd3c6d3f2c9ffa5ba8b040d4d7529468011d58c80d2b13" - ) |] + ) + |] ; [| ( f "0x909ee676393c53986bb6797349f724d4929577b42113790bcf62d05330f25a0a" , f "0xfbcef4d80a6efa3c580b60f8b49ad636cf25bb89dc3e46767a78b073c87a9121" - ) |] + ) + |] ; [| ( f "0x23df416c5aaf94452ec8a0df4e2a227956a6b8d355372f09d5654202477b2b06" , f "0x46f1f7e120b67b3f2b335a73ff64d492daba8b8a9752f4d591e683599aeb5000" - ) |] + ) + |] ; [| ( f "0x32244843dedd5ae6ba9ae8ef7da9816085c977163dadb9d9e4609de2e6faa812" , f "0x161a137839f311a8db850c22424946c1cf7ed1a24a007bf0d6cc1c26855db61e" - ) |] + ) + |] ; [| ( f "0x6d18435d91ac01111663f641244e6b921925b34290af9d43e6c0abe11a38f503" , f "0xbd09ca5774455bbd5039083aeb7c400fc58b2dfc4cbdeddf89e3d529a96e0e11" - ) |] + ) + |] ; [| ( f "0xb5791b1d06c84536ecb0af0c5a1164b6d6b309c3b26853182f8500e87426dd2e" , f "0x86e4fc36e71d927d12438985fa19845092ffaadf230afa7f72bad4645a25dc34" - ) |] + ) + |] ; [| ( f "0x1de7230fd998ba002a4ac3d931b37ce349f1448a1d8d43638476918ac0ff0137" , f 
"0x7d7b689c34e3bc36ee136cdebf1b40521aafc4e570f71cad0ed210aaffe2e71e" - ) |] + ) + |] ; [| ( f "0x39ce5d792e7337feac08ff4080f0a9821a4969a405b75bf4f94f3121eb755811" , f "0xe0627dfd0f99676917b90d48e923b103f544e0bebec395ba588230882ad6eb1a" - ) |] + ) + |] ; [| ( f "0xdd23462bc3766cf5141eae32162222f1f46ce8e8c4919206beaf59d1f230b833" , f "0x4c70cede897acc30e0beb88c6a0735ca7e1d3f854cb81672576256a93fd60e32" - ) |] + ) + |] ; [| ( f "0x1aa42d10d270614a56cbd50a4b4050e7c7bc13c1084539604bad76d2937d830f" , f "0x2bb6b014cb6174787f871883198d6e4d88706e21464261037ae93e855d261d3d" - ) |] + ) + |] ; [| ( f "0x5c94ea81df6421023b52f6dafbd30d7feaf6cd807a3dbe09aadc3e5ea1945a08" , f "0xc2aa62a7eeae9a3347cccc529b5ccbc160705ee1e707c017e58b29a7187efb0c" - ) |] + ) + |] ; [| ( f "0x44746e0467607642d72b4512d4839f924e42970729fdb9287a424b3247dd3827" , f "0x572572af4c9159bdfe3d5ba3bb236944d3a049fd61837ce31006f9f67310e132" - ) |] + ) + |] ; [| ( f "0xf0ea247a1c5ec68a9862f216e9f2bc098c685898ea1985fcd609f9741b372513" , f "0xf2b9d8267463cdeaee9fa40e910ab2b2e4e0fb3ffafacfe19220a03b29e6db36" - ) |] + ) + |] ; [| ( f "0x7f9ae1074622235507b1021ba2a05d0ed8a4e95fed8e2b4390bd8ad72c3c883d" , f "0xb33ed986e2f712e457f99d64d5be4e3354f43890d390b749c1e335215f02eb0e" - ) |] + ) + |] ; [| ( f "0x7ed5a8d84e7f20d60864f2de70dc6360bbf9d029803528080a0f37b86c706024" , f "0x1d589eb07658da6745d3338a527d8d7d55dcd84668cc311dd7bc36b4fa986806" - ) |] + ) + |] ; [| ( f "0xc6267c6fb40e8f33e2392535ba8722ac7731b428fefa36e7795f7fc8149a293e" , f "0x394f974b3c94bc9e640064522007d67752dcdc15992d1b60e79f685759543332" - ) |] + ) + |] ; [| ( f "0x8588093e8fa50ce4f0a2bca2f49cbc3790f60f37072025dacd5e6fc1191f1101" , f "0x10b0e9576dc6a64e23b4ce3f9d59cf4091037a798c7d3e141f3c8a14d146722d" - ) |] + ) + |] ; [| ( f "0x4a0bb5ecb8948de900f5fb041103b117510b413026f345dfd9cfa3d3e2af8e1d" , f "0x49524771f94d78263e5d40bfb71bb4e06e266c07107d9607d3d4d4a18cfeb51c" - ) |] + ) + |] ; [| ( f "0xffcc72a5e20cdb725630c7e9060b4b22c8cc751727b1b1f2cdf43d721e228317" 
, f "0x690a13a878976f01d5edc6f011f62578a25b49c4eb643e0c5a87dd9e673be13b" - ) |] + ) + |] ; [| ( f "0xa6e99cd9db8824c459ecd62ee864b8135abd70a91ac8dbf591f554514f8e161a" , f "0xf8c1c6a4b4b22321d7264247a403f659c6c8f1974afed47cfbaa596dfb7b6614" - ) |] + ) + |] ; [| ( f "0x0200a5d54c088c7b75540c7727bc77d62216582c1cec643d72f6684a1d6b9c21" , f "0x53ccc50b476ca9a70c875f73becc08001ea001e247558df6ee7ef3d3ea5da41b" - ) |] + ) + |] ; [| ( f "0xcb9e29e5712d4a3f347a7f1c775cc475dd7312983a2aa3f8f5dee6217621500f" , f "0xac7477cf5870bee0ed5c35f23e61e2f872c9f3e19f9121ee66bf6406301ab734" - ) |] + ) + |] ; [| ( f "0x9478b5c5f6f09a090bd0d1f59c340b4ef4c3007241343d49565d968f39b77a15" , f "0xbf9a542a4258d9e1d3aa4aa5f04f2bdf2fbc789282e759e4958831338b2e1e37" - ) |] + ) + |] ; [| ( f "0x0f938ab1af3604b847edbe18c0e742d1ee2565afeb6b2325408fd2f05a068c0e" , f "0x4842d71f7a5e1b73b339108d21e9bd9a1b723362512f73eb8d6f67d06d456a09" - ) |] + ) + |] ; [| ( f "0x28fcddd1711b96346ab17a8e5795f031fe50700150c91c3d506a3a7210573e06" , f "0x951f01656166adc80d4a6987e18ffbe8dd154a16364662a4308f8c756e280315" - ) |] + ) + |] ; [| ( f "0x6e7ca6a0444721308bdde067d75bfe6daa7602dc9659e9d7be34d4e2ed2a1d02" , f "0x9c5034df6b084bed0f31669874db752b97d295cb06a7a7bc56385d55068ebd35" - ) |] + ) + |] ; [| ( f "0x1556d3673c6d4c5ec0dc2a6920cbdc9be61cdc6b2742f273354ee41b47cc5b24" , f "0x53f23f8698970f1089bb56d6ed478cb75ba6024b8fa7baaf18ec3fd47e3dc02b" - ) |] + ) + |] ; [| ( f "0xf13510e07b0736deb5d6729fcf963b4d8a6d4560adb53ebcdbdeb873fd91ec03" , f "0xdf7351a2aa429b789f7c3f896a3ef5063d3a0f1d6b1e3e0886686d6ebc9aa017" - ) |] + ) + |] ; [| ( f "0x3b757a69151e69df512a0c8d75cb5f7b38b58b547af99c8dcb2a6f00729fd405" , f "0x39b0fb61920037eba0b9150a6f3c401f26c397a22aa868a9a5a30205763e2c35" - ) |] + ) + |] ; [| ( f "0x6afbbc47717b7566554c05165072afa126465416698839efcaf5bddefbaacd31" , f "0x1d6507810c573180864df7ab11b574b06b3dd3a68a4cf6ea3245d14ef683f639" - ) |] + ) + |] ; [| ( f 
"0x8238e672ba34e764e37b59eaed174fc212195465f283a7c9e2a685910413513f" , f "0x0d2b75f44af68b49f5b7f858c44673bef0f032f2dd2c261f1ed94b793768e215" - ) |] + ) + |] ; [| ( f "0x1b9d26c48c590fb3b99516b7e147507e57f6234257b268488f080a2a114d2011" , f "0xc4065f1636351230ff422096bddf4beeb774f7f4034469cbce81387e26844017" - ) |] + ) + |] ; [| ( f "0x67326d8adb4627f6d08f816d39c96ac807e50c7e38dd44e59510a7296d08c82e" , f "0xf0f7f0e41e21186be01e61d4889a196bfa01d1d063173c4588a58c3ba4305131" - ) |] + ) + |] ; [| ( f "0xdb28da8cffc61f573fc573e6938c997a62de755b663cc13f79ea891ea3bfed34" , f "0x3f963e204ed08dde4faf45450af5c1414a307b96c7f8ec72f00f9740c522a839" - ) |] + ) + |] ; [| ( f "0x78db65ce85d1874229b5e3f078202afe2e8aff913cd6c0cf709f6fd28ae4a002" , f "0x6ffe02a50e23ef02bb7c068bf0696e29a060d8238902af0199dcfd882594d627" - ) |] + ) + |] ; [| ( f "0x5825204f38c68128f15913817aebc301e37a5ceb0897c2016b426262a02c183b" , f "0xd1f051b166a2bacf47b6132b412c42c7b2dbb6c95b12d0dd9463a3e76230cb27" - ) |] + ) + |] ; [| ( f "0x1c0cf54385d93f300709989619a23f7df370eb9f76ada3f1edb858eab2ecbc0d" , f "0x7b9e7efba523e29994dca8445f8a13da6f3b3a89240798d8a28717ac6878bf1b" - ) |] + ) + |] ; [| ( f "0x1726d2e59368f1ca24643d3ee57539eb7511761754fefa217c03abf4bb115c29" , f "0x0bec53d1b3d3218465ed9ed9cc77dbe147565b762e0f874c8fc9882f049fd431" - ) |] + ) + |] ; [| ( f "0x82471e81d949b4e25f2acc6e8c2a80e1e11d2b1b8b7e4aff0aaa3c6bb045353f" , f "0xce3fee0218ca7256f4795de623707574cc826f65ec706630cf5aa13639a9e80a" - ) |] + ) + |] ; [| ( f "0xd555ef85195f6e23c8d1776c7bcdcc914e405d57fd528c9a680fdb960598091c" , f "0xcfb12ee164bc824fc6f6764acb167426455b75942288b709381c9bdefe9d391f" - ) |] + ) + |] ; [| ( f "0xb5a8200429d76e8882be9b08a87266eaac115562cb6be39cb36ef28887fe6e33" , f "0xd20bccd6f1960b101a323987fab8600840e2e30d17d56e1995011caae8476110" - ) |] + ) + |] ; [| ( f "0xa361a2c33b847c39f3d4691cde85d1348149c19e5b29e8dd6b8ce7d1dd2c8529" , f "0x12befa978949fbd94d3ba8936162da4937b21bc2ce7441cf5e749d93c5bfcb3e" - ) |] + ) + |] ; [| 
( f "0x03e59963858be04f192ca26ae09aa6bd87b3b3fac17d0bc70b16fa7393158d24" , f "0x617b33174f2d2841fc74611936d53d82de9a2f5e6f9e47e4e5ccedb091bc1d16" - ) |] + ) + |] ; [| ( f "0x9cc07a224dde9810c184a0c371a621b1cac5ea3b52ba07131699760aa1621a10" , f "0x3cd0c4b63ffa651a36b50ad57e20240e8dc219989b8613d9e64f5dbb22395a1b" - ) |] + ) + |] ; [| ( f "0x6091ff78520e4ebac81254ee94282f00844a06a53787270e4a6df725e4de4410" , f "0xade237fcd9087916de81c284a4efcb42307dd9cb93388f391f855c06b82f653e" - ) |] + ) + |] ; [| ( f "0xa4caca44ee33ceb19592a5290759154733750e69d49e816ba508a8b739316017" , f "0xdec2cb5c218c1b715c825b6a746e7877ecf872ad0c2651e78e843d59bcbe3c1e" - ) |] + ) + |] ; [| ( f "0x81e3d58ee0b4e0ff618b4783663c1a1e05410e34fdad3a03aeb5aab993ada533" , f "0x516b902b66731e57af54d67b3ce90123b3e86041d571439d97d8c08a700d8702" - ) |] + ) + |] ; [| ( f "0xf230e5bfe5cdc57b02a5c6c2298aad2ae4ebfcded0ee5e6f07913d86a610e235" , f "0x61bd800192ee4ceac775325f9dccbdf37936a1ef72ec3c7a79528cfc6bbb3719" - ) |] + ) + |] ; [| ( f "0x0a002a25e23accf5b74c5f3ab83c9bbf99217d15bde0b210c6bf7fb3b5683b19" , f "0x8f434797d28a14ae77a93b51359f43a77b035fed4012c0fb80115cc882bb261f" - ) |] + ) + |] ; [| ( f "0x5439a165f978d7171017c84686c58aa07b95e29af37f8d2b3c679d515bc7c30f" , f "0x3ebf11bf0387deb8f0b2daba8fb1d3263e84f1f27175896878feadfa3e1cd01c" - ) |] + ) + |] ; [| ( f "0x0df43462163179a1bf59929773c8db31dc0ce06990611c869b2b3309aaa12813" , f "0x7482c0393a63c2c80ede302a44285a77d12f3571b22c8cc466afdbc35ce18636" - ) |] + ) + |] ; [| ( f "0x2896896a15cbf9da1b3afb1cd9d002f3a648bab45eabbb85c53d81820105f11d" , f "0x7d5bf9e176cb0a3aec982e3aba7d2c19487f6fb67f0ddc9ef3d4b0b30392e411" - ) |] + ) + |] ; [| ( f "0xeab86ad7b586a4704e5faa557e20adc72ac0f6a3d59902d5ebcc200ae59aab36" , f "0xaef3266867555fe3551556519cbe7787a7d1676ac7ec11548b5de1d9e43ef439" - ) |] + ) + |] ; [| ( f "0x4312b6e600a3b8cb2e776d8b43a5d6a9e3605edcedc78fe911e3a16fa2e8c607" , f "0x1593bc720ede71943bec4dc57a4972267c1a80e66f839059791f48b1b9192c05" - ) |] + ) + |] 
; [| ( f "0x38cd33cad53acb7f2bd3cff50f57569ea495b6b3da1906a1fbd6ab82cd05931f" , f "0x8e0cbf2c04f06efa4a00f28914d886155be570b67fc49274dce684e016a6373e" - ) |] + ) + |] ; [| ( f "0xf573704cc1c84e059b8625e6974d09bf5bd6504e1b6bdc0ce5c54b126326f116" , f "0x4eb1397bb81e12e9e90b1f1e7d74491a6be768cd4b43c75921d5049ac1e19d34" - ) |] + ) + |] ; [| ( f "0x42c0c3f02ddd5f1acdeb8895aa2fb2e186df65bceca3f0738d3ee65b2ce29427" , f "0x3d590b4f721fa38fb90bb88e1a8a683651b454f621472eaf8ccc93ee1f8ff30f" - ) |] + ) + |] ; [| ( f "0xcac35c470098365f2efe40d9432297e948c35aa3d65df989f0eb2c3e55430a18" , f "0xdd6d2de5d4a335b49d678dee81b36b6c9cb476b4009429293191660969710811" - ) |] + ) + |] ; [| ( f "0x41ddc184428f306542fb32ebf7767be6fe98110f55eb4b0f431dd3a5411b123c" , f "0xc1ec76ba4ba38549843f50ef70426f159ce324e5b944a2e6816ca0f5aeec0822" - ) |] + ) + |] ; [| ( f "0xa13e21d78d8ef753088e36cf4e5e966b366285fe64138bf79f8e1c081c380f2a" , f "0xd3e0fac43de87bd94adf126d48f41a5f0058777676f0fc12ea5c722b5a64cc2b" - ) |] + ) + |] ; [| ( f "0x6113cd7e9de3df039fff4adf6ae3570255e60a05146435e995a7fd6279e5151b" , f "0x7b913c20e06083031f00e61151acf7fef59fb5c6ed4eb633b6510442e453040b" - ) |] + ) + |] ; [| ( f "0xd38d0485ce0b5345cb4b3a65d3a52db0177641d35c15d198d8176f751603ca17" , f "0xbb09d90094f55fb64c55af3f160ef423e6d784f217e7e01bde83cd6fb6690310" - ) |] + ) + |] ; [| ( f "0xe60a53b69af330c8efafdd7c5125384c5b46aff2308fe104603a6a263deccb0c" , f "0xa69c313a79197dbaee2f0aa0d95fa0e3c7ed13a49d93f648d1035b18b8544b33" - ) |] + ) + |] ; [| ( f "0x74dfd6dc9773e71153f580ae9886c0a9557779f36429f124720d2c6d45f7de02" , f "0x36f7588930c6dafb2dc65f768d239c94fb09dab378b27c0c8a2ea55d4b89991b" - ) |] + ) + |] ; [| ( f "0x4b89bdb3c673d716565cafa91101d700915e2935cd6b4054bc9d6be590a17032" , f "0x9f7bb6b3db7870317bbe9a7acffd253935d6fdc9acaf134eda9303bff26f1001" - ) |] + ) + |] ; [| ( f "0x8726e827d7ce0ccc60ce2269ffbdd1d35278da67d5c2c045a163bcabe0e0c70b" , f "0x9571748bda73c46ce7f55309dca92f063a80f2ff15990d8501420f616f4b6b3b" - ) |] + ) 
+ |] ; [| ( f "0xed52972dd54cfe475f86e177e5f656178062cfed9117097598592e2c6e129a32" , f "0xb269225b0085fffe4a38af5ad33bf04d45da3652b43108453604a93747c88f22" - ) |] + ) + |] ; [| ( f "0x5aa1aa1c576035c04261c91322d5f5879e8b14e2ee18e3a421febc548a6bfa0e" , f "0x0a34a87a949652a132d370f92d0477929b9bb23b1fd233cc021f45103f92f436" - ) |] + ) + |] ; [| ( f "0x31dac049ed5fb1a5aace10688b018fa4daa7afbe66ced54742bc1557c655113f" , f "0x4d40a3871fde4dda5b2d2300acff7f328b8603b021ae3f8d4212bc2543bc180f" - ) |] + ) + |] ; [| ( f "0x1ace2370252825b911d6801e9b5bebc16a1c8745773b4fc5891117dd90c8783a" , f "0xe3c7318b2ac6b1623d6e8edeee6319617ad9175034e158a8b0f278ffeada7e20" - ) |] + ) + |] ; [| ( f "0x4c057fd69b9f734d467426a152493c886648ebdd7d080249deedc3e010fee23b" , f "0x2fb1ced9ba7bf34604eac04d94e8f77df060f96e89367ac05e456059bab65609" - ) |] + ) + |] ; [| ( f "0xc9e658a986a64bde8f9fe6815c92ba3ba6022c7192869724c6e1ad84a991140f" , f "0x7cfc5eca8f6fd551e85704942996cd4a4568d1edfef092923430203fee085729" - ) |] + ) + |] ; [| ( f "0x2f4a391e8b7522355d5356364928b8b4bc02d967f1641fc84ff90aafe7aadf3c" , f "0xc1b9a95cdad1fda2e42fb286a2014cda317a985d9bebcf869329ac027a585b2f" - ) |] + ) + |] ; [| ( f "0x6b89823ee2bc4eb6eb75b85b98b44e6d9a61433c35041d8521e0335820b7b232" , f "0x7e5606f328c1c9af14eb1e387456bcd8754c49bacc5e3db321ad590c54cca21e" - ) |] + ) + |] ; [| ( f "0xcbdf87bc54f44e51577c5382d1458318a4efb4cb891eaa816768f96cc1382105" , f "0xbc9a68c84ad860b5b2f7a881b1091e5598f6d1196f6a3119e2eff75c7c9f403b" - ) |] + ) + |] ; [| ( f "0x6125a43f423bcddc51c76e680c9bb212a3fdf50fe448a1fd99d6ed3363b7890b" , f "0x933ff80c5a3c2135fd82d43f59e2d96168d033ee8f3c3dd8dcae5510727ff93e" - ) |] + ) + |] ; [| ( f "0x648597ecca0b8c34912f041039271c7c5c8dfde1d5c046bd8412a6897506731d" , f "0xa7e5161bdf9c6af2a23d6847b4ace4b91ef3734fad99ad3b163612bbabf3892f" - ) |] + ) + |] ; [| ( f "0x45c7993eb701b5e9ae6aa78a6ca083b4490f13e25d807f1dad5f70e6f222df29" , f "0x397bdfc96b19f0b04c024e4ae4793052604c24f3d52bada1d85dad083e3af11f" - ) |] 
+ ) + |] ; [| ( f "0x8e30752facb6897260292c950ef8576889844a8358af7be68894eb094763dc38" , f "0xa8445fb0882c222f961e12b8ef5762495ac6e35b64d88b7177caf09efbfed210" - ) |] + ) + |] ; [| ( f "0x8c49a6b6f3342adaf212af9205baa6aa1cbf201e233e6544ffa295c73d968135" , f "0xfa18da52b9590bcab1158e7e65e49d2f6c88c947f13d479b7c6ed6109719e30a" - ) |] + ) + |] ; [| ( f "0x3f79997395e36d5913d76bb3e5bab0af8ac258f70f42ae4103b08f4841b0833b" , f "0x4cc62eb3dbdae91953596eed1f87a9dedcfa0b0ff7cb629d0806c7223b0c9c24" - ) |] + ) + |] ; [| ( f "0xc57321e6a92950c8b0b65670bbd3592b921d13c4b2b42bddd56bc406753c5520" , f "0x419f5604fae0711ca201e814e0befa39bfaac2b88e719c0f58cd2464553b1605" - ) |] + ) + |] ; [| ( f "0x7cd650aa5297c31960ffddc098370aac0f9a981e8e2d17ffa8175eb4e0260116" , f "0xcb391a5390dcb5e818e3eccda7cfafc6e3aa3f80fa6d49e8ddf7dc7d8969041a" - ) |] + ) + |] ; [| ( f "0x2d70dc1e43ac60c1c7eb27a9cb8c0f71773ef065c594a69e955b003c12f1b209" , f "0xb8956f46bac40cf714f3c988a3e7dc5bbd219ec2bc0ab8c52e0815a92819c43d" - ) |] + ) + |] ; [| ( f "0x6d12dde4490932b2a022c67f40aa11200764d7eceb921d33472eabda0e371218" , f "0xe288292e697e7ff4df8d542c4f9fcfe0963d2e435a52b5ed3f165983da338f24" - ) |] + ) + |] ; [| ( f "0x0f7109fae7e0bb32fb657464323850b05be5713bfa5df046920cff5dd5787a30" , f "0x21054f6df3cbdbcb15ab23e8e5ed60f81956f3ced467b90765778869af254e13" - ) |] + ) + |] ; [| ( f "0x413add315af80b9340afe8ebcadca759995c0baa8b32ee87ba4cffa057823c23" , f "0xab4cfee7406f20abee23a805234a0cae6f53dde2e0c060f530d14b8f918ca216" - ) |] + ) + |] ; [| ( f "0x1129ae4f60c39f5cd81871ec049bfb758f708a99bef909e106b70da4e6165a28" , f "0x99ac18dea2eb266d6bec255f13751c7b1bdbf17c9f59bb72f10bfce1caa7102b" - ) |] + ) + |] ; [| ( f "0x4cbb9a0a88f0f010347be0447558a176a65569dc20cfcaa7a38c9a7d76efc52a" , f "0x4b384e7e9b6044bcc48e0e274e9624d589824afda68016491301e5ea1d83a600" - ) |] + ) + |] ; [| ( f "0x78ae543578a2b2d20fc8365e15683f453732c08f988bb5500351476804f5362f" , f "0x3e05aeccd614b8f590e3348089885bd34210efebd7bbabc4ac66ff0206182a0d" - 
) |] + ) + |] ; [| ( f "0xae763250c0b7a94d3b1807b61215f23ce7f15df17fa29b901dc57d62f2ecb42a" , f "0x46a3352a24c1ec7e1ff892f0dba5c78945f416819fc1f14ebaf09d1685128029" - ) |] + ) + |] ; [| ( f "0xd2bd5e7aa4770d9bd77de43b600061ebc3d8c0195ce74c716032e8b4c9778e02" , f "0xa3266859b5fb1490b696100dcecdd7631c95d50448e3bdcb4c2cd44221b8b43d" - ) |] + ) + |] ; [| ( f "0xa0b4d180c0fd8ecd8c46d127c7146f373056832269eb65349fe5019923a2801a" , f "0x09832ef381da9fcea3a18550d2839d690ad6eadd8e76f146387f5ef7fd363e3e" - ) |] + ) + |] ; [| ( f "0xfb75386a42468a8d4c3533743c533af6ae4d281eb711ad4d12dbd74399a6bc32" , f "0x987d4edd082658fbee718e01b70af1c16df73622f265fa448573174b2b6a0b08" - ) |] + ) + |] ; [| ( f "0x67f9cdfba444a2471e28abcbfd399fc6d0ef792f54dd838d00f99312b7fb8127" , f "0xecd86484f4534bb8e37ee633e00c6de0251e2f10f5aa069ac1329ef99857ed0f" - ) |] + ) + |] ; [| ( f "0x3f1ada7102f99782151a25a33231166bd4e227797ee0faf38519edbd72e16b16" , f "0x265adaa9aceb224801521ffdbe74011548888fb21e84bc23e2c465b24508d70b" - ) |] |] + ) + |] + |] ; [| [| ( f "0x8d23a4dad04545005e0bc76064f22ef78022671777a7347d604c3cb67bc45e32" , f "0x1e9fccc5fb84e063b4b93d0af056e73b93f7b6d7396982c2bc55fc9d01080909" - ) |] + ) + |] ; [| ( f "0x040a5ce45972faba5f11e50095d7bad6ecfb2743f51ca6b83065916c7a263e04" , f "0x0b05ec5d43467188cb5852bb04e45f1f843d55b44dbf07270f53f7c96008ea05" - ) |] + ) + |] ; [| ( f "0x30ad25750f8c953d88940b7145b0b952ad91081c513808b00dc076c2544c2736" , f "0x4efa77bb3572be362afc0e33e4dc9a44752d9f4fa2ea97e01528802c8c66d133" - ) |] + ) + |] ; [| ( f "0xeffc7970525c69715492c2fd89d648518d2456b27f0df861b25a8df5f98ff715" , f "0x205ced0a4b1fd0ff0f10ebb8e383ce130e259d0ef12a236f7f962c1bf9da351f" - ) |] + ) + |] ; [| ( f "0x858c38b8b70280f7353183f9758f6a198da54bb343cf68eba8ad9e1673d4603c" , f "0x98cc0db0cd1ce3e9c0b0bab8d8a70d57fd531d401318eb7ecbd39aed46ff3404" - ) |] + ) + |] ; [| ( f "0xad8542607a8a028852c32396a89a9a01f3d9cf1bd5ac411b7dc0346713a9c824" , f 
"0x59870b6e1dc401c85fc8c550b803eef94f2c5a180e19fc177b76b177171d3e0d" - ) |] + ) + |] ; [| ( f "0x37b8b208bc04b773c5d4c690540f48f8c5b465aea6ecfccab9d71fbf8f678b14" , f "0x3d9479e71976f18ab7f92169340ce1cae9f043f23240e4932a260fd2a0f5a108" - ) |] + ) + |] ; [| ( f "0x01c7a17d9bca54af5269d85015f1593a3102d8ae1238b29e335d6779ba558b28" , f "0x8c8451e25a5cc499bbcfd99fd9f9c595aed44a754084eba15b74c627688d3f0e" - ) |] + ) + |] ; [| ( f "0x2071e30c16ee58cc19cacc3f222c7753fbe6d28c7db13ee679997e987d064f08" , f "0x9307f35c484731722064b1b8cfec7c278db2426c6affc4cd94079f092f5d4701" - ) |] + ) + |] ; [| ( f "0x710c054b52c4f18c5cd7588dd23a3b0e88a6149fac6b739f4cf8386efdd2a831" , f "0x95af3d4b183f4158cb4a4e298a0c2fb2debbdfc3c1faf484b4ce53fe1ac66e3c" - ) |] + ) + |] ; [| ( f "0x650d07d5f41b751aaaf5e9c272631c20ab6cea2682ace7fba9f35ac73bdb5a29" , f "0x9283c6d2e01916a77d86d445cff2c2aa6d9d8beda65c03e91c46b5c46655680e" - ) |] + ) + |] ; [| ( f "0x0601f13c041a5a3f786a909706cd49a7023129d08e8f82035fe30bfd43131602" , f "0x7582341a7e28875eb20fe42464b90416a7952c6253d6965765facb44c791e12c" - ) |] + ) + |] ; [| ( f "0x3f348718f00ac413a940780f4e4c37f6f71530a5d88c9d53f3fe120ed1542e35" , f "0x8991e7650bfa7894775473916154e2ff4f29beccbb4666ccb6b57bace90a0b1d" - ) |] + ) + |] ; [| ( f "0xbfa1cbf722238571df1b147163ea4173f18abc2320ca3d5781d3a2ded665162d" , f "0x780b7edcaf9f65ca320011cb53f2339b1975db5955315b75a944e4c3efe52110" - ) |] + ) + |] ; [| ( f "0xae57faf7150c4a1d2c33e75299119c390def7c94b064c1bae536eef818b7e634" , f "0xb5855737e07152923880a3e15d8f5f63ff529021f10c6a117bcc7b3f5ce7601c" - ) |] + ) + |] ; [| ( f "0xab9ebe35486bc9db13284794653f72624390ef50ccd21e828aab09059bce8f0c" , f "0x3117fbb1dc83695aaf73078bec2151978d7b30d73969521652bb8579db6ab732" - ) |] + ) + |] ; [| ( f "0x478355f7aa00bcf126ca85c9ffadf247f22a689da8831f22fba8447e8ffbdd34" , f "0x056e48b44f7222bf9a2fdf8045466170fe3e33f3b1761c771e24897d5fcccb22" - ) |] + ) + |] ; [| ( f "0x0aba39d8532e0d06eb8a159f285f96ddd2dec2687970683a14f43c7016029f14" 
, f "0xf0f4aacf806c2ea1ba17f65a5bdf376de439cc68d086316dcf409728f1c01e1c" - ) |] + ) + |] ; [| ( f "0x7dae47b23388558114f1aa73dbbc812873ede7d19adb5d93ef8ff58d02345b34" , f "0x68a75fe7a10caae6cd5f3301620e33085529121c2753a965ba34ab7fbc239722" - ) |] + ) + |] ; [| ( f "0x7498691d8823f8f460e2858ef19ad5bc5baf07626ac95fe55e25fdd3919ce825" , f "0xe9c8e331855ef9c1049284e1fbb76ef6fcae050fb3a11f28b1eb4ccca4e11721" - ) |] + ) + |] ; [| ( f "0xf87d7dd4feb10708becb4ae06acba8410cad996c9d935f79b0aef95e60bf5505" , f "0x7022f3d67763d73dc321bb574443a9a45cd94f35a52acce091574c555691be02" - ) |] + ) + |] ; [| ( f "0x8a5a8ff424d906d635a360956d33ab85125f6b369065d3e4344f8358dff5da38" , f "0x969b929c201475db40a209fa893abe83e35b824f9e6aa6c15728db2990355b1f" - ) |] + ) + |] ; [| ( f "0xd322a9225670e1956dcb69783f4e697aaa44e0505f04b7c7a9ec6a6b320d5b2e" , f "0x13d8d53cc5f117ff49bee145796bb326a0a96182c891f4b66cbd5b464b08321e" - ) |] + ) + |] ; [| ( f "0xa2ff722f9cf2b2ed391c3ceb5aad9ce81f4a38dcb527f6ab9e61d3a5996dd533" , f "0x5ed5b0477fb964e9b2a95887bda04721c89726df0c33529a1ec4b394d738e606" - ) |] + ) + |] ; [| ( f "0x3c1d40c9ab1bc468aba15fdcd035eb9a508639eeb71eaef9b5b7a4d70188e428" , f "0x8e3a39f3d594a3e57cc6d26423b7b644a686854b0ee2d99830ad34653861fe36" - ) |] + ) + |] ; [| ( f "0x30fd9686db290dc88d4762e76ffd0b4ccda8e486666f87f6f15c4b3383bbe10b" , f "0x85e37fb67a0cdf2398e70aacf77b0074fdf03251da76e3b54821f0f4efeece10" - ) |] + ) + |] ; [| ( f "0xa2e8e795343ad968f15a3c20d143f3d65506c76010a00119073f340e04ff5b10" , f "0x80ceb7fcc54393f7503b2aca39eb49f4cb58c483c0ac43a86941f87661217329" - ) |] + ) + |] ; [| ( f "0x899d02a1d91f35bd8cc34ac8278f44703981b08057969851e85435225845541c" , f "0x599d1afa2977db2dd3bbbaca3d14b2cdd54448aa5055c2630991d914056c660a" - ) |] + ) + |] ; [| ( f "0x561da5dffca82e5b3144f8af7d0b2b3a072e06c0f50cd89ba7f1573310ca0538" , f "0xb67104c33e8178264cd5848e019d236167b758f2c073bc2bb0bd0cda255f6b26" - ) |] + ) + |] ; [| ( f 
"0x0c5152e51b68439b452e6ac8bc9ebae48d8b50b18272d205a6b4641a1dadbb05" , f "0xb5dd99caa56d92973d3cfe5b805fac5f8b3b410ffd6a254ff6c04274b213d137" - ) |] + ) + |] ; [| ( f "0x36a353d19943970aabbbd821fe728e391d4acc19138ae5365079cf61f05b2d2a" , f "0x63ee26809245caf2450eb6e2c60ac4fce46c34e2107e29df738651ad37b4ca14" - ) |] + ) + |] ; [| ( f "0x353b87f3aa11798d564d345f0eee728fcddf51bf424d82f85ec24302f0d4c203" , f "0x413ce0c93f89fd657a2551d420fb43a5421e33443eaabf89c69d99c80ab7880c" - ) |] + ) + |] ; [| ( f "0x24a9f435c2e88556e7693625db390dc4c0ac16303e63a943ea73c2ea4491250a" , f "0xfa4fe0fa6f9616e3359e6748a648162342bc7fc5e6e5705aff57c68f08048518" - ) |] + ) + |] ; [| ( f "0xab0354c378515be22b5968082b550b999f0616eac1fbd18a15d1e1f88bed9135" , f "0x340a7fc8139425788b0e7efc77a9aa23a7bd8659593315234c8977d6b3ac1a1e" - ) |] + ) + |] ; [| ( f "0x62bce7c482641b2889dbd273caeceb8aae5cfd56c8ef0597a54e99b0af1f913d" , f "0xb0a7771e0900f385033d6039b4e9edfa2f58808e674a85a86bb6a33d933bd23b" - ) |] + ) + |] ; [| ( f "0xee00d7dd8160d36809fc4541745371b8f967669762f2184b774031cc7cc2e611" , f "0x34875876804f396657e49052e9951582007c5673a2c537ef254caa15e37e5a10" - ) |] + ) + |] ; [| ( f "0x2d521a85f3678ca4898b4def968ae5e916b79c1727efeae334e17d8e6fb1f812" , f "0x48be02e6250cebac77ba83a2066327c3ac7bb988489b7b087cff987f5acd2e14" - ) |] + ) + |] ; [| ( f "0x118787411f9abc2b062af3e7e29bb4f2e94cc1c9fdc437e021b7f63b50c8fe20" , f "0x14ce2807b08cff68755a3a3461c9ef9d1a72dc2507803d6e752e931b67085828" - ) |] + ) + |] ; [| ( f "0x85ab1966f744de9b866b5e6301e548fdb6ce71f629de879110b45adce26e6703" , f "0x57b5e15975441d96911594b1f83def2ff1993dcb489e7266e5d00972212c1d03" - ) |] + ) + |] ; [| ( f "0x467d32230ad7ada0656383944a7cdf3e96b0d8fa776b9c76caac19bec2113e31" , f "0xf525c584795312af26a0f60d69ffda4fb95edb2e5da59a2bd7a9a3bddad4e01f" - ) |] + ) + |] ; [| ( f "0x20f070b5020419fe403e1a566c64820ae00c84621ea0432574a70689cb403f36" , f "0x1e23c9ac5a8ae1ee330dce2d99818e2236f91abb497a7e3284df654c09b8ed07" - ) |] + ) + |] ; [| 
( f "0x9392c15ca2c647af266b67c1ed266c77b33d65d252af57793ced99aae7a29a29" , f "0xe2b023fe0a1ba0db90ee0bbe265066e0960aff3aa8e0c69b95053121edc2e43c" - ) |] + ) + |] ; [| ( f "0x54284641ed8ce2a4a50d2f97526ed65039dedfbe124e4489232ebac8ef6c6219" , f "0xdf2f49091992d13325f3cede3a5919ecd441dffd2fed423f4325510acbf35231" - ) |] + ) + |] ; [| ( f "0x90df037cf4a8878a5aefecae4d5c4311b57d7581c01d86f79c23f112c74cd50f" , f "0xfa6941e18adab217535c2882681db35378904882a8cded30b80ff5d88669073c" - ) |] + ) + |] ; [| ( f "0x7f331b03e53d0aaca7acd5fd23079ae50bbc02f0c13506ebb03eb46b82b0961d" , f "0x44ae529c51dfb965225904481f8992fde0badbc7c28441a06018210f984e2f2b" - ) |] + ) + |] ; [| ( f "0x1591ddc58ca9d90fabbf368a7d2da02293848dd2afb4cbe0cdaa7adfaf39a200" , f "0xcf3a705eb39194c6ebeff50bb8fa1a7b1e8ba57a2c24788a7777ed710449fa07" - ) |] + ) + |] ; [| ( f "0x6293a2b371899911562a76a7f785a3d3dc065539b669e06a27a63a2aea749623" , f "0xffa71849f5ecaef03b66cc6367f6174d8d04c2808a8301dcbca023d2b4aecf3c" - ) |] + ) + |] ; [| ( f "0xcb91fa77b6f3ac3e3c86f0ed48e91cd975b77884b850d281172db39f1f429d05" , f "0x7d4e3cae82f5cf37cca4c1ab1ad8c422fb6a95ad13140b61d5a99ec1bc444e31" - ) |] + ) + |] ; [| ( f "0xf3c0257c7bb97f3822c953e1711fd0bdd07561d97bcc1a0063df5f5d5e224a15" , f "0x99695b6889d69edda83e36e49669091aa3b908ce73e1912b570fa027b6acb839" - ) |] + ) + |] ; [| ( f "0xfcc062eb4cbc3520713116ae32dbf3900e5ce37f927c99f49ea1c499106db322" , f "0x8fac556c8a646c805f9eee1f0aaee5d15a130a9f965455f5257a3e4a9b769634" - ) |] + ) + |] ; [| ( f "0xbab9c3bb39743f42766f5db8811c495d6a580596c7b552292432c95855996825" , f "0x175706310e37836f9eed488d91e996495b282d70219859cb9613bb3f7aee7704" - ) |] + ) + |] ; [| ( f "0xbe5a2c705d874de783a33b3290c8f523dbf85b52ff46d5a61caed4d21d5a0a16" , f "0x35aaf467b665917228ab2a702a27167679f03ae4ecbf85024ccf72897bdec80d" - ) |] + ) + |] ; [| ( f "0x88f1584545a552f96f38a4c5b621b93a9206ba46999793d9edaa831a3e93021b" , f "0xfb4baa7262b98791de5a039d465c4984c88519a199f6ab1f3015b5629f76fd27" - ) |] + ) + |] 
; [| ( f "0x3e11a482bf5b41ac904d7ada45889b214c96a214e25ce9ffcf63fa28b87f6309" , f "0x9347fd69aa9fd6f4a4971df3aba89700bca229af0f711c71baec03c022f03e0f" - ) |] + ) + |] ; [| ( f "0xfcfe3fcf078ea17c669b5781c107b3ff9c32b4923feb7829884e9c98d45f932d" , f "0x50133fd3dca42ac476191c6aca9d971ca68cb6a92f6bb0c8e88a39fe82bf9503" - ) |] + ) + |] ; [| ( f "0xb185b3edb1cd7722780e60558a50fce7213dcb99219bc5c6592642cdcb66cc05" , f "0x01bf39a0fa2fd24f1c6d01d7aca495b7750f0d8f79e714fb2708c881ee899e1c" - ) |] + ) + |] ; [| ( f "0xa347bb85d3a1f6c540842fad82546c33cc72316035765c7448e9595a98b25a2f" , f "0xa0bbb00ec5619eedf1239ae73e3b7d054719578337a6d03c741540affd34d80f" - ) |] + ) + |] ; [| ( f "0x38867a93baabbe4f6cecce5f887577a78e0e1e389e74971154ab4325f3c18819" , f "0x68abe4474abe0c592878d525537861a91ffe8aa3740ac015573ee8e36bb41b30" - ) |] + ) + |] ; [| ( f "0x730265ced4177e2ff7a4a6ea434efa8d56971dc52e1a22b0146d5e0074cd5d22" , f "0xbb40569dbf06549e516cc20823e06d4d36f05756d84ce6b9b7f4d5e2c97e1d10" - ) |] + ) + |] ; [| ( f "0xe0a30968f348087f45aaf8f283da31b4020a8007f6ffe9da039f49ec64847111" , f "0xf19e064c0e3a8473c0f0b4318229c2f498f38e33061af7275336be8c0405432a" - ) |] + ) + |] ; [| ( f "0x4aafefe7936b3bd7bc4a5cd489e3c62396a106fd36746b10f968f604eae3b819" , f "0xf8a12836b45cafcf53a53c4a6d7e7d7789e9801b691a1d656356836609874f15" - ) |] + ) + |] ; [| ( f "0x4ffea1813c0bce879e235dc3a2e6b7acd91014965c7d10d556f51d93527b273f" , f "0xaa56a700fc50b68732b0a226f9e26bfd5e298c579974f8f25ca95e99b9537e13" - ) |] + ) + |] ; [| ( f "0x5bb991f12afb8d0a21cead33dd92a3baedeabb441effb49a8fa0a49e8e96452a" , f "0xc83df99d20d563f205685e4d31c41a5b5655a31c3c73e5f1efdb1dcbf61f341f" - ) |] + ) + |] ; [| ( f "0x01156e79602ea6c461b991e242776908d75bc16ad92c817e241c1542a6362e01" , f "0xd325e98f5eb6e8edef7f85ab5be482149f620951cf972399e0aa916129898d33" - ) |] + ) + |] ; [| ( f "0xba7aa4908156339dd1adb9712d8cac1a8525baeaf062558d21dc086c2a65093b" , f "0xc6ae8722e6499ab71cc26c3280d64bf758e47c88c236619a21d3753092aa5403" - ) |] + ) 
+ |] ; [| ( f "0xc4ccc965a37cf166980609e74031b18bdba59bb42677206b30208520bcc42214" , f "0x68baac5a4f13b3be5eacfb2b12a473c5e61b994eb1552a545f1d065101d61d3c" - ) |] + ) + |] ; [| ( f "0x96f7415cff8249364b8f49e2dcafc4e5ae2adbe1ae9e61a637fd77403434cd13" , f "0x343f6e93f6e0297c9798fe5797d426485adb075ab87e3d12830395c688350f23" - ) |] + ) + |] ; [| ( f "0x033bdd61ef3277da301d9dd8c990144b646f33ce1538aabdfdfe0c0bfc3add3a" , f "0xbfe209e2e32ab85a0511bb4327f16fa0d53b868041b68cf57d18b9b500c1d00e" - ) |] + ) + |] ; [| ( f "0x66aef9d1e673fbdebe21e9909f8f8af444d70d889ffc30e091f7f3ecdde7cb36" , f "0xfda9e9e852ad3ce10840eae5abed632069a986265a4d0dea49f99eba0f83f122" - ) |] + ) + |] ; [| ( f "0xb96c35f8d0f6ca76884942d037f0c9147e99899a05daa6bc628c77679ea4220a" , f "0xb96d9696178533fe2a7a0af2db7d4629396b2f8577edd09544778d35732f9203" - ) |] + ) + |] ; [| ( f "0x9a7bf9e0e3f05382e6e25fc11a0029b0aba06c1e2d2e5bebb627d0cdcff69137" , f "0x2518ddd07a56af02076533347e6f1e12c1cd1ad2152c56126914269851b1af36" - ) |] + ) + |] ; [| ( f "0x6fbee48cac20b129766b761a8057aad906b13bb18786af488142fc29f0823913" , f "0xe2f09b48f68e1e4edf6e38e8fb828a000ec1b07ea9495acd93a85965c48ba72e" - ) |] + ) + |] ; [| ( f "0xe7b9f119ae4506223e818bd09025d7aa1da8b79e2d51f1c475b71e60cc4a682d" , f "0x2f82b763a719ecacc2d0a2240f162598291e7ad242fa7cf3f0a50b80a9f9ba04" - ) |] + ) + |] ; [| ( f "0xd6d6919c79e88f82df5ab69a374fa2771366d8a389cb37301fd08e81759bad32" , f "0x86ac99a370b8a958ebff70663f1fb0ac1d77d43612ca057650299aa28d08133c" - ) |] + ) + |] ; [| ( f "0xc096168836402d39a592a01e97e86183833ae89a778fc6c2092b611c1ceff30a" , f "0xe4aa1bc2aa28758f2541ec130cf3ae98011a803a62b5c59770e7a2b246040a0a" - ) |] + ) + |] ; [| ( f "0x854a9faa5a0936756279964ddb55a56e7ce051a39c813827e9b6b1be29f08724" , f "0x20c5a5ca206bb37d904f58fb2157d2018639aa3f224834272a5808cbe23b410f" - ) |] + ) + |] ; [| ( f "0x58835965cd2ed51dccccaf42a30fe7c781abd418bc774a194cd16c3739d7fd12" , f "0x3d84c47e35971585f20e35f557d5d42d5975362856010c306007a9b81141bf00" - ) |] 
+ ) + |] ; [| ( f "0xb54e8c794b74edaaf3dd2e8a8aa38c278152588c8ec92779f8189183676d2d04" , f "0x663ebe1e361daf3367676ea161399ac09a9a4b00153ed0f7a4ff9f249b26a53e" - ) |] + ) + |] ; [| ( f "0xd3136a39fcac6cfd95f39905aa5a20489225d1ef327ac86c1692bed1ff3cb406" , f "0x4619890ecb76baa24c330f827c0bbc4d117f5e8362c4115ab29c86c46e0d1c1a" - ) |] + ) + |] ; [| ( f "0x457e76032ebf7fd673b10920c2c41bb144708ec54a67214291a93af59542ea23" , f "0x90e34de99b7b84e092cc54edbbbb5bd027de89ea4fdb141a59df0b63dadb9d2a" - ) |] + ) + |] ; [| ( f "0x109845277a2ce758827311c7debe2bdffd77b85f23722c24a790e4573797e11c" , f "0xfc924f3dc251dd567cb0125f01b7caac2624b724e720a2a134af394b8c9b4517" - ) |] + ) + |] ; [| ( f "0x61189de32ba5e24eccb01771216f80e094ec7ac9c2e283b196869a0daf8b292c" , f "0x906c4718e5a6395df17432bc5beba00ae07b68536fb252d236e2a0b3a99e033c" - ) |] + ) + |] ; [| ( f "0x9e19d2e0d09ce9a4a5591ad015abd7d84e1655fdc2d8791b94f2c7707d669215" , f "0x5c820d9000aa7db942f3c0c3db681b998bd728fbaa010172973cbbfa48293b2b" - ) |] + ) + |] ; [| ( f "0x79ca4f0e13a17ab8f2fc953b025d4d10fb551d613f15a29bd8ab90c9a4d5de3d" , f "0x7da35baa53528d6afa1c9ba193cae2740ed340bd4bc0e26ba569910a50127132" - ) |] + ) + |] ; [| ( f "0x83ec57750cbae10256e7b1a8c4b80a2b4906c0f4c179af9fadd3936b124c9828" , f "0x435e831729ef76a887416a5aca7352824fa50a57cb3193c9f52a42ec0cd77e0d" - ) |] + ) + |] ; [| ( f "0xcab8ce72390cff65fda20b84fbdc3646f3e6e387f517cafece0139379b901e0a" , f "0x3913e550eb22c144b8c2882edaf9f627f465c12096baacb6b7349747e10e8f12" - ) |] + ) + |] ; [| ( f "0x22d868e8b412bd18e810225dce64313f05a2a31bedd3e63fd4073a1f6c70901f" , f "0xc83cec8daec6324e669ed556b879878c3fc7e5ce13ac61900fd6ba9cbe513830" - ) |] + ) + |] ; [| ( f "0xdc1e79c6c79c8848c82b1a68fcf44b6ae706fcbcd02954e543ed4a365bb7a21a" , f "0x01d698131c642b38f658ab408079a3d80335ab0bc3a68600bf23a11e65884814" - ) |] + ) + |] ; [| ( f "0x99d70daf43ee9f2ea5233b5cd3719dcc9805a9d7e1cf60225146f5d0cecfdb2e" , f "0xeba510af1546a8eec2c7fda32839d8e5bbc57091dcf7777a15df4523879eae2b" - 
) |] + ) + |] ; [| ( f "0x84b1b11b62c2f9afd027cdfa8a630f754fadc8a272ba4b2ce97640c6074ade1f" , f "0x14b29815a04744522745832b9172c903519f91c40e34b07f132ba92afd539b3b" - ) |] + ) + |] ; [| ( f "0xff890cf73a50f65cb77e245cc9cd189db78d4522bdc525359d6c5f545232b23c" , f "0x541052346286c5337b9469f4cc258bdf932f78b5c59c01797266cf9090c2521d" - ) |] + ) + |] ; [| ( f "0x10c9e98612106aa41fa9d6144715ac276a956780783419c0655e7d83069ef629" , f "0x1e0fcd6b871e2f2f96b1715c040815267a90e847dd9f14296cd139ce4da5ea10" - ) |] + ) + |] ; [| ( f "0x9816d03351d8f6b0e0db4edb40262415c895d98322840851752cebeea32faf33" , f "0xcec1b6c7ef97ac553690e371dcf69359aa6146f37cb87f578d6200de5727bd0c" - ) |] + ) + |] ; [| ( f "0x29db71c5fca4156148d811423b6726d18e1818cd651da78213aa24a278025517" , f "0xb1546de7dc2994998da9b376b1254bc6fdcc169d0f1ba61b07429b9b463c2f3a" - ) |] + ) + |] ; [| ( f "0xd3b7381aba0d51aebb421f49909277526bd30877f43d78e26bd0ecba74324731" , f "0x4b3a31a119b77561d4e2e37b3c129a8bdc70d8f27fdd7d993a76eefd83f34e16" - ) |] + ) + |] ; [| ( f "0x7a11aa30ca033c9f8071b0f6eb016b5339eb28ced5d19637e4356192be30ea19" , f "0x2026a4b577fdd35fb9c579613c27517b5b008cf655610d212abf95a277069018" - ) |] + ) + |] ; [| ( f "0x980bf0ea668221a6368a17257a3a608394c3842d8b4f4b30acd8b6a8b7ee082e" , f "0x05e09c87098bbac65158606329c0877067a8e163282d5cf042a758c089b6aa0a" - ) |] + ) + |] ; [| ( f "0x8b6db0c6ec8d63ed869f4dab6ef3c55573c3b366cf8bf8b1b5dfc05355d5a433" , f "0x2ddf114d84242feca64035b096d3a2c66d99b6ff49e3905441e5e63212d21b17" - ) |] + ) + |] ; [| ( f "0xe2dea8be769a52aecef1b74be67162e8d8da6673a25e048c0719bb254429b90d" , f "0x3b297751a8928ebc4145297ec6d1f53c0cdb8ed625b51f15631ad732b30b973a" - ) |] + ) + |] ; [| ( f "0xfc003538eee4f760f76fe13e3907de4c5de4de85498f592693d8c1288104631e" , f "0x7ca66c55b3da630108dae01b1180a233524b1d2c5bfed4b94723404d1ddd250a" - ) |] + ) + |] ; [| ( f "0x60073e6219dca275ae78174101d02290ffd9c0e470dc4d464371ca0378dd162b" , f 
"0x794ac1587be36ae3a5005b224aebb575c4191ecbb242a1cec87140ec6599d139" - ) |] + ) + |] ; [| ( f "0xc794d3045a0d1902d08dc534bc6d7058b2dcedab59ba9e1d79535e2b69802617" , f "0x2252d4743f37e64ce044f58c90f1670bc2409b640a52a1860fc13c8fba2c8434" - ) |] + ) + |] ; [| ( f "0x9827b14560a8027d9d6a604afef851470590849b7271c93abb9898256086363c" , f "0x05060b97faab0f73988c095048071df7df49397de1b9ce07a35c4aa13e91763e" - ) |] + ) + |] ; [| ( f "0x0509a14603179d13a6c629d8e0a3c0be024942cefc8af0932b97a0ba5c57c121" , f "0x78d6e07cdbe47441e6e5d10fc6eb6ad699e2142d2f23e85902fc69651e29cb09" - ) |] + ) + |] ; [| ( f "0x0334893db4b849f11834efed332163431c006bbdf28ad8ed62748845b11cf00e" , f "0x2fc6e48c85c40e49f96197b445de21cbbd333ab01faa8c1e4d5a06acb67fea39" - ) |] + ) + |] ; [| ( f "0x3dd231f9e260ad52a7cb5690c676b4642856876a32e6597fc2ce36d2c782b603" , f "0x8c774804e1c6c83c903743f589377f66480129e575ec119fcb47bf56d431863d" - ) |] + ) + |] ; [| ( f "0x74806634c9cf2164daa3a06f552e5cc66440a002b262fdf3344fa613e941502c" , f "0x1c3638b75f9cf0adbf5bd802c8d1b2f6fbc1e811b9a4687f44a63a576c2f7b10" - ) |] + ) + |] ; [| ( f "0xe9c9fad6eb52ba3782bcae0fec6b8f29b739581f4fdda95bb3aed92b0ab12f38" , f "0x859a5b95d11f05048c57407b09f3be819b2bc49b4f60dc8fef91f618d0c41b39" - ) |] + ) + |] ; [| ( f "0xdad7bb2821f291ee1368da4229b28de5e3884796e2deb73c2468a925b2c30105" , f "0xce849bbde14296a6d433d7a504424d8b9541148c9cbc68ff9ba00e6cc3d05c1a" - ) |] + ) + |] ; [| ( f "0x07a97d909c5dc914bb43c7aca53466de89415a4b10f289f2fad1e9188391f110" , f "0x57aa741f1fc3f1a6e37be3c87ccfa245481a2547064641cbcafe579e8db2fa04" - ) |] + ) + |] ; [| ( f "0x291b8f925986b0197fc9100f98c0fe29ae636f6d0eeebcb58d8eb4eaeb8f0921" , f "0xd4fa011de2519358530f659bbd55089bfd446b34d7fa0860e33064e0e41f7311" - ) |] + ) + |] ; [| ( f "0x5663c09bf9d57a725551b249e32307b720acb4bb76912594988c9ece27be0821" , f "0x143e023d057033eb52d45d79da5124885ad1ac8a40d56c3576056655240c9711" - ) |] + ) + |] ; [| ( f "0xb3b92bf4e656b3c2bc5ad00b36b87a72bdc48810f9ec36dd94e3c505a126ec3e" 
, f "0x85ad6f3fffb9b5e9145e596b532de8fedb27cde5f537c6e1c1904e215f191117" - ) |] + ) + |] ; [| ( f "0x85931332e59a3a93dbe94b1427decb2833cbfbea185b50f74a5e31e4795ad815" , f "0xd0ca8076fe2eb81dadfed3e7e093c35808390e2f01813ceb55bf655d0396a624" - ) |] + ) + |] ; [| ( f "0x321bbdf4dbb6ee46c2b9177f5259755a551f425a97bf7672a842527154701205" , f "0xe0b263b013ee4f3dfda280762fc0eae585f47f15b1a6e14ee522e91618afb026" - ) |] + ) + |] ; [| ( f "0xf136f93fec6db5f666f52af4d6985f690b1037d13305c944e5599e7c35060631" , f "0xe39768a59968cd460f726574dabf947d5847a20e94b72083810e22ef2702c607" - ) |] + ) + |] ; [| ( f "0x85e2bfb9675f002c878bd1255fca4df6dcf753ec863b026e09f5c653c6ad1a18" , f "0xca53f56f99744a538b2534bb5887e21a95780c5f8423de3ec64223fb874b1d0e" - ) |] + ) + |] ; [| ( f "0x862a83c6dfc722fe98be3d55b1f56773ff14aa567b7b16536e5f3c25a23dc816" , f "0x4c277c2868a75446602fcb2c2138abbf8c8bbfebeb7f09fc8a6b7e55b6de5f11" - ) |] + ) + |] ; [| ( f "0x0b2db49291f78e8a758fbd2df4435ec71f1d90737cfe075180914d48d3937131" , f "0x7d044a46231f19a70539ab8b8d776f2d7ad3597df37154feabfacf0cd52c760c" - ) |] + ) + |] ; [| ( f "0x03f118d4d2afec1b28342e85ffbfbb23e1c845ec52dc5223e1afbf943dbe1515" , f "0xf552c0eecc191881b3d402ffa525a8db5ef4c40486adef4f9481f827a951bc20" - ) |] + ) + |] ; [| ( f "0x7567e678fb7dd1265e31171177879172d44384cb32437d672e9b3947061acb25" , f "0x18b9cb87f945e159d854bff3be94bc770d67bcce15f7297abc82e956d9a4b215" - ) |] + ) + |] ; [| ( f "0x8337d227536a8b3f09b64bfdb9cd06ea89e7df0511fce98b1ce0162677244d36" , f "0xf17acd21b418dbbd36547ab608e1199b0236be8dbea0c0eae6ab251d70665d3d" - ) |] + ) + |] ; [| ( f "0x0688160a65a955f9e13ae6eff46d56a43f90724b0ef9160d77147d42512b5a35" , f "0xbafaea5bd14120f6b8ce21ef10894df9952d5b71ad0b5d707ee04dae527eb009" - ) |] + ) + |] ; [| ( f "0xf63ee234562af35260426d7b3cdb98029fa0ada52f5965303bcfacb7a558e435" , f "0x3b50b8a6dad5c0230a12c99f718ee81943d6dfe969acdcc5edb4efa533ad6937" - ) |] + ) + |] ; [| ( f 
"0xe416dbe731101074d3f035735514fbc34dba76dd96f69d2b06ec7e135d9a0c1a" , f "0x343d64e068c21b571655c686576dce0f391a24d5935a214b6c61b02c701a5d07" - ) |] + ) + |] ; [| ( f "0xc2b557a6e035db34180ed1f8b4c04550445a7bd3b9367d5f380e92ee7918c906" , f "0xf28f8e324c512f599762e547f6c95eeda5135b59b7d7e31de434fca3f79bac0c" - ) |] + ) + |] ; [| ( f "0x68186318be70c6a9bd08b6625fbdcd1f17d1a1da281fbac901f248e19b938300" , f "0x7db059c3819b934ea024a1be5320ab85bf7316223bebaa4dc3a9e6a835df860b" - ) |] + ) + |] ; [| ( f "0xadc32da6eaac37243841762859593faa7cffcd014f20d1f27ca392e8c9f25131" , f "0xdffbe1d897c135f78112f99ad759b144996b7e67543c8beeb44cf02c94cf7e2e" - ) |] |] + ) + |] + |] ; [| [| ( f "0x7ee724503a094860e2898115ce75a9ceb4e745eaaaffd84eab963400c0efc205" , f "0x305b7c441c54115a84033e3689758e0ebada5aa8b866188aa8db63fc0f97ec1d" - ) |] + ) + |] ; [| ( f "0x55ae66acf5c3c6dd6ec1b954fe4d29a84338d6668e54bce64d675988d2d2980e" , f "0x12842f882dfc9335a26e32ee92a361a24c3c73cda1d1879c5e82339307bc4c2a" - ) |] + ) + |] ; [| ( f "0x64aaf49e6fc6fe82e253e67b93474800918ef1e2a8416b3b2c72ace8d12ea33c" , f "0xe4480e29dd7c1b237412e1247a79ff80ed33be7375a5693d59f86aeafc953213" - ) |] + ) + |] ; [| ( f "0x91c1d89e150b388147553889f9175a96d1e044ee91047cf91de1b81393c49116" , f "0x8f2a30712778e95acac2c0b1c129458d2918ec69462ccd0a164750e0b5c09227" - ) |] + ) + |] ; [| ( f "0x1991a95b3898e1e405e1c5cc37e7819b35c76cbfe604af14e349d7bb5e187c26" , f "0xca42eea8bd4c74640754343a50e417a52f22b378b4e506e2925a13403a11f907" - ) |] + ) + |] ; [| ( f "0x50cd3876df05c4b6eea5ac594d1bfb30f5b4dce0431ceb5904f8a31c99283312" , f "0xd61a950c9a47bcefc60316b24cd634b3f7955b8a845891fb217d96e9256d4b14" - ) |] + ) + |] ; [| ( f "0xfd14799be516426cc9a3635d78d5a274608069b996d083763028378f782c1a27" , f "0x6cda7f1466e7d6230227a43bd05315ff295abdb104a773fec6c1525a4b3c5534" - ) |] + ) + |] ; [| ( f "0xef6bb5daeab60b13f2a9a2fd9f9777654f7e030dd7b0037b8db51b292fe73929" , f "0x59d0f1073d54022672491a191aba6662cc55c2eee5203170e9e2e3ee7b26e625" - ) |] + 
) + |] ; [| ( f "0x462823cbff7ed540f40dbd069c613c78d9e3ded534eb0be7448470417b56ba25" , f "0x1c617490d48aa2cb05c6daa733c1e5be2ebbe7b712bb5702e3aed79e77fd8815" - ) |] + ) + |] ; [| ( f "0x84306f2394763c94afa3fcc47a531a7a7310c5fd2e24ce41d506046b0b3b2411" , f "0x10892fe44431988b40ed0c87f7d1179b1b84f319947da6a3204e3bbc0b5ecf3d" - ) |] + ) + |] ; [| ( f "0x9b03923cf29a084779b9399d338e212000a9e9b35937dd77442872cc96e29505" , f "0xee1c3cf570635e0dfc2afad195d3b07da5c0547519bd3ef563628e800a7c932c" - ) |] + ) + |] ; [| ( f "0x391dfb8186f012c79c221fd8e70ba5f9438415fc6609a9189ce99925b8fec505" , f "0x15490673e664aff6ad9934436bf59a088156b1dde59f7acc97c2f5c8b896080c" - ) |] + ) + |] ; [| ( f "0x0935bdfaa382f525eeff3bd919c5c17c71374ef75abc437504d7938daaa69d2e" , f "0xc04cd4084c18a09e86dac5a8e7cd9317406f5e507886b8e7ed056f065de40438" - ) |] + ) + |] ; [| ( f "0x95f730260dfc4587bb30b0fa395cb65240f419a99b0935246f0de41380d3230b" , f "0xc099f536d8650673851739fe72895a3e21eb032ec09bbe87af75a6e28e919e00" - ) |] + ) + |] ; [| ( f "0x08c23f0e7f54eb7ba97fdfe84c07a5a499606a00f93e273b5d73fc2c1a1d7f1d" , f "0xabd72826bcc860c1ad946874d4dcf70b18f216bb90c5117d514cd1d82debfa29" - ) |] + ) + |] ; [| ( f "0xae394c16fdbd0ea82b0b74c72a7ef3a696c91f8128d5fc5254f51f4332dc9e3c" , f "0x649385a15d8d4eb4a1ef03a66666b400bb41c5be96878dc559ac73fb67b69838" - ) |] + ) + |] ; [| ( f "0x340b010183c41e932af533daf17406d138672a9b7d62ef35fb40623857d8f53a" , f "0x42026b316aa6bbc13239a5c953817ef3c851b70e6a03913510b274a1b6c2f83e" - ) |] + ) + |] ; [| ( f "0x12f0addc5a888e95deb6937a370f3f6194fe48b37b3cffa20d23d68e515df008" , f "0xad72e3260f03129c39634cff16c0dd538642d2cab8160d3091dd881007698e2c" - ) |] + ) + |] ; [| ( f "0x2f2164aa63894391cb9eada14b1609a1d8fe0bdd2d5596a27a9d97e526701e39" , f "0x18ff4df1eac60b5fed0937c145d97700074fffccf19ed3a7cbf7f794e4284610" - ) |] + ) + |] ; [| ( f "0x8fe45c21143071d2b697f17b6f3e286e5be3ebabcc7e5696e5b819114afdc91b" , f "0x70e1593f15b325129fc873ff0b12e34c6cbc708a670a3f1c096621aa16c08021" - ) 
|] + ) + |] ; [| ( f "0xa6af618cfbd753c53e952db5c0d5a6fd85b48fe17970658da1daa1a44f1f5e16" , f "0xf6553db18df3bf7aa08b2000570d3b7bcfb508f3640842778e140dd700ba3004" - ) |] + ) + |] ; [| ( f "0x7723b997d97f4726417917db23e199b94c9e862f96c5228c112d73c1afa25d11" , f "0x65d6aab3b7c6192ef1fcb599d9b2611024c99bb3179f6717f23ed5f9b3b6652a" - ) |] + ) + |] ; [| ( f "0x494cc8c6813731608464e4219dbdaa2dc747641048392f4bae42b7e1d39f883d" , f "0xfc225ba03a1bc3978c3c770fbd43f4bbdf79d265bd87fc7c09cf078bafa52c30" - ) |] + ) + |] ; [| ( f "0x2821569191619e4401131667f64cb7d3fbef8ca2ce5bb706f492488e96bdea0d" , f "0x5a3e3a22a572df0da0f8162e2f78d844be6bb77842e02c8f16d26c8d56f32e3a" - ) |] + ) + |] ; [| ( f "0x9a5c99ae8e12b3d8b8115d50c63d0ca758da1f610228baee1fe819a501b9bd0b" , f "0x117bcb8b80a59db3211e2aad092b051cef7c5611e5527e9c4dca884792734b27" - ) |] + ) + |] ; [| ( f "0x400155edfc291f63a5b80b8d338ea778db31633eb9b27eeac77ef2812de3e00a" , f "0xc1c1e5415b8417a5d0c1c49f5076d447136220b859e5ea1c0441d7a24dcfab1f" - ) |] + ) + |] ; [| ( f "0xd39d3a809e87100ede6fd8184755365465552b45b197dc58657624be6899bd0d" , f "0x1fd1f5df76c0bd31951c53fb3bd900059aa856f8db37b1bb5bd287b1d1c7402c" - ) |] + ) + |] ; [| ( f "0x35f2edc86cd05e0fbdf088657e1d32cf02b751f5e847f6472e67743417094308" , f "0x83f775f87dadab6ea140ed992fe7a826d2199eefbb835c4e5943ae1680615816" - ) |] + ) + |] ; [| ( f "0x5c6c59569a2e80b84ee5121912f93cce6b01e27cc907199c5b57354e9a9fb421" , f "0xf13c0f93c2510f63bedf67480f5fb1cd53e68d5bbb8ceca5a9eb12d8cd584315" - ) |] + ) + |] ; [| ( f "0x92e45da5c623e2b63133a7e9be3e1bf85c88ef90c30223c1d34cbbe66701d831" , f "0x8a2f2bc0ac868b3fbd91250e857d880a0648e22f1a96cc752f6a0eb69b07e43f" - ) |] + ) + |] ; [| ( f "0x6a2c400c385cc600abaa9fdb5af35195007ccd49a318daad6a26a87ef5d9e811" , f "0xfbf7dfbe19586698c341ebdaabfa9ee339b745b7f2607371b7f6ee2b9148851f" - ) |] + ) + |] ; [| ( f "0x0d7df0fbf01d65fea6d65c71e34558f5d1cda1da88f8b0ea061a695044b44f3b" , f "0xcab71af9cf8f5ee0e69ff591f8251caf1870285b6ecc346911656ea7a44ea933" 
- ) |] + ) + |] ; [| ( f "0x7348fdd7f5e950931e0fb6e5480a503f1241535eecdc4e479a2f96d77c612b3f" , f "0x1554f737cfcbe146119e4576cdd2d88c3603ec7c44df05102c41521e05be7422" - ) |] + ) + |] ; [| ( f "0x16ec5ef43e415a9ef242b34530dc286bc82d868037d4cac4383fdc61b27d3f37" , f "0xc15c52b3f60e3ada07a99a99f103259a82b2c89001de6b4bb16cfe2f6b85f53d" - ) |] + ) + |] ; [| ( f "0xd3e8f2dfc098d2d554dd89e62038c53d5c5b22940d12364f6accf5c7a1487022" , f "0xd9f0b88d8f53e34f731b49b98f14aa490dc507406b5e809151d9138182d2312d" - ) |] + ) + |] ; [| ( f "0x97023ebd4416fd0f50d18303e433e2d19d843bc5da9b40db44d7e088917d2006" , f "0xcebc666314711ca92fcb1d340ec9c2cd5c8039325fedf807aa3a58c4882f413c" - ) |] + ) + |] ; [| ( f "0x14d5529abe2db16d860669021db3ec6b030784629aa5c83df434e9d0d41e6e04" , f "0x5084779f316df9bac33392aeefe8792af6ef4e3400a578ac41526737df13cc17" - ) |] + ) + |] ; [| ( f "0xc03fc51740b1393444e7a2435401eb174bdae44a785c7565b630b620281d941c" , f "0xdfa65d686f96a1300fb2982a0a464c29de95ee14fe527fcacc78acffed0b4d38" - ) |] + ) + |] ; [| ( f "0x2c7d0540eef075db6215fe4ad18b1212b979f57282a459ee9d83345f5987e80c" , f "0x194dcdfbf81ab8aa1d4491252e564b7c55257a065902a034654c77268e93d30d" - ) |] + ) + |] ; [| ( f "0x591af9b2712cc93216a944ec46cfa6b790fbff4a9a2eb05384c9685575e23b02" , f "0xd6d943ef67a451de22c3dc2bc7185e042eafef43c368fd0058c3f3d92db5720b" - ) |] + ) + |] ; [| ( f "0x0719c37a7e1d7e6b16f370690518a349bb7ad7f5f7ed99e3533e7beeb7e10736" , f "0xb77228c95355683bdb3e935e0cf19a701ae79bcbfdb4325756cb0edd9d21cb04" - ) |] + ) + |] ; [| ( f "0x10756330f6d8756d074f89a95e2301dabe68ed8ad43974fdb617844314bb6c36" , f "0xfd881fc9fddb5e9f650d9844666ceb09c65b74ec6e3233b80228b5f8b41cfd22" - ) |] + ) + |] ; [| ( f "0xd7089d22aaec16f2ea780ad8d191b3ff96ab8987eff6e55e99ef6b0b025eef2d" , f "0x88c203839f360cffce331e91575449d420408e2904a8320332dfe90d2a968213" - ) |] + ) + |] ; [| ( f "0x9dac0208afef00eaeac1823c47544044de3724a7ea383f458c4f268ac8a0f925" , f 
"0x304e4d1d38c50b63b16bca982883d7627b75d8ed6ab80211d3734f8a3a194335" - ) |] + ) + |] ; [| ( f "0x01c822972cb702af8910eb1352fc8afcb88fae84152a7ee5030584e867bf1f21" , f "0xd7648307ace58f160bfea17277041b0da8d0c6dcbb0520476b91db347f2e5317" - ) |] + ) + |] ; [| ( f "0x2270bc1979d5ad4c5b17df6508cf2fa1b23a2f56ffb4806e8dfaf41f6dc83a2b" , f "0x32dee21dc6eb2537d042118222cb1bc993657ad8334b8d7c1828821c0fcb0b37" - ) |] + ) + |] ; [| ( f "0x5d740040dea084161c90cdc3fe7cb9926bb86f743659b7668b92215cd167a523" , f "0xf70849177ae5e1cb204a0062838a4380d8ef8639738f59ce7055334d60e6d32a" - ) |] + ) + |] ; [| ( f "0xf169dc7611bec035740092715ccb753a78d8ac225fb949a5e728cb09c805931f" , f "0xc359af9e45c93c72d19ee4ab41064d412d426ec3b15952760de46705b6d55e26" - ) |] + ) + |] ; [| ( f "0x4bfa824e1b5e967a93914cc6e78204f6f05abefe89a6e2ceae8bc5693429023d" , f "0x8624851df764e9173793dcfa500748575deabd0b5e4788b964fa2d65aea2f325" - ) |] + ) + |] ; [| ( f "0xd7fa1f31ae8dce8aaf85439ac54b9a99a158aa10ce4bd2f07f8a559c76f99f3b" , f "0xb7065b1c3c0ac79107b7abb458a9523d67e408acbb163d353f91efd9650c7f0d" - ) |] + ) + |] ; [| ( f "0x7592c88fd6de4168a62b7932d73382d657091bbad3c4299378dca0ef5c1bf637" , f "0x9a62873e6b234d75faae091477c06a200d235d9f496a764b902cbc4a3fe67934" - ) |] + ) + |] ; [| ( f "0x26cd5bd4b0a96d4df353cc351fd2a7e4755dd73e701fc70219ff2d856eb4931e" , f "0xa1ea39ce4b5599a44fe6f57e6dcfe9111a21841319c5d977827b0e00e9426015" - ) |] + ) + |] ; [| ( f "0x1ca557ac31002786fd18cd302f1ce2d1e31250dacb0d0c960fcfd335a77d9207" , f "0xd5fadda87f45702ccbc6c1e60621f22ff6d3fdd4a3074a5a17d92920b3ced813" - ) |] + ) + |] ; [| ( f "0xed59d88a7fed4ed969c418172f1c1f42f31540969182ca93d3a416f711f33925" , f "0x20c0e33fe93b95201dc568da6cb3c9478db78679a231938d0d55640c3a7f641b" - ) |] + ) + |] ; [| ( f "0x2459ccf384bbd4241082a78714dc51effe7c6319e03533ff2930416c715fa91d" , f "0xb4e092f8a472ea041d96c031af18dbf8ddd6e0375221ed0485ddfa9b8822c02b" - ) |] + ) + |] ; [| ( f "0x10c64939780507e8905ac356c22c17257f10ed5403183e89f1e36bc3aae10e01" 
, f "0xe8108829e12768401be7bd8ca93464fe27b6dce3770cc115f293d870e5956e0b" - ) |] + ) + |] ; [| ( f "0xfd9783735bdbfabee858d300b05bf0234c4b3d746b368d949b5cbbd0b0df7800" , f "0x598d5c36deaa4fbe6ac275807e50c194ceec383e70dd980d4152e59b5bccf03e" - ) |] + ) + |] ; [| ( f "0x96a0e68acad73b944ce2842d5739a0fbb2b1fe1ac4c8111991e3b40331310a23" , f "0xc0b7c8bd35dd95239895ad6d4fd9c93a6c5030a3aa4487066e594c3349a6a01e" - ) |] + ) + |] ; [| ( f "0x27c0efcdeed2eacfcbe54eadcc5e4ca92f9bf6cc3abe688b243fd9eebc708d2e" , f "0x18b615115b671a595805c383f874bc96a20ce59d86d9db9ed5aa3866b6148f18" - ) |] + ) + |] ; [| ( f "0x7096741bd623c00f3cb4ca57d7a902bde9d3e08a1ebfb9dd8f093cc037905a1e" , f "0xc6274f28c843bbcbe82bc6ce18bb5e0c86f93a0e2904a3a4cf7340a6ffac8b31" - ) |] + ) + |] ; [| ( f "0xbf3dfb9270d8c4a1ef22563e332b0d752ec8137ca65cf34193921d2396e2451d" , f "0x9be034649e55d6c7699ceed2a8015b1a4861c149ca2cd0fe188054f03b7fae0c" - ) |] + ) + |] ; [| ( f "0xce1807ad1a09f46a1a7fd5519450f2fcf95c88505792275288999c4bd531e720" , f "0x1b88e707ecbc212bfa960e2635270191605df8d16edb812e23d925854a0ee73b" - ) |] + ) + |] ; [| ( f "0x6b40ed2d0e7ee736d5a7eae895418c2dfe20f832cf0d15df98499abca99c0e3e" , f "0x7537b36ce3417557ad0b5c28b1834048139a8ec6954b8cfc541d2ca88cfe8300" - ) |] + ) + |] ; [| ( f "0xf7e72ecb35fd1d61e44268e5c8a6059cea4a5de1cb9f66db4a47dc3bd3ec0229" , f "0xbfed57b4185c3a484997bc35e202c3758e6eac1f9c78ed47bae9b58e50efc80a" - ) |] + ) + |] ; [| ( f "0xc265ccf0108caef88c10d851a5049face9e13acc2f39de44d259306cb51cba04" , f "0xacb63a3dc3763d461df1011a12efb53e4f0d86bb92f8a4cf9274046ee5795307" - ) |] + ) + |] ; [| ( f "0x049da6cec506267f427ea1c9b7fc2f4af8abcde896e4ab4f085d8f95af98d91d" , f "0xdfa0631badd406463f5aa6b574a73878bc94366ee8dc2951373d4f53185a522e" - ) |] + ) + |] ; [| ( f "0x1f1b749d0e9bc20da799c3015850d989ef5414bbc54c9dd7686020f3d727313a" , f "0x0c36cb5f1756c3f576da830826cffb571f900a91138e28420108b181ea622e18" - ) |] + ) + |] ; [| ( f 
"0x8e4a7af4ae7b6a00b84b62fffc7f01a095b06dde20e978fc020aa9d6c967480c" , f "0x36dfef5b454f11b6c9fdf911cd5f364ceee84c5563180b0ff7e6303f1b5f1323" - ) |] + ) + |] ; [| ( f "0xbee6054ad972db282a6e461045b1d9fc86114c5b647127eb73f23eafad66de0e" , f "0x06791d65841ad41b30ac6314e5ce9e1cd22cc143e6cd1503b5df6dcb0ae9cc1a" - ) |] + ) + |] ; [| ( f "0x7aa403513e871f2f443ef91665e15a20053f4a451b304e2e29353c415c757b16" , f "0x0c24c620c4874ac0de643458edbd4ec7f01ff8c3292dbe64569fc0350e6bb825" - ) |] + ) + |] ; [| ( f "0xcb625cc51c5bb2bcc202c58050ba96852ed7cebe3fbab7f52e8ae8c3ba14e316" , f "0x88ca3c06d527286157420b80f11d5bbda13db1601b2fbefd878b1581af1a3c17" - ) |] + ) + |] ; [| ( f "0x24635a59ef3d09d55bfbc0cd5a6bc3b11c36612fb8cf10c49720513de28ae026" , f "0x9fd3884da696aba5c93c0cb780c73975b0797d711ef08f25317dfb11bdd9e139" - ) |] + ) + |] ; [| ( f "0x1358428f44cd9b0a77dcd8d40cca696539546c7712a58d1d698b4eecdd4b7c09" , f "0xf72ceaaf04234e6999d26f4ff26506ff9294666b5288fe312e78786df0f5591e" - ) |] + ) + |] ; [| ( f "0x2f3233bbccf3d5e5a0da95a9bc8319c8645633d3b428ca1516e0daf0ad737429" , f "0x44094fab294a23f2c8ba70c1826357a3b5909c8a7d2a932bb91229d36905a91f" - ) |] + ) + |] ; [| ( f "0x930df3cb7dc92cfb01b6734c45090ada462df19f45064c7a141c29c73d28841d" , f "0x3c37b8097368e3c9a0a645b4cc4249cb6faa63998fe2431d756ca027c5fd8b05" - ) |] + ) + |] ; [| ( f "0xfa9918fd032ddd4e8c3ff6ff07568c4c3be07cbcbd3556ad2d7b1dc141c2da38" , f "0x68e46d8a52c7c6629cee3a4e6f7d7c7387757481f8c758811d175c17ca98ab09" - ) |] + ) + |] ; [| ( f "0xce53c73865fe0531f2f0884dfd345b2b3c79ef448d5c513f60111b4cfa1e0735" , f "0x38d2064124260d08d499e4f2ce93ed088c9bbf654d7a5de4b976e7b5050e5a0e" - ) |] + ) + |] ; [| ( f "0xd1e2b6e04e0f09199cc8776ad4f2da4f9a428452aa325876c9e3b18ea92e8d3a" , f "0xafb2b25821f2d1b8c932771005a15b447e611fa38fff50b6ab9a896b646d5824" - ) |] + ) + |] ; [| ( f "0xa2c43cebc1c898905ff6f1fa40d8d01dd772d51f99923c73620ebe2183c84938" , f "0x781b90b3db7617dfb60a35511be8062e76748229538cff8c0d43027981ac0812" - ) |] + ) + |] ; [| 
( f "0xad3d79d2e4d5e73f57719de7750f2d8b0a5d04107d6a253230273f2e5760f934" , f "0x3da334cc67a14eb404bc29ad265e778b812696363f6583cb96de86680cbd4e05" - ) |] + ) + |] ; [| ( f "0xde3eae867459d15447b9d18b5c65807cf53bf77329dc113a73e7748617461331" , f "0x285c07c4e268dfadf200eeed4875baad2536a6cb9f1239f512c4450543c68a34" - ) |] + ) + |] ; [| ( f "0x283ff31717adc89130e852c86a4455b8f82ee08899cd4efa205357a10f81fc24" , f "0x0d78be87936b522705c350541d281d491d3906be2c344cce7257891ffd4a5b1c" - ) |] + ) + |] ; [| ( f "0x61072b26363ba74c492cf706435ad4ea8020ecbb2fbdcfec56e3b01e739e8c23" , f "0x15e5bfa6bbd9aee8e714a561d9aa7383e29cbeebc6c158839658144755667432" - ) |] + ) + |] ; [| ( f "0x037f74ac4de054a627aa6f005b1568cb5d48d71386698f64392bd6319556801c" , f "0xf3d13bc260d4f26a7c6c1d3de59312731dd914a351c6ba8e44594b6f8266bc09" - ) |] + ) + |] ; [| ( f "0x4a8af1ace623a366c12e6625a0b0e529df5178f91a6c41ed387fa02de7a99b0f" , f "0x88336760ad5893286e7f08ad5b42fbf5aba1748e13c5232468009842f4c7151e" - ) |] + ) + |] ; [| ( f "0xf2801c704502d4958e4a53976f9edad82f1988dd5353242238a2c7304bf58c09" , f "0xa97026a50eebd19befa518525899c2a45681f2177ab903f5f9cb8277657c7822" - ) |] + ) + |] ; [| ( f "0x93f94936753972f337f515e34f96cee12b324fdd781f9812d2fb11424ef1e914" , f "0x30f0d277d2a7739a7337afa32f7c075ddbdb3b2fee922e79f9275c9d2aedfc25" - ) |] + ) + |] ; [| ( f "0xee0dda378359f2a901a96cf5387d372927a064c6b62a200975a004b53d0a0a27" , f "0x2a46449628df5fbfa56d62cd28be9703e28a88ba0d3bd9c3f0e148a1609a8221" - ) |] + ) + |] ; [| ( f "0x79c0ac4eaebd774c31f010e7a50514cf3d12166a48c450b851e862aafa409011" , f "0x3e45d28283135d03989812017d24f61d6d0a5b862ed5299d835c24ba47470923" - ) |] + ) + |] ; [| ( f "0xcfe4bf4abd5fe197e0ab46757c2786ac671843b73e83fe46ddc110dacc52e333" , f "0x60d6ec211acb471ddf1b54c079e4c3e7ebb49d4e6df33550229ee2e49a868d2e" - ) |] + ) + |] ; [| ( f "0x96be256bef8dd598b81936e5982da2e1b6e0fc47ccb6d5fc615289c1d3b4290a" , f "0x6ee388253fa6827a469404897bb2e73e5ed42e6973e1ddf7d83d6e60337f622a" - ) |] + ) + |] 
; [| ( f "0xa081e2806818b57969a0e8f13727ac3ad59b6020e93c542e8ef0d358b659d22f" , f "0xfc96bf396491a62df40a9093a6e83a3ef4c79ac49b0f9fc6a78bdb0304b15127" - ) |] + ) + |] ; [| ( f "0x28f600dbf0d2f42d4697b52a3511b804a01c6595da05be5a64f3ced602964513" , f "0x161805845459abce078686def82ec3bdcaedfbd0fbfadca6998ead672e467519" - ) |] + ) + |] ; [| ( f "0xa288e3434ed352c37aedd9dce96e32d3aefb792d4301e71a9b7913a6161ed801" , f "0x218a329550e25339e323a281622dddd438ed1eddfa5e733945d908865948f017" - ) |] + ) + |] ; [| ( f "0xf5d1cd7eb7a86ed49948772f17415696f9e61ac72b8b7605b393a8db81069d39" , f "0x261c1355efa5d575ea574c1b81c9aed8138665de614e60c5db45d446a097bf32" - ) |] + ) + |] ; [| ( f "0x3888db1096598914fc560df6df731f2c57993df519e31263e9047678a66d1438" , f "0x0b48528aacaf854828c637e455bc950cdf4a1f57ca8376e28e308be3e78aba2b" - ) |] + ) + |] ; [| ( f "0xbdbdc047561464df97a095fced04b8fb376bb32a51d3deeacb2e2e6e35c95622" , f "0xab79eabe190cf3f0526e3d4b6048a56e871b25873bf037c3a4f683975121220a" - ) |] + ) + |] ; [| ( f "0x13f0a9fa5c912516981e96f9bb447fec623a439b17253d9e26cebcc603c60624" , f "0x08089949ee74548506529a559825d61a7bab60d659befbd20dd40adc249bca00" - ) |] + ) + |] ; [| ( f "0xc843780e23480b7b1fcdc3d0cc8e2c58de3f41eb95492a28bb00c071f0456625" , f "0x1392d062fc929bce66e386c45253ef38ff9b407daaac1a3d1cb1e5deb713ce23" - ) |] + ) + |] ; [| ( f "0x3c4b1b025439684e456de43653aafb9bb26acc86e253ecb18ca42c5b5f9c7d1d" , f "0x41146471655ca2b1f18ebf6731360c44ff741cfeed1b60035b932291132a7127" - ) |] + ) + |] ; [| ( f "0xe322496ba2c2f43392c53cee892f05179201b8f0eab011e0c8c2385a4a48de24" , f "0x8477f62e254f8d968b2d72f3608d3fa254a677713de3ddedd51d61b81fa96517" - ) |] + ) + |] ; [| ( f "0x8f79d3b3cd6dbe0d2f59a335aa94108dc9be163b16f72a3775fa2de874cbd328" , f "0x624adcfa8ef0baf0e105129fbc5f4eb070168b7abf159fc50c4b606102fc6305" - ) |] + ) + |] ; [| ( f "0xf63f69f6508490d4ca8c61502953c4fb492330644a1fd194a577f9e784186e17" , f "0x485370c95fc563fa515bf4578c9eebc0093d8da50bdaf053102ada03d5145a25" - ) |] + ) 
+ |] ; [| ( f "0x3663224859aea078f9e1b46c25b8bacb673571b1853786d0aefb276733629036" , f "0x3293edcef33ed400d76d5687394b47b29ef9b8dfcc3a6d84755aa68f47db2d24" - ) |] + ) + |] ; [| ( f "0xb8e708c03217041b405d9ddc8aeb483ddbf8d9228c4d1d67d4669d6261511e3b" , f "0x2e32340e24a1440a4b34ea0c580dee5901eb79b292bd32b169dfa78191ddae33" - ) |] + ) + |] ; [| ( f "0x46b07a1a72d8a835c420caec4e21c21d9365ed5c90cbcff33fbde6cf90b6ff39" , f "0xcc45c6ea2497fbaf52b0a95916d0fc7fd3f92771ea9b4004393e47864cbfa71a" - ) |] + ) + |] ; [| ( f "0x28226074d2a241142698b9853642916906df511c93ce9c965f849f8c41c92219" , f "0xfe72873b6b7f1c10627ef4fb31d9f1eec325afb454de49acbca9b15802f85025" - ) |] + ) + |] ; [| ( f "0x0993ad2c15d5b9599be4d14767ff9b99b90f592a0c633c1ed24847447c0b3725" , f "0xd01bbeb3c26ddded5ec302751ba5f981e1def136a0621583a3c1a037d7e30915" - ) |] + ) + |] ; [| ( f "0x65b09d6d8d5ca285ad281d0d750c974cd9c9df0837b344bcf893d234940d9901" , f "0x476d488b8882994d61c83a9ac1ca3d184367e2ec497ac5aadcc3376d13eceb34" - ) |] + ) + |] ; [| ( f "0xe06502d9b9f00db88356686894b8f10adcdad8218a1435a911e54596135c6708" , f "0xa94526b06bc392ead39793034a1804d5488d36e456f7124608f33796a195b827" - ) |] + ) + |] ; [| ( f "0xaee94b881c5d87161559543ea2e61eab4d088ff9e65470d80ea2112a28efa33c" , f "0xc4defaa9f350730584fd88231a9d69050bd4488ee2f36460df2f5e5f02ed5b2f" - ) |] + ) + |] ; [| ( f "0xabebd7996446a92df29bd3e843308ac5fd5b416c9d31754172fce9fdce73f939" , f "0x731794691118b5d850518b701b3f2ae51d6b3b05740a280a7a15b53d0429eb3f" - ) |] + ) + |] ; [| ( f "0xcbfc91adb2b0cf06576f5fa8cc8edb2a65b590c7ea5a294f773998fe2f703929" , f "0xcf9da0cf35cdabfae94a8ee20d8080897a40b43e8415d7655d03bb31d28e893c" - ) |] + ) + |] ; [| ( f "0xae284da8ae1494e5c7b5a21cf8865d08bbeb3abd9ac69f4f7ad9ffddcf2f5f16" , f "0x3c11978f98b26a04ec8c350fcf5a9fd47a173f30cae0eb3a15a1b087ddb96616" - ) |] + ) + |] ; [| ( f "0xe8e5b83dca9f1b85044ede16c87737ed7b10568435b8cac0381e5b36f3caf90a" , f "0xcfd6dce4980402749d8f09e13771b24b341c386f96f1e962459e0fb7b753e320" - ) |] 
+ ) + |] ; [| ( f "0xa4cf8b9438b5e5dc59669cce9c58ab65c4a1ea635b57628110393fe37e41ca24" , f "0x8d2e14d38c78eadac28ebb3e3a1e1ac1aaf9d9ff61fc36e9798b9b260055f61a" - ) |] + ) + |] ; [| ( f "0x887be5dfc0e2db9e7432957b1256ea04174db25e52826ee4ed3ae9d8ff840c2f" , f "0xca56bb983ca73623f521da070c85b9ebc0a9708e2d815742d5d5ec02a150a61d" - ) |] + ) + |] ; [| ( f "0xd3642d4a50e9a18ef1c32815a1802e74aec5814df3e983a57cab6c4d0fd29135" , f "0x692c106a4635d1be60cca3745df60736d533fed17e5156ea0c6cc7d20592462a" - ) |] + ) + |] ; [| ( f "0xaec8be17b128bcf52680b3c77e5137cea09253017cff44d023cdcd32d2e95102" , f "0xe3451d77ce9507d02882c5c116ab89f8653f19ea96c14ea6217c32721d092f05" - ) |] + ) + |] ; [| ( f "0xae684b503c5d353cfa8cce1b4be9e3b08e4214759d181c22a31e6c96bc46cb1e" , f "0x0999873381a134635ff9b93ecf4195acdf1b0353ff15b8e457cbf66cb1bc4832" - ) |] + ) + |] ; [| ( f "0x3e86477f13354226a67648cb404ce58cff04362c5371ef7b3bac4d5abca7eb16" , f "0x80256db69ebec4169d119674bb652f6f5d64fc9717439cd5830a87f820015f23" - ) |] + ) + |] ; [| ( f "0x962d5cde6e144617b0a1b734a2c524c785291d1c4557012dceabdfbad127761f" , f "0xf98f36f5dc94577bb6557363250d50582e851c68ec3f4222d7853f00075f5911" - ) |] + ) + |] ; [| ( f "0x6de9b14100b4aaed8e9ec31c3f7c02b335a36dcbb123d4291b01396e8578be31" , f "0x50d7e5ee2d2f74c772f6fcdf93b9ac3d2c03f48bc9f1bace21c7638318929413" - ) |] + ) + |] ; [| ( f "0x2f3f9fcd0d854332b0d2a6b89b6f38f384844344f814edee62560770da75d109" , f "0x094c813de23af826e9d6571b93271ee5d446e7748512f6767c2b8de47bc8a530" - ) |] + ) + |] ; [| ( f "0x63a7d021bf4bd3a712808d8bdc6ffcf25afa3113f17062921b7a15da85111b06" , f "0x76c83fb462572d9ae27edec9cf07ed6971c38337361b2d38ede7fad885e13001" - ) |] + ) + |] ; [| ( f "0x9942285bbd426f720c10c0d33a5e028efb4b0bb7cc1409a7c09bffe76ba86e24" , f "0x0eb51aec1942e35957007804d275629244e00c302aca93233eac35b2f18ca932" - ) |] + ) + |] ; [| ( f "0x95d10425c52a1fe149a26b0face55e6dde97dd0855ed1d19a873c5c0206d2824" , f "0xf8b9cb74b0698d6e5b3c451c481591601014861c0ea7711f5fc84fb85adf0f28" - 
) |] + ) + |] ; [| ( f "0xdfb4a3dedfb630b287def359d327ed6968e632eab62eb70f07ec15dfd326a71f" , f "0x1101d6c6d3b8e7818d68aaf7c608ab1a27a74e5be9ee2ead7590772433d2e234" - ) |] |] + ) + |] + |] ; [| [| ( f "0x26041aca160dc9be1f46e53553558c4b510b7e5374e090181aee7382c3fed62b" , f "0xd1b3e853c8beccd16ccc03df4c7fb2ffe5814877d5f51dd724b72a54d2a5fe19" - ) |] + ) + |] ; [| ( f "0xfe73b2aa73484617718058bd467ddc785e717014a22d7540a2f2370f30ab3114" , f "0x98d170d14a7c139aa63f581ac67521487705137b5c549d1299a9839420579408" - ) |] + ) + |] ; [| ( f "0xdca28e3427cdfed2bb58a5ebb89d1753ecca45d7e9be31335c279742b9d25c23" , f "0xad849d399babf8f20cec5998059add228618fb8bbcc13e2033a9b1861e13d513" - ) |] + ) + |] ; [| ( f "0x164f5a93000710bfdb85faf0172a27540c21716ad7ce8ff7fd7654dd2c347b1f" , f "0x67afa8cbf0220cfc903475473833a707f2490682dc6f5f92c5494ebc56859335" - ) |] + ) + |] ; [| ( f "0xeb84c1dd4bb934d84a791efd84c3885cfa26e6550b98096230a96b51d1270a29" , f "0x622ac7bcc044d44769eb07e976c40cbdca36c65f847468edcc1b6e1069dfd519" - ) |] + ) + |] ; [| ( f "0x26f461ba40b3ffe1e4b8e53a9a7166ed101cb70002c290bec562c29cc67b5d38" , f "0xfb4be709be084edb84fe6ba3cb2ff23ef13524ca7bd5b105717416dc9f81723d" - ) |] + ) + |] ; [| ( f "0x6ee8d95958d70bd77a770a9ade27862820a8bbeb122cf6cda57980086c7b2325" , f "0x1f709909abe5280f87e352f0057069c6d7c8832e6f93241ca9a616d339ec9e1e" - ) |] + ) + |] ; [| ( f "0xd6c1e71aa6218ec830b461dd658828af7a1206bd6ec1df128dddbb907ed7532d" , f "0xf8ed94568ffdcb5695e5e92d7fd339056fe05433cbeac22ae386ff98f47b9f0e" - ) |] + ) + |] ; [| ( f "0x7453567aa1eda6a054bb664d537c2c53e3be5991a22826225850b2bae871ad33" , f "0xc563017e3e7e213a671b185114d5a04d2b0f01c4f4bf71c8cf7af8d40ee65124" - ) |] + ) + |] ; [| ( f "0x324802908f56e5938f5ec19a656dc2a1c70c90ab1af6d8f174fa38632f33a607" , f "0x10784f31d31f84c63ad01079ecdb7e8e2bdc06ab822cc0e5182df4e2f9cbc830" - ) |] + ) + |] ; [| ( f "0xccccb321c94aa46099c5702364e426c80f23f356da09e6021d885d1a8de78011" , f 
"0xb89c3fdc0bf64c062c65ad66ff20192de961632e61f7346a497c2df4c37fe005" - ) |] + ) + |] ; [| ( f "0xddb90f9cd5cb8bda803043fab4b9ca22c95028dbc2ca1434bfb30cee4d6a6a3c" , f "0xdc2e618682ad986241e9c346eeb946780e0eb0721e739dc3aee8b8b31fb0543d" - ) |] + ) + |] ; [| ( f "0x459f08a46e5ce5aa0846470fc7b02b8bf0087b98e3c02140f97939aab1a9191a" , f "0x335eb2e143ea8d7089f02959676295ef21c287894f103d3ea7f782dfefb64520" - ) |] + ) + |] ; [| ( f "0x7f9d583e9f5553f051686b50c6ae3347ba72dd74588fba8b31bad306018e131c" , f "0xeef4c42e260edec02ca0044c9e84c6f2ede8bc5773c475e584282cc59906f835" - ) |] + ) + |] ; [| ( f "0xcbf5fefe7d4511253db3e85b57d03a3e3ef0af07864087867665d79ad56ebf26" , f "0xb83347c282315f26ff4dc3d3a406d2f3233de64050140c11851970d8ed284a28" - ) |] + ) + |] ; [| ( f "0x81f65026050cbe0a199b2610da8e502fb31edd794fea3d96968a9825bf54a712" , f "0x836869e12d029be51903815bb5ea830d918bdd7d126a4e4498b3ee30d70e1524" - ) |] + ) + |] ; [| ( f "0x59ecc77a543af9311a12a496c40d6b348791a637f654a0818660692dbf1e5437" , f "0x73ed2cc8eab963aed6d585e14dd235a434091cff87feabc8cbf72074e7c5a909" - ) |] + ) + |] ; [| ( f "0xe9b02e6b44340f3bea57448449147e96ea3423c6997fc3b81a5c5a8d5aa31503" , f "0xacf146c0981f8f30ad23b534367fd15985dbc8490154c0460b77faafeec85332" - ) |] + ) + |] ; [| ( f "0x43fa215cc43fbafbd3760546e7fac09e116cda26e1006e11268c99bc1b25d200" , f "0xc1a057054f645e28aeb9ea704a9a32d2c0caa3f350cbdfc1e358e68a2de09c20" - ) |] + ) + |] ; [| ( f "0x2d795cdc6e6079436cd90c8a7db5c2868232052ec9176491f7ec113e56b8960b" , f "0x248d04fb5322189f8b2520e7ed9af57fa741c26c283c4f916e6ee474321ea917" - ) |] + ) + |] ; [| ( f "0x45b82b79ae7c7a2a1cec29ff39a9e3abd06a298ddb5ad825bae060803d50a928" , f "0x17d347c4268a4392d5852b9ccd6ade1cea782efc868dd6b72d1e4655f16b4911" - ) |] + ) + |] ; [| ( f "0xe26ee2861956a584bdc9d174130fa41dee8818c5eafe46a19153287948061e2b" , f "0xe90b1aceecd283ed70c7ce335d8e3a2665f8479cb17bdb58b85586f80c523112" - ) |] + ) + |] ; [| ( f "0x4531757f1c4ba2180bd5d45e5d91c74b8be56c26651a3782882822cb5586d50f" 
, f "0xfb1d0e4888dad9ad88d2ae74f52b41cafe3e17c15731bb8f0fdd1c6ac5b72b3b" - ) |] + ) + |] ; [| ( f "0x0cc0e8d66f2ad1325d405b78209ca858973ec4a241c1cece6061b47154df173a" , f "0x20e4d7e3b6ff737238af828f2343808d54d77ad78204d3e2c2fe707de2905d3d" - ) |] + ) + |] ; [| ( f "0x2c649deb788712ebde8f8c4193583fdd6539c45fae22ad03c03b77d4ea68f306" , f "0x0945ee705032f877d2a1a2a672616aa439d59d0327665b3eb2ac03035e984c25" - ) |] + ) + |] ; [| ( f "0x6bb4238b1b083940bef39606768b970f941541dfdba4b5f4fb5668439322c30d" , f "0x76766967cffd9ee8e2c4edf2b5625acbf7c796035cfb6530bc4c783bddf46017" - ) |] + ) + |] ; [| ( f "0x2e763eafa549c43041efc9c635ed354d1ff20b8404314759d128baded10e6f3d" , f "0xa7dbe78af6673636755f15e8fe3352cc290561cc2f9aab12c8117cbf24e6eb21" - ) |] + ) + |] ; [| ( f "0xa96822532ae576fc9a9c9e5ec061040f7da37a3d3d6ee19ebd1613c82ebc7f10" , f "0x9df3bc1c28ff99bec63b4df062796f698a0877e988578d5731d2cf0eb77a2c23" - ) |] + ) + |] ; [| ( f "0x7da8f79adec877a5af11a4b9cce51fe5dbe986fa2c5ca722d0fc4585a9099b3b" , f "0xfc3ea263765a626219d0870f9994d1c0332ca51941022906b15573a33aff8922" - ) |] + ) + |] ; [| ( f "0x942114364f950553b4d6102c8d822b906f4e4910c6cd49d9befa4315b469da2e" , f "0xa47ff53ea92f94f04a725c78365d3fe01622132d535a4470b6286c290435982d" - ) |] + ) + |] ; [| ( f "0x2f30e2ee1a6f83fe25e1b194c9484951fe4e2c1c90342f9f693d21ae1ee2bf3d" , f "0x0c6da6d9f4549471b91eadfa1560fcd0512fd9fd4562b7ecf974e9f223b7b43f" - ) |] + ) + |] ; [| ( f "0x393deae278f58156531855dfcaa394a193c156bb8eb07505b6928daf32fcf938" , f "0x3f563c4a6e590b8c96f8d5360f393d556fc57ee6500d37b8178801a0faee6f22" - ) |] + ) + |] ; [| ( f "0xc2065ce4620a981b93ab3b671716ff5743338d2e829017ea3547636bc54aec1e" , f "0x7f96efae61c4430c54f6b1550cff03bb8c32329c2f413f8a4bb63700265e572d" - ) |] + ) + |] ; [| ( f "0x637692fc3ae517834dea72a7e8eeaa5941c6552aa9fe45792a8d679e3026bb2c" , f "0xccac1c1999ca49b5f48ab33babf7858e7d346261aed174323ce6b8c46a536426" - ) |] + ) + |] ; [| ( f 
"0x7ec6bc5396f523623d048bbde1b3e24b5c5a97aa0ba9888c7425a43795400b2f" , f "0xec06d82b56dbe94c377e806c3ca7444882247485ca2ec3c7b455cd3ceabb3b05" - ) |] + ) + |] ; [| ( f "0xbe65f480b52d79a9be6a2be2058d14735dc64b584042620d228f3aaa1c451d2c" , f "0x43f877ca76b3bcdd3d21eb899ba85e7ef3b9c436bcc6993baa5452f820192427" - ) |] + ) + |] ; [| ( f "0x2244b0fd8f0a726fc6458f867a5bd0ceb0da383b4fa49abaaa3e41ed11906a3f" , f "0x108b6bc2edd33d4453d02d79155e177526d3c7dbf951b17d3da0b0439b4c9220" - ) |] + ) + |] ; [| ( f "0xa7f04c09372349a1ea135fae9138f48c493140a48cbb9a432633189c740ea418" , f "0x7710f9cee21c731631c97732bb69b1211a5c00588893ed75ad25d54294471213" - ) |] + ) + |] ; [| ( f "0xb2f1ef030fdd345b6a2eed84e19a00718d06c575865e0090def0c28ac42aa300" , f "0xc6ecde07ebf80fc881ba4effcb6eed0f010ef48f92b3caee576a762810254c2c" - ) |] + ) + |] ; [| ( f "0xae0b201ac8c6267cb219485a02abcddbdba3d251f56f6200046e92f88cda7d11" , f "0xbdedb526fde6573c005f122838b5675db2497e1a84f608f1c183c872eccbfc09" - ) |] + ) + |] ; [| ( f "0x01bdf29126adacc78e2e11dc235a2eaaefcd82bbb8bb18672ab24d4ab4edca07" , f "0x87e4ff8ed0edc609782f53b221d59f7291f8d262b3f853411e07b55b1c90ef37" - ) |] + ) + |] ; [| ( f "0x82d699726dcbfc03c925a7ac91b29b0583bf25cbe0b2ce68968b2bdd3563bc32" , f "0x7116ed19cbe3b79d31451f2a9a3150b2a845ad850ab0f3d1ad0164bb30f57a23" - ) |] + ) + |] ; [| ( f "0xd0e045c64e037d9fa7a1267c9737b78c42da33fdf913869b1d66ca0a0176ad09" , f "0xb117bd99391a92891b6c4478e58ee88415b21b7c7bb1be7ce291eafe1af11630" - ) |] + ) + |] ; [| ( f "0xb1b3e7ab53413695b47f76dc43b5d6b655096bccf05ba1aa5b92598ce5007732" , f "0x9b37be059c8cc534e05ed2978c4809582d99000b08ef1f768fb70c66e11aa013" - ) |] + ) + |] ; [| ( f "0xd220cbfd1d1b1a5a53c9c4e6d1d2c0082cabf2b8739dbfe9441d5cc5eacea23e" , f "0x3e8340583603693e5c74c7347708155b325c3fb680776556bd38fc619ae3b71b" - ) |] + ) + |] ; [| ( f "0xed8c8d430808cd607a3f322cd9e69399d0579b32d70dbd430a8ffb6abcf0fd06" , f "0x2a493db55ecc0d11231a3b4cf1289edcad25a774b77f32f996e4a01accf7480a" - ) |] + ) + |] ; [| 
( f "0xb2ccf73bd2de343784b639e38a463375420197eb7687d999c1b624ae226b4121" , f "0x11a71b5d68251b120751e27a24a42dde8ca38eb64ce41c5e99615ff92261390c" - ) |] + ) + |] ; [| ( f "0x0def3cb2d884ffd3e8213ef38ee3ea87c467ed32680785744643caa5c461cf37" , f "0xce7bf51279a31d5b0f886e65499e28be6963cf065d1de8c6897680d4d84c8b38" - ) |] + ) + |] ; [| ( f "0x93c6f3a4609a3369395e00051924d5cb74dc529e17fb493db8179b0c9fd70203" , f "0x4ef9680933d32e5bf1625268c6889b5a257f828f3d029c8a9cc4dcd6936e7822" - ) |] + ) + |] ; [| ( f "0xeaf1cf2077a3b9640a58ae316fdf1692adf74270839d495d5aacf1623bc1dd32" , f "0xcbcacd3579728ec2d6bbfaf6c0c80c3cfa9bdc6bba5f7afe87d634a4f1ad5b2e" - ) |] + ) + |] ; [| ( f "0xe173e5adcce582b35de77cde0d44da02eaaed4f5cd6fb493f5d64e8c869fa22a" , f "0x7df1d975466e6987ad09bf2517ac95f14926e32b241fa01e2c8a555cfc64ae3f" - ) |] + ) + |] ; [| ( f "0x5c129569e0e8199cf2d7ff63b0dcfc4c562cc12885b0688fa38476b0908a3b0c" , f "0xd46a35017fe45cdf91dc9708eb2ff565a2abdd09302a48de9edc06de23dd1a3a" - ) |] + ) + |] ; [| ( f "0x45e90aece7ef57bc370ed3dd9c6a1a4088883a8d3826ea7e658330384c582f2d" , f "0x748575d01410b6f2c413fbb616d1f5612720f136e89cb737513a56e5abf71f2a" - ) |] + ) + |] ; [| ( f "0x20d710024df62712b03943a2fdc34ab240303fc1e75cb9a71b36431f88a2a935" , f "0xd995feca6a736a5a4bcc567eb76b44a44e96f78d6a17f293aea8a9f82bfe5823" - ) |] + ) + |] ; [| ( f "0xaf45ab3e904bb7cc97642c0fe83d46b463a5bbb28073736e30e826e9a9350228" , f "0x26760fa2078a66fa88028ff664f8989ed1ebd52a3a3baa3eab7accc5f3d0fa32" - ) |] + ) + |] ; [| ( f "0x74b2ebfa3a14dbea39c10a10375dc1367cc512e03149a52b37a880336a8e5e06" , f "0xb55abee2a7665bcb90a5f78472d3d15e57b8e6d5f27f4803b04f8c6cf246ce3a" - ) |] + ) + |] ; [| ( f "0x09aa9421fa8845b71f831bbb74004e060afae454d32ef09b7f3a59a2b46b2d17" , f "0xbbf3c1c7bfb34be5b825b1d79b00e54edf9350c9e46e73c35af34d8ba6f5c825" - ) |] + ) + |] ; [| ( f "0x129f9ea069e3fc639cd6e56d7419e9ac8f4075ec8210100ca5dcf4fccbe69e09" , f "0xafae02eff77a351385e78f7d28c6b5eb5488ac477d7da159b989c3d26f765121" - ) |] + ) + |] 
; [| ( f "0x0d4d076837b4ef0f3aae73e542ba04649beb4f8d9bab803603c124122b45b11d" , f "0x918793007dbff2132e9eccdc52446f59c6bdfdd144ec1d817ff76ef8d9454636" - ) |] + ) + |] ; [| ( f "0x9598f70f5e09c991e80107602f7ac198e76dc1742a1a4043aa36e2513d887437" , f "0xade225d4e98e7dffa337ef109401c95d26f48c7aa9a293853f1c6cec40c0080c" - ) |] + ) + |] ; [| ( f "0x78f1810cf7b30edcb1a0b7ce6fe8159053f89104b9ad9888289ff5cbe73cdf1b" , f "0x4b486dd9f9cff6b31b2c871e5e1b631c1eb3ab6d29dd6fc24a004e797dc46508" - ) |] + ) + |] ; [| ( f "0x3a05fe796d468cc9f5b0199836ebccdb8f8a2fdbd99cb42dadaaef716b102722" , f "0x6f3932da880c6131e1623addc69226d5e92c102b14e048f37cdebea57be5ab0b" - ) |] + ) + |] ; [| ( f "0x57204e5f5dbe0918def1fbf8c21da296cbc6366981182911acc9698fa7e2fd2a" , f "0x312f591934f11f227c630cdd65c82e28a689ec6fdf8e178e21181448d4c09127" - ) |] + ) + |] ; [| ( f "0x55809699435afe78016e1bace7eebb608c5d089b2c7c5ff10daf739b39184633" , f "0x6ca1fb74f288e928c0b3bccabd307d1aef429731c84668c137fbb4d7b486561c" - ) |] + ) + |] ; [| ( f "0xf2582009e524a51ffba851a5cba7fb7a99d1cb0c1e371b3d9a044515d5b28400" , f "0x683b1c4e2670c4f111fb7107163c6d565c77cc89329adca624ae6c0df399371d" - ) |] + ) + |] ; [| ( f "0x391b76d1d0d774bf87835b2b7b0a237ec003885b88990f45176aa82b25887f03" , f "0x14ad510eab06e01d09eac359a1a9b981a6d9b5d72f5c85e0b947bacfdfcd7f25" - ) |] + ) + |] ; [| ( f "0xe00205669668632d6dc7dd1618880553791abf22f26948dd18cd21ef4f02391c" , f "0x187c6d9a74167d724eca698b57fa71069fef39f1ca2cb3ce5b1fa972c2ef5714" - ) |] + ) + |] ; [| ( f "0xb5c278c956b5a89554274019c538c586e44aaab36146123dbadd87746398cb12" , f "0x0cef038ca566270a08ef2ac7cbc5f9488bf466fb729f682c85d3ad92d6e95712" - ) |] + ) + |] ; [| ( f "0x2bfcd6d288d4ffbec7a9ad3adaa7c00e1242d725406786793848e96538da5718" , f "0x8394c37895a166e035d931d08582eb42920c25c229efdba97f2976adbf959638" - ) |] + ) + |] ; [| ( f "0xb8def1039b4ca312ebcdea952a558730cdd3dc0f41085dee6d808a8d76d0a92a" , f "0xb2da77dc5c1af5dd568149eb07a5a823398b41e475717a672a70751b8b9d7a23" - ) |] + ) 
+ |] ; [| ( f "0xa54ea2a397150c13d54f14a6a49bb6c93a8ee4b6b4f646c4b9c463e565bbe515" , f "0x6ee4281fd8e5a5dd08d5eb933b4cdcf2aa92a6c3e7ef4714a0e52f87abb82a30" - ) |] + ) + |] ; [| ( f "0xff95fdc9ce5eff2f84f38c4800da178bed0ed1f8d38e1c1ae0e33bed4f884a1b" , f "0x3c42400a86126fe1ac2451023bccf61e8865b55fc21bc94eab49d61438571336" - ) |] + ) + |] ; [| ( f "0xeaf4f7c258281e7f65dd255bca996895650b304af51747d2bf57f2646ab0de30" , f "0x51d232dbc7b39a528e15e3abcfda5fe0ce25613f584af86d8e66a201b6670327" - ) |] + ) + |] ; [| ( f "0x9984b0f3558420ea2c354d3943146ae1877ebdecee07586528e775d7e5290329" , f "0xb97cbcd0e0c2581649a48cb4a4e08065aaf9a040b04609bdde5322d792eaba3a" - ) |] + ) + |] ; [| ( f "0x287fe9532095fc7dd4665d81981d0b74b2bb697838c207c977bc09fcda4b4023" , f "0xa9959483c2312ac4ba1208277d3d59d32e02c4473b566dda83f3cc321bd2470f" - ) |] + ) + |] ; [| ( f "0x2ff7ce968cc806e2ca62943216570f79fcf22896d4da822eff45d5cd653cc934" , f "0x0a9d5a229e7958996fdc6c5599d974512bb524672d376835f7cb6f54bb80ca1b" - ) |] + ) + |] ; [| ( f "0x81ddeff2e21a0db611fdb1a4c5d5249828bd91fb608edab5aec22edbe38c4b00" , f "0xfa864d11ca4746ee68e26257901ac5ae130aae21d5b5e8b0b775c1bee402600e" - ) |] + ) + |] ; [| ( f "0xa4ee6d29cb5795fa6b10142419f5cc6912d40ee533ec5e9f7a62dd8bae1af11f" , f "0x8438331512f1737ae8bdf1da407a35f4046e96fa3c8e165973041a922b03f020" - ) |] + ) + |] ; [| ( f "0x2955aeba2488d186be379ebc65f74eceba9f7f86dd21ff0e9284b68853e12e28" , f "0x864ce4666bb27660a7be66f7df706eea407269fd67049206a7b0fdd31f06dd00" - ) |] + ) + |] ; [| ( f "0x1f6b537294bfab505e7abcd0994a67f0d1ea077787894271c161b77d2f18d114" , f "0xa88f664736008cd04a76e57bb097764f42d98431e3a7cf21ef1ce4f57717192b" - ) |] + ) + |] ; [| ( f "0xa9dbac6a7fa9227e797cc4a24b2215cf79a1b958fd8e327e3452f7b8abc81b35" , f "0x40482c27172a6ba0f4e5dfd6e27ad6d32367868ebe19956f61cbd4c05d80a523" - ) |] + ) + |] ; [| ( f "0xde78850542e81179a2cec79eef6e1aa64510a5dde53f3a16a193db64d15db30b" , f "0x1ea9838e22ecae4e4c7dddc2cbc7317304aa09bbd5eed99b161c6bbc717d5f22" - ) |] 
+ ) + |] ; [| ( f "0xe02bc203d0c1a584db3f8a235e73e2b827f73b2acea0d933fe3cb81d02ded80b" , f "0x541091095469f739ff468b39ca20f61e59681ec8dfc39d2a4bcecae2c1a17d05" - ) |] + ) + |] ; [| ( f "0x926663d1ac305f3ab788ea33ee3025f9bedc861350aefffb5da94a299779b203" , f "0x84c265c845cebb0cfb5ea402ada71b95baaefd25dc037b9f5c4580209ccb4439" - ) |] + ) + |] ; [| ( f "0xb3d8a965a18880157ca5cb1aef9e1061837a08ee0a5e0bfc68dfc6fa3560b302" , f "0x2cca3b7142e1ef2e1ff21c8124a138860a6936a9744e7ce5b5c5a6cba8282f37" - ) |] + ) + |] ; [| ( f "0x758995aee583e7becd8a27fb7b7214b380d299eb35218d002e6a634a26489a2c" , f "0x9fad5079e68c1c924dde12a99ff025a523fe430059df42b581ed6e0733fc041d" - ) |] + ) + |] ; [| ( f "0xde35f23bf6c10881155ac8cfe82f85d3f25356a09cc69e0438b3cf290494ef36" , f "0x47d2cf2a06e9f9168799f2c00bd0d0236d67c7e6aff7e4c92b60211b57953534" - ) |] + ) + |] ; [| ( f "0x3b1561ebbce503f8750edadd3d83226df4e85814b9408ba165635c0ec6e90f34" , f "0x73e480973d1451c2486a3017f6fdf3ec07218a942df0edff01951fcfdb22b62e" - ) |] + ) + |] ; [| ( f "0x0a399043da91d43debea71a0469791e091fd97923b74e234d0391635d465160c" , f "0x60f03402794118888b8bb17e1f3d45c3e8518f62eae9555abb69b7e9797dbd38" - ) |] + ) + |] ; [| ( f "0x58c242d52c008cffebc9ec447dfd26edb6fc96cf76e04d1d70b3576925af8026" , f "0x05ada8e86ea24b4abed741bc504b5209c1fd91718c8460da2745a924c473d226" - ) |] + ) + |] ; [| ( f "0x1927fdba1122f39e36f7d3d9e63d2819b05d2b6c9676fc7fc953a88d20a4cf3b" , f "0x793b85e63cf0ff287509340a6812d862c848e7fd73d6c1cfb7dc49cdcfcd683d" - ) |] + ) + |] ; [| ( f "0x468098c8de61626d4eac5e7e1f2f36d3d4b4b8a21761f5a6acb6b3efbe93af2f" , f "0xf6e6b5c4f3152d4bce5859072e28e41b48c69706cc24b15275e73c094058e81e" - ) |] + ) + |] ; [| ( f "0xa4efd9daa844aed69cfb2b7b807978a84c20a1eb236c9a711b63df8c69d2be13" , f "0xd68acf6fc019aeb100c4fe0661fe39c4c92925e36f57b2a09f6084b1373a9307" - ) |] + ) + |] ; [| ( f "0xf8d323847c2dedb88bfbc7f6b830ef0e7785dfa8b89edb0aae5f16cf8366f30a" , f "0xbfb0ab8ab5480eb51cd60ed8dc2fb9068600309cbbbd74d62ef5b6d80068611d" - 
) |] + ) + |] ; [| ( f "0x3ccdef45c4f56bf3ada9b2867fe6ccaa403a7859e92c0c1c19f28741ef692d0e" , f "0x760ded0e245814a2ac3da20d2f63df9ad85485ab57abeb223ef5fd65867f0122" - ) |] + ) + |] ; [| ( f "0x59b40abaeec02bdf16d6119d72f65e591614c490b39e79aad321cb167570ea21" , f "0xaef43a97d5f6592fe138901d7850d89f22e20237434e93453567539509d4e523" - ) |] + ) + |] ; [| ( f "0x295fd2c10582683167548bc30da42425640d0096a0e2c5936b9a81b1787f7201" , f "0x5ae3bb714664e7a7fdda24d61d20232e0692cacdabea235609e15af1b700dd2e" - ) |] + ) + |] ; [| ( f "0x805cbc9b2c57780766c7d2d54344867f6a6441759a914fb620f6e825367ee823" , f "0x4f0bd437926419a32bdf4d2da774de6eb9e031ca2f4027d6c42e5a5360035927" - ) |] + ) + |] ; [| ( f "0xa812aa92fad7b28566f365368bf34dfde680021a09055f5bedeab9f9bfa15d22" , f "0x111dee53e6ffc7f261a63a14d89ebd543eb80081b2fde5add18b2e1bd66d9715" - ) |] + ) + |] ; [| ( f "0x99df431471966b3fa46bbdf77257a66bd5056026fe23ff453a8dffc44d227c30" , f "0xd71696a33f029836a58bbca0c9b0143606f0d5eda724dcc70e3b3253364baa07" - ) |] + ) + |] ; [| ( f "0x8e01a5d32357f0b950c9dc9f6691f0eb3afa1219cc992dd513e935d35db54b07" , f "0xb8cbf5d3dafc94c7d368f3c7543492722fc8a827c8192f6986acea6c279e2208" - ) |] + ) + |] ; [| ( f "0x042e50d26773719985dd99e900a1ca5563d09983ef82905adb74a5e0a325df2e" , f "0x3045c8f3fbbe49430bbd79a6d851c7ab27733ee44e5e78ab3af9e8d021613f29" - ) |] + ) + |] ; [| ( f "0xb8fd08bb044036c8ea77e2ec7ddcebe46f6be2bc0353ea0c3cf652874944080b" , f "0xc80c1e62fff6c293cad5c1f463b998fbeec98e7f4b2ed5b632452d01389c9d0d" - ) |] + ) + |] ; [| ( f "0x9f371f31714f7c2bb5be27fa843503f7b1d4aaa93b83aa042fb08e472e664228" , f "0xc15061f483a3cae6bbb833612c67f3149fa3696bf07c042b56fa0b1681c3e909" - ) |] + ) + |] ; [| ( f "0xd3a9e91e89c519a098069b3dfe32b8a8fec889860ef1a262e3a580176e1fb520" , f "0x19dd4da4d89eb6bb41072dc2f242c703ef9eaa1d81677fe5a7f43bc7b4420715" - ) |] + ) + |] ; [| ( f "0xd7a0cc6c6084365ad7e105a9bdf2877c124e6f5e449d9506e15c53824db5ba31" , f 
"0xc93953dc410b1ef6acf5080755bc2f93551f1c3965ccfae3ec36a44f1cd7b01f" - ) |] + ) + |] ; [| ( f "0xbed6feb4bf6180e0f17daea26b3ed5107dbe5f752edea7564722a16ba679f027" , f "0x9615e4db4f1037116439857dc451b8cc2f1071bd83dbd5a653c8f742e2f6ef03" - ) |] + ) + |] ; [| ( f "0x0ad8cffc1901e27fa066c15c28c66526e3e05e2a7124a3abbbafec4280d5b633" , f "0x58c5d3bfb52fb5c3cee7aa053031391eb7e07b2327ecc6d29e4230f25d20441c" - ) |] + ) + |] ; [| ( f "0xe16a6a7233d2e05e56b12b9d5d0b56d7f801aa4237bff6dec2e3a65be8a5fe09" , f "0x2c434aee236c7a364019ed5d5f98b6361b3553b1c89d3cb59e465f7735a99102" - ) |] + ) + |] ; [| ( f "0x8c6c7325e32edbffa6e0ce15b5ead04585b7c6e7bb27e84e6a42805dce40a603" , f "0x71d75bbd248c5ae29eb94ff0bcdd180ff117cd4d18efb16b962e3599c8ed8c21" - ) |] + ) + |] ; [| ( f "0xe63ac7c4315378645918d1645a984be0ade83fdf99a6ba4d83f0ced06e41640b" , f "0x633771a9f81a0b26effb1265fde4ba749b7b5036adf685ba7418bdda351f5b1a" - ) |] + ) + |] ; [| ( f "0x5dc0e613e95c576cfb01698be9b09fdeb8bc75f5777aab1fe2dea23a13cb1224" , f "0x5eaadba86d1ad8c1dfe659cd1fb6c0a9d6271fb09468d5b35400ff5f280c2c37" - ) |] + ) + |] ; [| ( f "0x0722a569789b0ca2a909aa9907d075965c76084900013357df325b50e2487137" , f "0x6684e4e9680334b8d20a8a10ef7d811c48bc323a7c3624dc0a07046e822c1a26" - ) |] + ) + |] ; [| ( f "0xe92321f3770266eb546ed51f492ef55f1ec9f6ef07c40a5bcc23636de4122b38" , f "0xab8484800ca19f4923ad5afb80b57de011316472e4a0efac96631b48f369613a" - ) |] + ) + |] ; [| ( f "0x2c46a4173ceec41cfe4aeb75c46ced99c0bd25a3f05e95e233665babcffb8a3b" , f "0xbde4bbe047ded19d0bfc845bc827e072d14dd959a3a005372661d1b23c44f601" - ) |] + ) + |] ; [| ( f "0x9d9dc588bb593ae5f354449a9deeb186538d082b534c2b9b75205cc7942a7b1d" , f "0x2287a579d95d823eca094100f67c0374c574dc6853e66a18d67b34f4b46bb200" - ) |] + ) + |] ; [| ( f "0x36a43e224379d5d1033467f063ecdf4961141add597c85d21e647804198e6f04" , f "0x28f4518b6344db751041e6aa0b224b35cc83c31e40f903f85b5fca8cf90f043c" - ) |] + ) + |] ; [| ( f "0x119d30639928b7957f2c5c28772048a93830008313f8779c58e250ac52a93733" 
, f "0x85e2bc9d514645a8aa9c22ade3c0b3a8266864b672d7c026993c89a532f6c417" - ) |] + ) + |] ; [| ( f "0x9e0b9ddce0399aff9de8f50d2be6c1bd7218f63207673a55a3c3d3212eab7411" , f "0x267b776e5f6bfab1afd3935300d9956a9fe5bdd5b7ce7250f8f16f8d3978c42d" - ) |] + ) + |] ; [| ( f "0xef95bd6d1f73cf2d2061a42dd3dbc80c44c90a405094ce28c5c2997694b1f21b" , f "0x22386e9d59285c84273098f1fd31d1797d351c161849e89604766182838eba2b" - ) |] + ) + |] ; [| ( f "0xc26edf318a2a26664a6cb49b5fce6b233e8a85aeeb80498807ffd768d9e1f11d" , f "0x6762d0aefd883028b78c43b5dac49d89067cf51d41cc43e5ddac5e4a43ff4532" - ) |] + ) + |] ; [| ( f "0xedaad68843b2c2be9e8c1182056506990ae7de3509f30d8800768f3fdbe5953c" , f "0x2d849c9af2c3767341aba26618c884768468e5ec6b11a885e6df8efd76f3f50b" - ) |] + ) + |] ; [| ( f "0xed0738dfbd8c1fb20a60cd59f814f238b9e12c69235d120d1c0ab912baba6007" , f "0x80e0233c41339892ada8acf7604c088d7973e3b0274a2f5909df540c9fbc9d08" - ) |] + ) + |] ; [| ( f "0xc0920b5820129ec31fde3382a4cc3bafc9bc33d509dc725cf0ca75b30750930b" , f "0x19b8f226a9d9b83d6dcff44c81869a4538920f645ea5df6c75878388bbeec125" - ) |] + ) + |] ; [| ( f "0xe0e6f315acb756b60f9d05f5b5a9e7997f1fbd1ec7a37ec273c60221b3de552a" , f "0xc9bfb5388b33a6ebe3ba3aa18640a8253f2dbc0ec4c7832cf1b9c91d5f9eac12" - ) |] + ) + |] ; [| ( f "0xa95274fca8cb4419fc23307d2cea80b92ceed7db0f66d8f253a60b211c36b807" , f "0x208a15aaa22b8e9cefe64cf3347d85d36de10a34711b28e07071309273ecad1f" - ) |] + ) + |] ; [| ( f "0x1ac5b62031359b8d8d86007e4414b4a4bb73381447ecff6b284d1f4f4b6f7e31" , f "0x4628beb0fa92cde768e600d9fea27b4925365d6aabd5e09fd3388d7042ea1a2e" - ) |] + ) + |] ; [| ( f "0xeacd4aee57ec785392a744f8d1935451b987736d0ccdfdd072b711fa484e3022" , f "0xe00e82f318b863a09357d5ecd6ee0b01c2329fa333d7295c6e90a90e123bc418" - ) |] |] + ) + |] + |] ; [| [| ( f "0x5826bd99a49e94bb75ae35c1ea943cf7dca409d80fa6c33ceb39e2608e78ce11" , f "0x0697d7cc4282ac37a455b229cd98e2c73ef9637f617443ffa29422a980518336" - ) |] + ) + |] ; [| ( f 
"0x9461725d37489948f48f5924115684341a35544c6c6ea976537860bccb957800" , f "0x8cb22d5c9762873708a14f2da0c616a96600247d8d7de885ca87d53e0893f425" - ) |] + ) + |] ; [| ( f "0x466bc8c526befe946ec81a0c741cbc233be2f749e22fae0cdc46fba2a4ab3013" , f "0x3403dec31275e2d3330ca1cad6d3248214e18cfa6b031effa6b1c1281fa3c836" - ) |] + ) + |] ; [| ( f "0xa319c329d0b0438c0fc1565b49cd793cccf030167cc1e6abf49455f98104aa24" , f "0xe32536c7d2763a056a12d3dd651ded97e63681f009a045b1a1530b279aaf2f0d" - ) |] + ) + |] ; [| ( f "0x4d1e0281fa391e7ba31728e378645a5ca5da65fd85f660b09545cec53044bf05" , f "0x2c2c597b99f5bfc35133b0afdd41b592016e6ea2b27a83beacf84a3eedee4116" - ) |] + ) + |] ; [| ( f "0xfc3a047490094db42f4581576191e8583e465083d05205f50e93d85a557cf737" , f "0xf5e18520268de70d2dde027e7ba954bf5f0584abde0d6e0310878c9d337a800d" - ) |] + ) + |] ; [| ( f "0x33139ce4b0fec2023137f53b5388d063c0123b451b185947565216ded9ab5e29" , f "0xc3cd71ec0f87cfe6d778c11c363b7d2e253759129752e3fdcee726f2faac5818" - ) |] + ) + |] ; [| ( f "0xdb28dc23997eeeed1ef12a76075123f71bc0ec8badb317d7afd3882e0c77d91a" , f "0xe1c157f6c38a2ff0b23370232019222055b7229da99cae04df81dbaa84782c0b" - ) |] + ) + |] ; [| ( f "0xf6dade14e77108c7cb0f633d5f5e14b0e382bb4d0b0da38780afdd5b7743ce18" , f "0xdf211909f5b0494e504060cbcc2a43659ca7e92e8a4e0ae0441f5b51796d873d" - ) |] + ) + |] ; [| ( f "0x4f3292f5fd5a826ddfa805cbfce23c7fec7c36f529f81dfae19ac6a5abed9314" , f "0x2c2883d42f174dc804b48f81b279bdf9ec050a9cd7c27139abbc3abfea38ca32" - ) |] + ) + |] ; [| ( f "0xf2f5ece84863880b8e3af7f4ef16df8a25646037b5f3d9346732a430b9bc5f03" , f "0x0fc6439109390de07b3313231ce377423aac0e921fd8cc3dcd421f8a0e557709" - ) |] + ) + |] ; [| ( f "0x374352b210a8f71a355f05fe3e9b7835facb8998ff0cd862929e272629e60c34" , f "0x759aa135031a170b6e00cb368738b4a710225ddd32349c786f8efb92ee6ddd2d" - ) |] + ) + |] ; [| ( f "0xcf7c5898d9e90a3cebd3123616d096643dfbbab192dd62e4ef7919daeb0e263c" , f "0x012e1a9ec6450cabdea83fbbd15a03167edba17e226dacaf993bb367ccf5902d" - ) |] + ) + |] ; [| 
( f "0x8051d5db1d37332e05d65d6422ff6b4d3a34415840e93a101f0f1fbf34420132" , f "0xa74cab5164fbfa59e315623c52775d731ab85d87b0fbf01da11e9ba5f360ff17" - ) |] + ) + |] ; [| ( f "0x700d4143f22d96d83eb237de8c7dc8bb3bf07e3bb203ab6d5dd13fb501e55b28" , f "0xf93a6ddecc25dc671d693a5a52d6620cdaf584beb41e043877f83a8c4ea9790a" - ) |] + ) + |] ; [| ( f "0xaff47a1a762fd9ef20781ea1c87e22aadc06e142250358bc23a004551d621614" , f "0x31c602a3a42fa7fa2184fc8e68bf81e96009e3d7ba650237c7f1a1a63ff08f25" - ) |] + ) + |] ; [| ( f "0x53eded80f3671473cf1e9285fe5bc53f001f465f33c50e8488368bb9d64ffa2b" , f "0x71b345b50fb204fb913ef0a80e324385e2f621ab2031ec22062caf5b4b0a8236" - ) |] + ) + |] ; [| ( f "0x35b5a6ae6530e636494110a9b94643b7225663d658beb78e8187376deddec410" , f "0x88a23772fc914db0e96cad28ae70af4b98bc5bbdf05918b5e751cb0544f6ca17" - ) |] + ) + |] ; [| ( f "0x88cc8bb2d25d8ae47b4d58ac3ae2890aa01496fea68af312dda1b7d29d3e8d00" , f "0xf3b60efe2bc6969af6539c23e0b8db2d65eeb4e3955dfd765984a8e12cfedc0d" - ) |] + ) + |] ; [| ( f "0xc500d9dd8f0c707196e3aa808617d6600f64f4116fc0a71a45e0af39583ad124" , f "0xb9f18b1aef10b3ef1a7d7d063ea09bc0e37db34479b36b9d51d05acff3c9fa30" - ) |] + ) + |] ; [| ( f "0xb6d8c7a88cb8406537af77a54245beb3d61819ec0e7d9d2bf74ff9433da2182b" , f "0x8a6149da8ed626427b0901e9a104f7c9c28a64d72a6606d7498ff6565997b602" - ) |] + ) + |] ; [| ( f "0x5d7abfc0128708f34ea26a95b59a33fa553fa8c7f33d4085162d5a9a5e011209" , f "0x75685dc5f3e524744a3bd508c523bdc3c4da091c3737260afdb9776be481ce31" - ) |] + ) + |] ; [| ( f "0x7b7ece15406821c147484f2fee8a054f5e22cc6a511a5b1b1ecbdf32459c9122" , f "0x5e154907eef91ab6c0beef976a5934663c2a2c95b7bd0fb4ff7eabeda3666026" - ) |] + ) + |] ; [| ( f "0xb934710cbeda9b9c7ac4830b18f5fd7b85d4ccfde270ae298b998bc6fcf43a1c" , f "0x368cb2558930735da729ae6e19ea793884219b09c723e9b74413a7f2941e0407" - ) |] + ) + |] ; [| ( f "0x1911f143c5b07ca1617df06dabf0fc85c09476e1c4d42e6c83b9389c6605943c" , f "0xe52f8d9f560db5135aff4b29c4b9150a4e760a9bd787c4ac7c6f22749ae9301e" - ) |] + ) + |] 
; [| ( f "0xfbc17f45630c55870440a4dc2816da4246b09dece6e2ea5202e3de8f5a23950a" , f "0x50e01cde81efe3223043fbe44103f78e578336402a7ce434c010bb309555bc01" - ) |] + ) + |] ; [| ( f "0xb871d878d87e6bd822d87d5f680da9bcc136b9ab1d0bc4043da44a8e1727b027" , f "0x2e71b7216100f8af88848abde2108b9acfc863d087795cb823855ea5ca46d73f" - ) |] + ) + |] ; [| ( f "0x51b2c9004ecc508512a096627e8663e0e88652f321e4b68ff4d98a081fa4a024" , f "0x4a6de317ef58dee5fa54ac9826a079299490682851e9869063afec01cf575619" - ) |] + ) + |] ; [| ( f "0x8610f361712d9e23773e4d28f2655dfa1216aeff969a7678744a9ee0819a2722" , f "0xb5141a80475a9464d1ab758002716551f7aacd00699f310ddc1bf0efe8c6451e" - ) |] + ) + |] ; [| ( f "0xdc7d928cc00a07b3df8756e5660613a15823720375e76f504756c691ba94e227" , f "0x91940dab804786f17604e85a5b51f27e8d0225eaa8d3ee9a6d941e1d5506c03e" - ) |] + ) + |] ; [| ( f "0xb867a11a7eb0d2727115b7ba01820a84d22d251ef8a1843ff8137fa04e78203d" , f "0x24dbfcd66d3abe4464a8dd9651286747475a4d00e4454701c42f408652d7652b" - ) |] + ) + |] ; [| ( f "0x79727a68d0a7ea42cf062b06d37090e995c5c5e404e86f906abf7917bf943920" , f "0x656fc1c1e0456b3154ffd9786fd500c550d1064addfaf0d325a2a9a76044592f" - ) |] + ) + |] ; [| ( f "0x0be56318130bb36da677587bf9b8e1c6e5750453615effd68e44c7fcb2e24901" , f "0xf32cac38a014fc649744ccbbc6006df4f8ec6bafb24f7cd76def0b3c85f05013" - ) |] + ) + |] ; [| ( f "0x0a38baa8339f7d20b40034180ae34c1a642584b0ddde3713305ef0c0cc9ff130" , f "0x35c4e416c168ab9e550e646e3f38e32b8468ba9b66c4efc0b63c270e8cf0872f" - ) |] + ) + |] ; [| ( f "0xc860c118ae5b8fc2dc811ec79124a3255249a5e36d1002932b0107cd20bb642c" , f "0xd44f692c704aa440b96faa71899fdae98c7ae4202e08d5f6d610dd7ce49ff420" - ) |] + ) + |] ; [| ( f "0x0676b790dcbc5cf8d57d7056405d184eca1cc97461709b849fe87139769a0917" , f "0xefc577415cbabd1f66919b8234143c5e90bd6a4aac6ad6ba06ff5a7b6a36d60e" - ) |] + ) + |] ; [| ( f "0x28d981d7796aba56845f867a0685bdc686d1b73a3843026ab846a1a0d61f972a" , f "0x258a8f92d5cb2adbccdc7793e42bbf936acf97b42626dfdce11f79dd7a2d2017" - ) |] + ) 
+ |] ; [| ( f "0x54f23fce320b6a00f3e2eb31c9fe89364b6af3a7af6d82307de042f3f41d6707" , f "0x6b39e7ee6a7f625791787b9e7f2f913b4710be4e8a08c1ac2e3dc42ef321711f" - ) |] + ) + |] ; [| ( f "0x08e620d5684a33d49a06981598d29002f1fdebbc2bd09428f32adbefbeb64f15" , f "0xc8cdb464fdbe8697bd7ec01f2ca393087c1256def582bd611b94708de5d3b017" - ) |] + ) + |] ; [| ( f "0xb0b94de23855a8c9c83a8c662231639b1897149dc713a3acc341bc667056602d" , f "0x26250ff1e636e0234dadb8814be5dde407b71545099b2fff4357c447155c9122" - ) |] + ) + |] ; [| ( f "0x2d2e281fb912cc3050021793e72b5bc9505bc4dbd26b63c09dfe317632fd2406" , f "0xe45be44314479036d420c353e79d93451fb43bd8246e43ba3fc7e6922c334d3c" - ) |] + ) + |] ; [| ( f "0x1f6d7cfd3af081836f9e74293b092ecaba6d4abc0565c32cbb4d90407e9f5c30" , f "0x9c93b3d3a2ff095d2ccdced9e8df5dba4a672be721077a6211b2b1b6a15e4e02" - ) |] + ) + |] ; [| ( f "0x1595d41ef4cf0ea21be56c5e09bbaf08f015a4347233e02b25f81f1bf3331216" , f "0x251f9e15088bc08be27fa2c65ccf8864ef4a26494f3adf63fa7ab37b10ac3b0d" - ) |] + ) + |] ; [| ( f "0x88922fbbab98f6254d54ecdee15894fb90074cf779d623dc6b01ca57b6969e16" , f "0xe01bf42f57f6444744f3417b640c0c673da4ca7bb7a03e5efbc74163de97a139" - ) |] + ) + |] ; [| ( f "0xad635e271775fe822abb76f5e3353a022831c481bab52088cb787ab3be8ed520" , f "0xea00d8560927c1ebc924e6badef2362f5c96a03086c1a1a5261d581764453629" - ) |] + ) + |] ; [| ( f "0xb5f9552144cc7eb899ca5fb5d2d27b009737235ebd95d5729d2aee4f5d0aec18" , f "0xe2d9404ddb681740a7e1208fa34543251b9ffaac2ef01af06a5a48ca2db35d00" - ) |] + ) + |] ; [| ( f "0xf9eedafc7e9af2c43cb6b47013253221d4de8716baa43362d06b03c00c588719" , f "0x6a4d1da766f4f4e3d55e443f5f6e16e269707fbd7b4779978d1cd1989db1dd38" - ) |] + ) + |] ; [| ( f "0x9bfe1cf42976c67264d7e93519a7b0a8cce108eb49918cb8adf04661e3f63936" , f "0xdbd4315890e4f52cdddfafef35c59858ea7e2477d78f740125052eac97a56722" - ) |] + ) + |] ; [| ( f "0x0167306f0ae2fb5c1a8c0332f2962187adeb1f9cb334e3c7d0c47e69ed9eb211" , f "0x71772dcea2d081b60aed9ef18ee2596d9f867fbd93586a5e8dae8a7c948fa234" - ) |] 
+ ) + |] ; [| ( f "0x74c2fe3dc02624d5a090a1f91a3dca9edebcb8d60175270d909fe25299640001" , f "0x6dff7dab98de4ee098eb9edb8642898658c95603cf491cc2fee4c77a21b2bd04" - ) |] + ) + |] ; [| ( f "0x8792d5e3e195c303378ad2b516c5f165ac98c895a5ec6bc0b71ae30cca46f30e" , f "0x014aead143022c356c387c11968bfde3a9bd64ddedd28042780304627a76312b" - ) |] + ) + |] ; [| ( f "0xb41e34101787861d79b93cd5c0ed498de31208d5515342eae9a62ac1ae66cd29" , f "0x0b528fbfa3c72b33491643a08acbf676e94280cd051cdc3eceba89784df6602b" - ) |] + ) + |] ; [| ( f "0x2f4dc7e2bc1ea55467992b420fb028fcbf257cb0da3303206affc9ae4412e12e" , f "0xc0fd215be5ec522dac7e62517d1edd58cd87963fd0b2d3cb2afe583a732f6410" - ) |] + ) + |] ; [| ( f "0x1931f947512d3f95eaeec7c4ad5dc6d643c7346e55ac2e2194a7eb666a11572a" , f "0xb58a867e08c2fa2d843190b98a5cef25278131d2266ccb8cc3d82663b4391a1f" - ) |] + ) + |] ; [| ( f "0xd808b2cbf2eff0f9320ebfb79ada9ac0ced1ceb3a692e051db4ca5e1296bcc28" , f "0x9f71fba96d978b6e28dac800631b971e526283d48d45391867f121072cbb7339" - ) |] + ) + |] ; [| ( f "0x8807c0c7771388545861941ad4d15a12c04353c1e51291540b5f229515242614" , f "0x4579818dfbf3f0f08d65d8a7a3f7e014a3783f51058ef6b3161195e81a8d232b" - ) |] + ) + |] ; [| ( f "0x3780379b4e28c5092b16b0a9bf0ce60d53e19176e0dcac6cdc92a7cefdf09a21" , f "0xbe57e7c3598165a90659a65e22f3a7b0cf1c6fccceef008f47a02e40ea80a015" - ) |] + ) + |] ; [| ( f "0xe5fe04545658a22a81c34d46b5077dcd5243431001a4da2b1cef13ffa891352f" , f "0xdb1bc37ce81484cdb2c15c173249c9277616a73e6eec1d267694068251924323" - ) |] + ) + |] ; [| ( f "0x1f475793878b83c090ebd9165ed8c0483c98ea79b1c37aa3b5cf9016314eda18" , f "0xb201c0b1c1233613d783e8bd700fe4759ddb7aeeb13b91c016907bbb51363612" - ) |] + ) + |] ; [| ( f "0x079e62129a60268affee3f08ba235fcb018b5f14a2b7462c46a3c9ad0f5c7a09" , f "0x51475437f12e03f2079207db2c78c0b7bceb0fa23a0887db38a7a6f33f278a14" - ) |] + ) + |] ; [| ( f "0x8238c95af215b414236a8c27bee278b6efaeca822e3f54893b97073f8cffbd1f" , f "0xe89b76e37039d5d2c30af356a901e50a31556b0c8bea9eb71c108b8e4efd423f" - 
) |] + ) + |] ; [| ( f "0x63b4a1f144b068af8661abab472877a48c04f3e5189ae298c0a7fad7f4eeb315" , f "0xd58f495571a909f8b81b8d607513ddc67ea3017514e8d8bd09209932dd2ebe04" - ) |] + ) + |] ; [| ( f "0x8f27617e5e522cc285441acbd50d47d5c5d308e2a81fe25808d587fd0220242d" , f "0xfcf3fea8d705cad2afa2890e27eea48efe12b65d3d9de11f3c1f5c554a8d0010" - ) |] + ) + |] ; [| ( f "0xddfde9fa2abdd43c99aab17709a4acc98981f6aedec6c5f740d004894127ee0d" , f "0x69c8407cb98fffa4b1da5653cccb9d9317944251a3a4dce860879cd4e6c4bc17" - ) |] + ) + |] ; [| ( f "0xa8e8be643081676ea8235234aee8ec63abc3be76aae8f5fb5e3cbaebb330ac1d" , f "0xcb8f8d347bbdc715c9192a353b598d5b8a19f891e5c8fe661c32889fb2baa009" - ) |] + ) + |] ; [| ( f "0xdb6a09760dc462e1c42538cfb7573b22f4d9e87eca096474202f407c6207673f" , f "0x97693ccc42a5369c537442e5cdf031f725d59ec10933b7ea0e5c59fa0d3a282f" - ) |] + ) + |] ; [| ( f "0x0c10ce459b8bf74af17c20bdefd91d9252e0de5a85f53d22b48e6b609fec0f14" , f "0x0adb1ca8d54a3dd98a505aa05573a1563f688db351ee481467b12384417a3a12" - ) |] + ) + |] ; [| ( f "0x2919ab5073cdd318480c7b0a22bddbceadfb25f592e07e59e944ce79bd6b7f2d" , f "0x30fd17bd939760870dc036662915776852812cf05a8986a3a2fc3609bab5993e" - ) |] + ) + |] ; [| ( f "0xa2c03b6d19e9ac24564565248bad23ed3ec8e86eaee8039cdc9e255fdcaf2a39" , f "0x539b51a52d042cbdc287eff71dcd831c1f9571a8d0ecf6fa48c158c9eac2243e" - ) |] + ) + |] ; [| ( f "0x95a032e805762cac80c0efee5f1a1bf5714cca7148e2fbdd9bf001a82cc73c2a" , f "0x463c59fdbb8c1a62f29ba4f0e0d26e3cf531a217c03ccb8a63c411c232989a00" - ) |] + ) + |] ; [| ( f "0x531a5d4f0eb3766660c535d0abff574ddffaf673ad2ea20836c4f73a3628bc33" , f "0xc2baf43b5be6b4268390107476c6881fad36c028d127f8fa7589b3563d5d8425" - ) |] + ) + |] ; [| ( f "0xcaf6725735feaf404a9676508b65f45757ed3e9a9115d46a93e0e2d3d1bd2130" , f "0x88f4c6abc827907d804123f33dab560ee61829e31486dbc680d001e6c203cb32" - ) |] + ) + |] ; [| ( f "0x7171a3eb523c063b4a0990747abd4941a82a209728c7fb2209dba5d0655e6336" , f 
"0x35a73ed8a41b1f131a3d64b034cd40da7d98de3d71a344588740325446713531" - ) |] + ) + |] ; [| ( f "0x92062817a93455518042ec8f44d0e3f523deeb479a01bdab4b5aa24e5d2e601c" , f "0xa105f08c8ded01d9479cb13c69c72eeae9f8a85d23cfb38369ec74e553cd6206" - ) |] + ) + |] ; [| ( f "0x11780b95bbab2c86bb91eba213571975951e553362c99ee5e1406a724b6d420c" , f "0xc5e9ca9588eb4dcf90281f2cd03761764a99edd96d1bea35e179878e9230e32a" - ) |] + ) + |] ; [| ( f "0x40fdc02a0633b8c8bf13f4e5ff2610649554c8de0e3fd5e3f0b7ec47ca173b2b" , f "0xcbb75b83b8d2d595e2759175cb0973b91eb77628d974fb89f512d51b943f7512" - ) |] + ) + |] ; [| ( f "0xd10a0faf28a09c6f2bf0ff77345fd559184b8eb16f4284f4df7abb73fdd41914" , f "0x93ab861be051aec2d3bd44b838ed31aec199df5c41d6e4478cf6c6e685a69b37" - ) |] + ) + |] ; [| ( f "0xf64b3d7a6dc1a057818c7381e6fd0500c671c80ae5995a3254711420c1dcc233" , f "0x8b8a240ea56b311d14cdd4bb8ce1d3a27e69982fd39102e41a83be2f5694ac16" - ) |] + ) + |] ; [| ( f "0x85cba556f4e2fa4199603fc18cc2e5c5f87ccb69aab5a477efaf6b34d737f638" , f "0x6922d04684bd44435d748d785422632a046907be903275ea2b4fc91a587cf317" - ) |] + ) + |] ; [| ( f "0xa51af6c31a8b6dbedd0cf6ee844d89d88cd6597225148f41e768f8e860a7df27" , f "0xbe7c2385a842f771e0cd9af2561cccee45ca4da11488f5614dc410ef9e136b05" - ) |] + ) + |] ; [| ( f "0xabfb722a2dbd79aa522c95838cbd61b84f7181917e23bb2ce1b295928484040b" , f "0xbb4340f6bc8b9d57ee1de981742fda6b61b73d43b1fea4ce9999ce4577f5a729" - ) |] + ) + |] ; [| ( f "0x1a916dc5913f897d6efe4a65cfb6b8c1b64b7cc35dd8f750eb07e602a50ab500" , f "0x86d85e1f32f80671e2afd7a9b3481b0c23f48f41c0dc0a1ca4ca76b98588e615" - ) |] + ) + |] ; [| ( f "0xbeada73065474ae9d319ab198479591e5e477ba9475e306955f2ce1879725406" , f "0xbc188981659a52b8f277f04d1318f9a5597730b47be091e9f18476c01ba30a29" - ) |] + ) + |] ; [| ( f "0x994aef94c36598a5d67412dc6c94d9170a5ecb55d6a0ce87eed61c2c1326f923" , f "0x67435f4144f1c73925231b21e87f4d7807dcb030193a0421da9fff2dafec622b" - ) |] + ) + |] ; [| ( f "0xa41332f90fc2923df75eceb74f61534f12b2bb9e57d4fb719ea0a895cd5a1926" 
, f "0x5125364ee0f0c5cae91d84e0bd38091eb54000262e9c6585333576fab44b441c" - ) |] + ) + |] ; [| ( f "0x5125becd6a6c8d3f60520bc3233e530ae9748c7a90dd71a6851f603180d10500" , f "0x98cec978bdd718c6ec60be5098cf2d0584b64ec51753b4c898177e3f02ea7c06" - ) |] + ) + |] ; [| ( f "0x97f41ae0c583d83670c8e8894c6468787df6e3867b68f455d85bd9acb8baeb3c" , f "0x9de15c72fc26f77e4aa550b77315b6078bb8d34d5c9e4f174b8f9b4be52f3731" - ) |] + ) + |] ; [| ( f "0xd84547c291171c3f2fa9e75f31c8c5de6ee99a45fe01b6e3dcda7972872fcd1b" , f "0xb285d7f8e836db9719299763e3506ee4cde37020b31c8692f29283d30af3362f" - ) |] + ) + |] ; [| ( f "0x78e9147204384934fecc911903316e23c8f2928373ee9725ff5582b42701f43e" , f "0xe12981833faea6206c7be6e4200c37f7901bd59e262b63c5fc8ffe85a876b115" - ) |] + ) + |] ; [| ( f "0x5560a1101fb90ba598f786f29780c2f802f41974b70c6358bdb914822f622314" , f "0x654973248e4227cda662fcaad277df4fb8cf04b51ff7772bd67c853371b63309" - ) |] + ) + |] ; [| ( f "0x3e13dc679439fe1e22944e210f829691ad4e2f306f29460e4a45091f98e1e317" , f "0x1ac77d28881fc8f580518b0fe120f6cb94c9fbce198473a680e0a21d27b5fd22" - ) |] + ) + |] ; [| ( f "0x50685bb3dc230cf959946b66809c14d209f9a10bdc5dd4f28c85aa852ba39608" , f "0xec98900ebed512e1df04078d29c34b710adfdb4540e12dd19de6b98f2e42ef16" - ) |] + ) + |] ; [| ( f "0xc8c13766d1ce63443f524ccbbedb891cac1f6b902d6746b3552f604c0266a20c" , f "0xf43e3c44a267b2c2bae7c29445a06b1b98626b953e7ba3e56cb9949f9e024700" - ) |] + ) + |] ; [| ( f "0xaa5b9eaae68af64e2c135d243bbb359b2a0f0c6143d00eb9c5b05ba754d5123a" , f "0x0c50e14b099b85c0adc2a64928be7df118f35a6d8ac96d36ea33c8af2909a725" - ) |] + ) + |] ; [| ( f "0x72d5925cf6b83587b998bb31b257abffd818dceb2497ea7bf57be154ebf17e06" , f "0x84b18b8a35d19323876b11d71b71ed6c796b1c5039c90b1669303569f22ca335" - ) |] + ) + |] ; [| ( f "0x961f90557e90bbb9c9e80cec02c5a5fc592d6d653ab5f733079557fe4f9ff908" , f "0xaab83cbe7dd413f7fc3a3f15c4ed998159f80908e05c8e831ebb2968ed3b803f" - ) |] + ) + |] ; [| ( f 
"0xbcfa3a1035cc4738b7849b81196f8c39efe5e027b7a5e2856f17dc35051c0014" , f "0xad838a87154155f9cd4ce4d249480ceef818e9ce93896e0fd7c7f39c156efe03" - ) |] + ) + |] ; [| ( f "0x745d6f53c13cd3e500b3fbc895a14893d3f0eecf8de3d8101c1c15e9477ba317" , f "0x946b14f1ffe8225171ed3e592e540ce99c68c58689841debe4cec4d55806f335" - ) |] + ) + |] ; [| ( f "0x4ec4e3cc90b849c24141eef2362342f1d508f3bc84cc9811add3fee94a30a12d" , f "0xbb47ad7e0d151f60100c141feadc8a8deb8b6abf27a34fb64a61d961d879183f" - ) |] + ) + |] ; [| ( f "0xbe13ffdfd2468d9f8098681f118285b904d4ed2cd6a1df36fef4ac17ec1a3b13" , f "0x36745306d75690e47491e7fe86505a6ad4812d84c7d79818f7cff1a359774627" - ) |] + ) + |] ; [| ( f "0x8536d0487fb55717d777a9e83fbf5f6db1bed6c078134b8b851812745aab9e18" , f "0x69cb36881dfe3f336178c2bf273eea035d6bc0e9d8573a32aaa179846c2e9d2c" - ) |] + ) + |] ; [| ( f "0x26831f619d37a94856682e38bf5af7de15d1cf266abaf5129033c278ffb31107" , f "0x90642d16047808f3aa2f665674b7254287f14b18d9a46cc07ea3d2e9e0ce7219" - ) |] + ) + |] ; [| ( f "0x3866e82b3ddc60d3fc94c18794e3167cb180a5243d23f4c4b5cad30eefd3e301" , f "0x6dcfb25a35016ee38304fa357180e94a7105c4198127ebd3231673304bedaf1d" - ) |] + ) + |] ; [| ( f "0x94993d6079d3cd44e030316a3d79ead3e09701e4dc1e11c9ce47a7f78c2d8d37" , f "0x02f56c7aae46aecb2b6e691c8b92dee41cd49775dd42e804c11c9ee0354bf62a" - ) |] + ) + |] ; [| ( f "0xdc715ea5e0624970bcc6ee0c4ca951a1411a146e168c3f0e47b7a8519bcb2926" , f "0x999995a3d5773481f90c738751b3ad263937049eff715971807d2429ba16a120" - ) |] + ) + |] ; [| ( f "0xd59f24c150821616361eb104a301eb873e7f7af9dfe2f1abeff50ba1c4f6ce05" , f "0xb1bd0351438805d094de215105f0ce8e442ddaa95deb95d57c343087eafd2930" - ) |] + ) + |] ; [| ( f "0xb2146a93f25dd64d4e3392dd812b0ae2c4f01bd7e4e7e19670cc4597a2d97a1c" , f "0x6dd6e09b9df2422d0f564133abcd712068fda147c8649679efc0b337b8c0fb0d" - ) |] + ) + |] ; [| ( f "0x1e4a82a07bc9e9ca466351929aa4a35cce610485f23953037b80038461200027" , f "0x9ae6af3e734ad524176e7ed2630a3931efca56ef82b674ef88abf6f9157a570f" - ) |] + ) + |] ; [| 
( f "0xf28a2b1bd6c23065e1aa03a43bceaf7863d2447e8b52f191ae3cacee8da87236" , f "0x865e482cd3895598d4e1982cc59135d9e1e1db4ca135703949b17eefbcd08512" - ) |] + ) + |] ; [| ( f "0x75bb9784a7c361e4663216a9f94cf318da8e0ed144115e46f5b90070a43e300b" , f "0x2bdd4de2297bd0954158b1e6e99bb293c465f5335074a11c0f5eab453656e533" - ) |] + ) + |] ; [| ( f "0x148927ac426a3e135bdf47ef0fe73162aaed721b6ea0e8fec1a8e7dc7a85e921" , f "0xf0642c1eab40fe469afbcbe232f34e51b726ef36391f139088d2c02a8b92eb1e" - ) |] + ) + |] ; [| ( f "0x09e5e96e53cf97a3244793b644e581f7756a4d0ff6e852626ff4d5d973a1f336" , f "0x92dd532f82ccb16bfc92e032c3aa63c02d8951be37c3eb477c3274ba39231825" - ) |] + ) + |] ; [| ( f "0x09c92040e3fbf4becb28953ed2d2409e2b5eb204c3b0f5a4637519833417fc3a" , f "0xcb5140cb8fecf1bf0504e4de155042abbf1199db23a8af053c2f5b51b20d5311" - ) |] + ) + |] ; [| ( f "0x806caa6ee2efc32cb4c4479764ba3edd92d15a1c5ede29fd5c42657bc59d0628" , f "0xaf2ac6cb9bb97c6e8902a0eb28910a607c90a6620dc4c699a169b3809dff741a" - ) |] + ) + |] ; [| ( f "0x65a15cd22406be1e4700cecac927a9de637f7db490472af8d9adc1bf21f8fd06" , f "0x8ff00603e4c0c10c5adeac47d3130d8df365d5f4fdc8814ff023da1448489a19" - ) |] + ) + |] ; [| ( f "0xb86fecf911b80ddfa8ae101aeaa771558597be97c6f20204db10cef523c8c91f" , f "0x3edad4857e89943c46864c5fb4bc500193351e6913f021a3ef1085f2b24aac0e" - ) |] + ) + |] ; [| ( f "0xb7ae76e8bbc4ed0d73a7f117804348b5e34da7e59cced8c02eba5556e8895a22" , f "0xd7a98dea125bed4179ce9eaef298a402b2ffc0086a8176d2d882e208329f882a" - ) |] + ) + |] ; [| ( f "0x309440b906376b35534668c33269d461bd80aa3bbcc5191ea20de724e0e7350b" , f "0xa7e47f743c39d4526e7543ef9bcfe7387643973df1f650a7a593b5cc73e48733" - ) |] + ) + |] ; [| ( f "0x64c5c00e7d8201f3ff8f6e60dafa007623783f64f4647b55c50fb6a4fbfb7e12" , f "0x5865fc9daef5a967578e42ccd0d7b7a77be508c0c3363ac44a77af4383d67a0b" - ) |] + ) + |] ; [| ( f "0x684a5872c267acea6a7cb94e7de5aba46f594785b6f6e40c1d5b27e4579a6138" , f "0x23e8f0a562d0d747a004dcb24be41c2de009ba05b168424ac84ee117de36193c" - ) |] + ) + |] 
; [| ( f "0x8ee3cf46a87da39b230c7dc8230bd5412ed85ba753dfba92776d36fa73c35729" , f "0xf3f0718036d01358b0a370853c51cb38a9e580a183cdd82defb7f65ec7539300" - ) |] + ) + |] ; [| ( f "0x6b9ff69366d4c968ed2cfa1212be8e088cdfc16cc60ad88bfac33095bded9438" , f "0x70c2f8f52cbcf1cd25afc9d60b4dc93674358ce686dff5b32584d155a2e19d3f" - ) |] + ) + |] ; [| ( f "0x02e42c31f5f5acc3786c92c91f751ab42bb578a5c701e66d51db25153f58af13" , f "0x26da10ffc246c1e959b23577263871c32c511f39da031306b6e787289ffa3f1b" - ) |] + ) + |] ; [| ( f "0xdc287b794c57df4d355842fb36b90b02616b8cd7eae78bcf28d0dd0f31f5fa29" , f "0x4a5f22beb2f643ac3e0f37f1d5e58d76b8a889ec820d287f68d4240a8cbb6900" - ) |] + ) + |] ; [| ( f "0xb5a93b1c3f956f5d1c0cdf72928cc6a73191f9e91432e7cff82766c0ada03937" , f "0xe543b57a2cec9eaf35f61a361ede3b50b4e53bb82741dfd8552b58f4c2981909" - ) |] + ) + |] ; [| ( f "0x7a8c60d1bcfc6fa945eccebc0d11c05e3341fe276bc8f185943efc7bbc2e2936" , f "0xcdb93efbf6f92e9cc9f3e1fc7f8bcbbf3e76830a6f52ce25aa40cbdeb6226117" - ) |] + ) + |] ; [| ( f "0xfeea87f6bb5dadb40ffc6c53b66546b526477a0f5569a59920f6b84964dff026" , f "0x271238ff1cb29931448a2d446197d01ee85ff8b63b1eca9536557bd77c669b15" - ) |] + ) + |] ; [| ( f "0xa63d3f5dc9ea78888dfa24856a1cf71bcf1a7adb066aeca11246d44b5e4bca14" , f "0x5c7d2d2baac6c24a5b200a905e595d6ddc51f8674c8728961324ac28458ea43f" - ) |] |] + ) + |] + |] ; [| [| ( f "0x8cbe44ea2a1daad74719a90cfa264699660e4e1f797414fc97e97938ca5b2c1e" , f "0xe0bd6f9be746f8404d172b06a3834b024778133a57ff213ada027ce1376ac233" - ) |] + ) + |] ; [| ( f "0xa663f4df9b8eedcbed68472c4f8cda70c53aa572d6807e31093248daea37c809" , f "0x1d77afd268b225bfac73f9a2accb46ac72e8f030f355e4d352b79256a449151e" - ) |] + ) + |] ; [| ( f "0xfdb5258394ea96ad727b6f055d05e4bf1c28b2277a112121fc4b989d38716910" , f "0xf6a7a872db98735b28c682d33f3f4499ee4d6fa736e6974c44a526e9ca2b5931" - ) |] + ) + |] ; [| ( f "0xd82d00d7555a6c53aca27ec9dba6eb6e650eaf909500adb1839f3a34c2c9403a" , f "0x634b2d0e7d126d8216dc674361786f29132525870725fd9e30155d4404e0ae1d" 
- ) |] + ) + |] ; [| ( f "0xe5394fc303712b79005e9f17435a3662b1367524c40bf91b3a5373e5d6acb608" , f "0xe1c08574ec84185f6d731c82e52a0e2f551c1f93384d21a0deaab781b16d1b24" - ) |] + ) + |] ; [| ( f "0xd4e6579c9d41d49fab6be1a8af75e19a3f9695d6214f90588768b729c8a5bb0b" , f "0x4812d688472c3ad9a96e9cb15d54480a99258d9fc455b22fb7521e7feb8fcd2f" - ) |] + ) + |] ; [| ( f "0x27e2e378800ec11a6d961008658d76bc579067f36bd0cc9c948e464551cc3f04" , f "0x0482b47a0e2ff1f0e670936c432cf2cbad18ebfd293c1a01e805e43222b29136" - ) |] + ) + |] ; [| ( f "0xc6f5148454b8f7e1d2d1e7b430d9e55e2abb28d93058c95053ed9c9b4d36492d" , f "0x8039742842a27db3f73183122534bfeba8ae92e527760063cf620163a7b6f81b" - ) |] + ) + |] ; [| ( f "0x7be83283695a0c15ce2c846e608467a94886c8b2e5adfee4f5b27922f0a6dc22" , f "0xaa9e251d8fecf5d9e40623d5f3984378328ba52ecf3b5a286dd0d80a4e2e7603" - ) |] + ) + |] ; [| ( f "0x259e6c2628c597202e3dac8c294d266c49faaad1d3f293c9c3d3a287e2adce0f" , f "0x88e0a2c425996816e304083773971744c72d7931903605d287d2ebd4c8d0a52f" - ) |] + ) + |] ; [| ( f "0xd58b941d8253e2770fc2a1b08c58f62f265176b7cf9edb51aa0e816cb5c86621" , f "0x91ea65a21e92ce2bf2c0c9225ff1ff36460ecb3fa545822a03241c4c57474c07" - ) |] + ) + |] ; [| ( f "0xc4106e8585757995eb97390bf3aabaaedc8dd426c7ac532f68cb559682a56916" , f "0xebc39975778c0ca74a608510b570957c66044e9ac113a1d1f7312573f111b422" - ) |] + ) + |] ; [| ( f "0x65a336ff07b6ea264e5deaf83e7be49ca6f177508d91e04c5ddc51f8d6bd2f0b" , f "0x851794d82d5addaa1a3fc2bebd72583b9a3227c949998e4e37172161df170a3a" - ) |] + ) + |] ; [| ( f "0xb3c9d62e6c4f7c9a119689079ad1dc800e0766d04cfe048f5d3a25a1b6604e30" , f "0xfe1ca80a09a7fbd229feca07c5af2e2febe11b25e7d37b22c6b54a676cc1f617" - ) |] + ) + |] ; [| ( f "0xcf4b50db3168fa229bb94f934dbec36274cf224fbf2cb933ff154a534fc77620" , f "0x196f3764fed314eeb22c44a581665389e6189b57458d7b513e3d0ae468db5d0f" - ) |] + ) + |] ; [| ( f "0x0a7dc1adf121062ceaec634fff4a782c10bbc7998481920254a26115cf159f05" , f 
"0x208f9fe269d21573142454146c3c14717a245e263f47420ea3aa77c451a5e004" - ) |] + ) + |] ; [| ( f "0xb65dcfaadc9af9b7a4866d0701f2d731765a4a0c388cd55f2823bde52f079c17" , f "0x10aa9edb39b161d8c021838ab36212395a41bf498a8fee78fd7fbadcc5f3671c" - ) |] + ) + |] ; [| ( f "0x846b3642e133f3702884297baf2b9360de0dd420a5c7cf89a28ae60bfeba8021" , f "0xd510fca06ef987c262ec3472b67787692cd92cf58dc677f5e35a70dd7806b60c" - ) |] + ) + |] ; [| ( f "0x277509d39b2416946e46b2a39e118d0da34706af4480d10c54b8d79d27f72d0a" , f "0x206cf548d25f19f176cb4b9763254ac484c8bda16a0deace9625154bd3c03f0c" - ) |] + ) + |] ; [| ( f "0xc0c171f52081dfff50ce3bb7482055fdaa9b1880795a28c46bb09561001b980c" , f "0xd5e0eea2d5ed4d5cacce85d63e6998e6eae64dedbe03746b952397524e856a1f" - ) |] + ) + |] ; [| ( f "0xbd2367bc105d7e2f5c4ea0d39482552c7b19eba7ea7e5e7d7c2418ca897f1f1b" , f "0xdee762756ee007d7db34900dd353c7ff5e5e74fe1bd1bd80e75c432ec6d6d806" - ) |] + ) + |] ; [| ( f "0x5ee9dfc7fff14edf02d9ea9397fd3352d54f1654b66ed33355913a79bc4f1f3a" , f "0x7a78424b53ca9e896c5ef02334bf4f74e7463cfd2d6e8ea93f11aa1fe188813f" - ) |] + ) + |] ; [| ( f "0x63788980e183e3a59b8ab5580e3126c16b63a3651c31c7482a16afcd35656f33" , f "0xb19761224bc3c2a497d9311aa65caf889eb2780a93c022be7b6b5a87ebced531" - ) |] + ) + |] ; [| ( f "0xe0ec774f32b868bd1f9dd30bded3ca4aae18a54ee91c62e532ecc7164fba4b09" , f "0xbd698d1c620e47480402cc2b01ac776c8cba2badeae88c57b2d61dcb01b9af3b" - ) |] + ) + |] ; [| ( f "0xeed4751d94c3d4698fc4e13f5322cbb5cc7e8d5eaf22eb752597920891df9127" , f "0xab3c47bbadd2ff6fcf19cd17bd073199f94f3b04bd986c36a009c037b570a307" - ) |] + ) + |] ; [| ( f "0xae1c07ddc0eee41d50e93f91182389fac9017855f736eb254d71f7205126381b" , f "0x0c1b74b04486b594e2df57a299c142ddf6862d29893f5e5600da98ace1ad0138" - ) |] + ) + |] ; [| ( f "0x20e22210feff391df0132b169e6708259fe62af242935e28da5028597123171c" , f "0x33efe51080aca7838c03cdbdafcd4a292e81b0a9aeadaf0ec225207b0c1be228" - ) |] + ) + |] ; [| ( f "0xcaf548193940ba370d7a003f06c2d74f78e73ecece8df3cd06818bec02ff1234" 
, f "0x49fc967ae613c5abbcd9b621494959a0f71cdd66d333e52d29fa8ae0cea1bb07" - ) |] + ) + |] ; [| ( f "0x07d53cce4d1bdf0d99229c37b907e9f5d8a91c0bf48f17c2c970d54bf8858605" , f "0x4e6a545163d145462d2ff039f54ef04705b94141a674430799ad3ad5ae440a3f" - ) |] + ) + |] ; [| ( f "0xc213a0f441299530660056eb2b0a3bbe28ccfac2a4052322c00780a76d04cb1e" , f "0x96e3aeee825c03203728af542ea9a02e17b50e8ff1e2a26ce916da053f5fac03" - ) |] + ) + |] ; [| ( f "0xf9eb4fee87a3bd69700b3558431e754d3fd8410e5ce8fe1da9a62ff22edf9601" , f "0x982ef50c32b5a06392da143dbd6504cd0a2f505e0d168ef36197742f1aa1aa24" - ) |] + ) + |] ; [| ( f "0x17e100e23c582f3b5c166a4f4b393a31e7a5701934c16d504821e1496bdb0733" , f "0x49482eb96e02eb3e6be1adfd8e613dba62c08cf1a2474c4abccf47724e4e922c" - ) |] + ) + |] ; [| ( f "0x6d490965e24b533d635990f32905c993f2ba2b095ddbcd5c5fdfa2516c08c830" , f "0xd13c2322224add2d1d7d7b324bcc36e052c07cb44c0e4e81d40cf5b319c04c36" - ) |] + ) + |] ; [| ( f "0xf21038e9b305414ed425f5ffd294f5e46230030826a7128bae58ad0fe84b7b00" , f "0x38118f0cbff3f2e97de06d82aff0646c90dc6117bd8ccbb6235fc5a5029eda13" - ) |] + ) + |] ; [| ( f "0x289cfce4908cdc280d5ce98eec308a739b0c2af666f9137b6f73b1072e055d0d" , f "0x98d20585f392d3c69a46de690def43228f3a294ef78cf9a7c847550ba814611d" - ) |] + ) + |] ; [| ( f "0xb1b35f8793a12bad4665353a2255774bc5c655c30d73c4106fedb5b8a2784726" , f "0xe7dd5c6953cf2be96c8407bddb7ed18d21e92464b4021ec3f35c47c56a6c0415" - ) |] + ) + |] ; [| ( f "0x98c57dcbce05fe6a56dacca5baa48294e82965c07af0e160d4b69882f1b2ba1a" , f "0x063624ea22f3446957be9aafb1c23760d719c07c9d6873bd595c405b05ab5d03" - ) |] + ) + |] ; [| ( f "0x65172825b5bc257220405d121e85cdcf74af8359664cbf16bbf5773c2e571d13" , f "0x24ddd91be01b0e4b5bb1898e8f6f0a6f1de3c674817c94dd2091a977f4fb912d" - ) |] + ) + |] ; [| ( f "0xbb4ce139cb4ead4bcbb978139aef87198aac2e0d64171c107932cf989029c035" , f "0x1f4f7b713d28b843ebdb3489e6c2134c5b26ecdaca7842cef75b9fc3dade6028" - ) |] + ) + |] ; [| ( f 
"0xb7fb05126f294c46b9b01d21c4ce8869c4707689b056d209e4f4c049ba905334" , f "0x811060b2be08382fd685ff40aa602be3bcc2f06a900ecd9e8f12e7070cbd051c" - ) |] + ) + |] ; [| ( f "0xedac6c0e003684bbc2c51740b78939bdf37645b0abdebe619aa20f930fb39b24" , f "0x323c6c91fa936b450b4543777b58455ce76e806c4ce22424a9c1b6f0039b0216" - ) |] + ) + |] ; [| ( f "0x9bf91a787a03b951021679479e6b7322db28aa8513baf189b21375561420d332" , f "0x263a2e17ceb24a7a6cf6a79a9717ded420c437659cd04a941f9f42c1a7c5dc18" - ) |] + ) + |] ; [| ( f "0x877804f083850214ae1e5cce3e7782555354b57e6fd6e25cb51f3b15e1609904" , f "0x4d4dbe96bef590a1c0c936572b0e878f38d36505d74dac860181b2a8a65e9135" - ) |] + ) + |] ; [| ( f "0xa6765339758de60a31ea2aa7e3d2032f755f918f4f849896fd3d7496fc6fa715" , f "0x52ae10e8dc0e1f44af60f9b8241b2fca5b27592500b807487e41739019d5932b" - ) |] + ) + |] ; [| ( f "0x1e89a20e850c888d67a12117046142ee19f8589c52a4935dbd713b479829c02f" , f "0x4f1641837b66d24b7118f81e5197a8e6edbbe625bf4cfda6dad132b320670436" - ) |] + ) + |] ; [| ( f "0x93a163e50f9f5d4ec9ffd200cf7b9f17820de9b61ee200d071e5cbce0bda8b12" , f "0x0d684a3bc6db8402ec28ad717339d29895fa76ee7c87a60e1967f07e5e7de127" - ) |] + ) + |] ; [| ( f "0xc6432eeb8ccf51915dafeb4d0cef060d77418e3b3cf61643dab2b9640f19f731" , f "0x82db505b253a7b8f9627ced8024586b81c55ec883b2c48711651a9445a509603" - ) |] + ) + |] ; [| ( f "0x839562519252651ba508d28c646a0f182e7305ec862512051b074158b82d5f1b" , f "0xcd8f83bdbc36277bcd1bb442dda98ec565023a4bbdda618511a9552d2e9a2e25" - ) |] + ) + |] ; [| ( f "0xd49d1dc35decc97dcc3a4e9dae09e8d001bf7a633109cd1e75b218263f50170f" , f "0xfca64de89bf21353263cb9ef99aac72bcceca2c7e4b8a97b45062704a6ee641b" - ) |] + ) + |] ; [| ( f "0x66051caf19e42f98738c549cdbaa6334cb4c22fec216c40789b9a8240caac201" , f "0x698d7398396a7a37061cd8fb6c837d32802838061ae714bcc2b35f829c16100c" - ) |] + ) + |] ; [| ( f "0xdc86b0d40f59cb06e4f180600cc7a5f4d9aea4c996f712a97846ff1efce71e1e" , f "0x3e0a020b0349e01d0985ac0e789f3ad2ab886f63f65aa6463f0cfed6a086563d" - ) |] + ) + |] ; [| 
( f "0x2873c47af586e23f9f304a9d969e908b24bf28167b49fd62fd431d3adfc8d43b" , f "0x54b3066b3544096cfa648bf97f29918c192824c3e666de1cb48c3a6916232609" - ) |] + ) + |] ; [| ( f "0xd5b138320d1598459672f7dfab524d95d7b6ab4ac50907bafc30bdb8ae766b17" , f "0xda2b7b991d4b90a4d93455b7b9c5abe2d7cc370059ba9e9b69deb9ff890c1010" - ) |] + ) + |] ; [| ( f "0xc1c5edc742ee4ea811f2d6be1549a411430ec9ecc862ddf9009eb85bfd03aa2e" , f "0xea5c3bf7b52a3f8cc63f5526d1d69214efc460759085eb289c5fa8faa1f16816" - ) |] + ) + |] ; [| ( f "0xa8c47e5cae72747bc5436f317ed3221920ff6de4bc4ffb421aaa290dd9bffb34" , f "0xfa0d3a5f0ac6a48b9089deafe278607a5516d5bf83e22d47ee7335a3f3ab4c18" - ) |] + ) + |] ; [| ( f "0x657caa909207b19df49d0c478043ae79ab400c0459c65cdc9b35fd707012fd17" , f "0xad6b06663f285c150e6e1ebeb3a028c27557d10afcd064aff1e18db528c5e91b" - ) |] + ) + |] ; [| ( f "0x81da19cbe61b30fdd0451e9aa0584098ebb564598669b19c745f9e0d85b8ab33" , f "0xfe07d10c522ff0b061e463dee878cbaaeb1e19ea9d60c41891f2706005ec1d06" - ) |] + ) + |] ; [| ( f "0xead30513afcad5715d4039d034e12a4b9e0f8ca440daa78d0e0b6da606d1f321" , f "0xadbc18e78fd97b6024a07f2647804b6212ca108a1d84676a16da7d62c59b8928" - ) |] + ) + |] ; [| ( f "0x1cd2791de239749c6e0c8c99120c7bed08cdfb8ddc57cae37db0c6861b6cff0c" , f "0x3ee16a429533b628a63c93e74fff1234583b275fcb4595b16de53f6e2e10ae39" - ) |] + ) + |] ; [| ( f "0x5b3c0a8750697745fd9eab396c20300688b948703e3b959ffb6d3dd5518ad629" , f "0x2ce1979eaf7b16923e0cc23dfbdcef23c6eb6f02ee8644331e9efe50b2cd7819" - ) |] + ) + |] ; [| ( f "0xeb192eb63b5517ddf07ef933b41692da829245a6970fed8878f868a22fe8240b" , f "0xd9f61b4d7015b1f35c6c9d9434ef11b752f00805fccb7dbc755b5c35d8f1fb06" - ) |] + ) + |] ; [| ( f "0x0e722473ae59b82ad70026247941969c0fdbb250f73e3cf23c496a8e04ff6e0d" , f "0x4e295e74ba94d9832a2a945f4b2abf524562f38d318e2ec04326c38ae900500b" - ) |] + ) + |] ; [| ( f "0xa1bd2fa0fdf233c3e6f148307d289ba588892dcc2ce44ca9c58d92d69e7bdd3d" , f "0xca23ff9ec263d18189d34e6ae62a226be760d3f96c076c92c28a07d85dc94001" - ) |] + ) + |] 
; [| ( f "0x0131b2ca33bf8de1645d3426dbf1e11d7d10dfd207b796c4a26a46f39e66b70c" , f "0xc96394bc67ab187d47de59fc66a3a32225039dda760a743e698a936e04991c29" - ) |] + ) + |] ; [| ( f "0x60e1d2c011247f790087c8c531209d6e39403e3ef4343f3dbd47502606351419" , f "0x139efedf8d1c98931bfc77a167257933c9fc1f5d16137a5ce2e235a84f08b427" - ) |] + ) + |] ; [| ( f "0xe2aa6f5438aea4e28d93cd5ddd9e116e293672127843922c39b3c4070634b71a" , f "0x91032e4e82255a61a75ecd7ad03171bd41f86543c9754da4222573943319b221" - ) |] + ) + |] ; [| ( f "0xa448c7670f6638f00bee122e414c2147d09803c92b7d058ed2c5b670d1517c3a" , f "0x55527adfe242f2d34ef1e63a0981e8b02463ff59e1ea7e17849039641b63e725" - ) |] + ) + |] ; [| ( f "0x91bfca1f482938a14dc17fd90bc30a62f448308e76b839d5857dbbfda0905e01" , f "0xba5b5ecd13caa2cdb42794bda6fe0388bacc582b9baafb06df090cc537b3df25" - ) |] + ) + |] ; [| ( f "0xadfcc7bb3c2f0bb4728a8c586cf077ba359c17121d66dea5f9f5c821a45a5d00" , f "0xb3af4d2391c69dacffe746d8b7b98976167ae23cbf3c256a583dfb9e8ded7330" - ) |] + ) + |] ; [| ( f "0x217d24edd6fb8240b80686716dcd979d4a75e735ba0c08bd05bc89533fbcda38" , f "0x816ee4b91baae4996b9676dc28ea00777681e8cfd91c3c5d2895a845a1eb2f19" - ) |] + ) + |] ; [| ( f "0xcb599df15a21191ff2e548d5309fa49c7ad61b30bf9893ee3ccfaa8e5e7dd33b" , f "0xbae038a308e86907875ece5d6789d17ee915f8d8d13b2fca7c95c1e02fdc9809" - ) |] + ) + |] ; [| ( f "0x540fd548ffe3e67a41a3e8bab85a7bc53800676ebfbc28d3fbafe2e32efa2f09" , f "0x00ccbc742ad50929aced4a6858c97db1558ca1d2db84369ad9675b562403ec0e" - ) |] + ) + |] ; [| ( f "0x057f5c48be133469727200f2e6fb6ac0a04bfe8755cebfe680421e798a681e0d" , f "0xcc5800fbabab3594540e2eda1377ada6cdc63c50ab6625ffc3da7e60261c350a" - ) |] + ) + |] ; [| ( f "0xa508992696c057eb0d9bdd8553e1656f71ab3a4d1f44a992c5b6defb87032010" , f "0xb31ffd35df4c8bb83ba6ccc2bfa11e74de6e0056567e1b3ccf89c165d946871e" - ) |] + ) + |] ; [| ( f "0x7580d630f480d73fcc9b4781ff325e41bb899f3657f6497de65d8dbce1662c1c" , f "0x4bbf28146afae1e84e07020d4589cc2a1610c6af030b74171c91954b7df99e09" - ) |] + ) 
+ |] ; [| ( f "0xf3b31123bab3ca797b6125c7f58264eadc478c7b7aafd4c44217b71625884c3c" , f "0xac0490fe4985f9c57f5787bede5bc123bb8009a5a18666b8e749f3d469fb3033" - ) |] + ) + |] ; [| ( f "0x9f4f3a7990e4dd65cdc8f334dd391ab4c5dbd992ec227e4af98ae05d425c0117" , f "0x770d337a461db3a1cd17ea14a5038109732704b3d7cadcb66dae6a62dd315c26" - ) |] + ) + |] ; [| ( f "0xc7d4eeaada1de7efaaa36a22e10f92e280155a51611c5a400d9982628d5fee32" , f "0x0d936d3084e69e83ad49bdb9fce60fc087f86db9a1bc8dc01fc6402536f4b736" - ) |] + ) + |] ; [| ( f "0xad4ecc8e8a6aef3b66af654288277a3ae13e1822fe679c73ddf215a26d3cf719" , f "0xa05798563224ae2c007d29ee2952d9f08388d06f2c939b90d2d90b6d1875dd15" - ) |] + ) + |] ; [| ( f "0xe34cf8c41f066a6d48af69c77ca903c3886920ef3835ee8ba885813703020e2e" , f "0x6eed2320f5d70020e24d2a9d840d748283694f72e2687e5f232d08990119221d" - ) |] + ) + |] ; [| ( f "0x4bfb644160aac9d69091eb08b8db7ed4bb1ac123b24b72f075b453af31266c26" , f "0x4cc02fdff0e4c8eb47d9e61dd8860ff8aab5f10223ba1389c4725ffd947f4b26" - ) |] + ) + |] ; [| ( f "0xe404e8c953e2caf761d96b28ecbae825b32a3e33f559fb302b436dd90bc96831" , f "0xa14182be2c4c9b99013ea38db9bbdd36e1a4b222fcc4e4161ba9ad0a17391f19" - ) |] + ) + |] ; [| ( f "0x5d445119820e3de4a5e818b1b40d96b4fd256cfc1d7b2809a6b073098eed9f2a" , f "0x236f6017c01b689b71dac0fe4a2c0fde8f746706bc3532bf58d5dcbe2419322f" - ) |] + ) + |] ; [| ( f "0x11dc3ca7f7211698ac97e651de05e8764d26918e98d79c02cf07f2ca8b14b439" , f "0x8da09db618e78f5710c82b169f3e68675a6aaed027b08b02028f2da3de56413c" - ) |] + ) + |] ; [| ( f "0xf62f57ae8406af827ab4c8b4c8bfb8a269791d1b772c040e2c150a5cad2ec03b" , f "0x1a2d15d03959b54c57030441ca6aa49990414fc810c1d93ef3c4686ed7905e2c" - ) |] + ) + |] ; [| ( f "0x69a86e37d1044a078a796d4e8ac0c7151caa44043e2c9f0a62aa46604ad74a36" , f "0xec9bce5b26a87f1ea696d6f0c8865f99172e31952ba3eddf27f789f54d5ab820" - ) |] + ) + |] ; [| ( f "0x2c95249453d9cff1c8eda5e9fd1d3eaa9091e842b1422e9ae62a72b7ad34fd22" , f "0x0a0413222aa8a561610562f478452e6a467d9b34a31c813e601e24a001b3832e" - ) |] 
+ ) + |] ; [| ( f "0xec3edd95db8706b4ae82886ffdd18170d75a4d47792b949d9ecd039ef3b9542b" , f "0xaf8b54e82c5f1d03fc2d22b5101a9da414b8e141406c6d1d06ae825f4378f827" - ) |] + ) + |] ; [| ( f "0x7fe1e3ae693ed80e2c3a799b1b83eec9ee8b5ab71536e8098ab57e2a940e5729" , f "0x8973c66419d2ed7a6f91052b0dac1e7ec2ff704eef3b125d3fd21c82e722dd30" - ) |] + ) + |] ; [| ( f "0x7c0d4117e3e4bc0e98cdebf074e9b6dfc034c3436c051ef4c92fa12fe285213d" , f "0x268ab54f66e9d581882bcfea9e12625a507762861b950c3f593680bd8fab7c30" - ) |] + ) + |] ; [| ( f "0xf8e960a040de85c6312d6c9265b844bfd07efb0b432ed00274e1818d9ad7c81a" , f "0xbf90d63e31d1b1e186d6a9e24dce4906b81038f90e1d4d77ac74ab20e87cec17" - ) |] + ) + |] ; [| ( f "0x253d529d4692528e0f169c15a593605daacf7c25f88c51fd6e7d7ecb51746b22" , f "0xa67d28aaeabef9547676d6032a58c62013b6c2e2bed2742093268d8672852020" - ) |] + ) + |] ; [| ( f "0x8dbe9d08390e028079af00de156896817f4292e2c5df1ac12e2e3e523f499e31" , f "0xfde9bab6def389b6e20c5c9e969b5f04bdae2fbb9047239ffdca7e098c98b01f" - ) |] + ) + |] ; [| ( f "0xda3acfa9ad6de513d46f6ee6ed013a9fb1e60809c248fdaea147c8eeb7e6770c" , f "0x351aa46477e22f218fbc46d2adbe2793046654c0c4b4165698e6c7983ddc472e" - ) |] + ) + |] ; [| ( f "0xaee8ab8c3c08d396cee38712c06768c8b9717c1a8176bc62997bf4fe83cfa53e" , f "0x7b8e739aee87fe0d2041f84690bce1652ff616c0ca3f21e2a5a9630d16c7b71b" - ) |] + ) + |] ; [| ( f "0xdc6ad5b78b97fdb33ed1a9a86a1ed554775073382e5169336469e07ebc315e3a" , f "0x02642b9b024d0b82b24b34d5133fe941b0dab8e8087e768895081071edec900d" - ) |] + ) + |] ; [| ( f "0xe0896627395439c40d017ec67fa71db97209f057fce26aae0ecd2e9317fb1713" , f "0xba2f40d7c1946f27f7a1608e0365c261b95c15771afd1183d2acf861a20bcb01" - ) |] + ) + |] ; [| ( f "0xd7af7e4c2b54429402a14c2284a6dab1b79e178d0c4966b5d1f9973cf72aa21a" , f "0x7974deefec53d79163fd3cd66a535876a48dc961b769c3f70f54a8d35001182e" - ) |] + ) + |] ; [| ( f "0x0c75aea48f27805f10295a8563c9eb1852a8f3b50ef04eb97b3851667e3d4309" , f "0xcc4596cbec4d94f7606887596c2d1835112a55f07198b8d554de1d7a776b1521" - 
) |] + ) + |] ; [| ( f "0xf3469676fc1f3c47e82bfd0dedc00e17d20f965e1195e1826369457e7f769b33" , f "0x77fb70acba411085216406485548f82be529c37f30f5957825afbc8ccc26911b" - ) |] + ) + |] ; [| ( f "0xcba1820095e887b66b3c370b62e69443954445e1274a96ae147e7b0bb5a01616" , f "0x5da564a393371c5298199395b0846b57351f1ee48e3a6b4d6edb15f2b528411c" - ) |] + ) + |] ; [| ( f "0x3674b2a49bb5d95031a960c084e9c27e6c5ee340f0ce71abca812801e53a4d05" , f "0x66773829caab266881626c47b86db1fcdd16cefc12f453d047bfa18dde7d1728" - ) |] + ) + |] ; [| ( f "0xc0a2a124bd5d05f6314455010ddeed5857e6b2689dc9a12f8e0d6b8e3af3f10c" , f "0x9221bc2ad6788dc2d15d69cbab16a68cf1d613a8eb429968567926c26df75c37" - ) |] + ) + |] ; [| ( f "0xfd3e80e34c8966e0f6c746a65b796d49796e1970d6d068e45c46ecfb0ea6af2e" , f "0x40977ab67b274836a2fc0770cd11407f45b0fac250d1a837fc4c9c41c4042e3f" - ) |] + ) + |] ; [| ( f "0x0e1740286921584cfb072ea2b011546c6e25f2be1a0e3605d7af1a2f4ad6293a" , f "0xab7975837bf38cf8e63544b26726388cec54ff0e544116ce1420a7a1398ef716" - ) |] + ) + |] ; [| ( f "0x2756ec5c37df2b17cfa3ddc9e2305cd96e5be2176bd77112c3dd687955692011" , f "0xa253c31940f9e3b546070acf90d51ca478ba500cb6ecaf8608a5c4d521dc1507" - ) |] + ) + |] ; [| ( f "0x8ac9208918dedae948629820783d977f923fd79320292225770a45e21f2bdc1d" , f "0xd9625fa8da8c030a6cf15f595106eda0bab716b30115c4a93f7a08d6665ebd21" - ) |] + ) + |] ; [| ( f "0x3d9d52ac2bcd55b457e345f65c7123a46914524a325fa152f5164c3dd5477426" , f "0xaf07fecbd407075b61c8048c0204088d99a4c037a485ac147bba73a3323ef115" - ) |] + ) + |] ; [| ( f "0xce05acfa5ff75d0305fe39336c9f542d1a1c51340447437704dcbcefec7d6819" , f "0x0ad56280edfb0064e48f3b0278ed7ba608e6bc705413474346c2e77544354a20" - ) |] + ) + |] ; [| ( f "0x05c34b079ca781b46040dceee6d218b2be0feabe783fd1a37d637559b9bd7115" , f "0xce9cd9ea5dbc2298e0e7c9c2344b1f656c4c120fcfd68dac98c33f11d5db8e3d" - ) |] + ) + |] ; [| ( f "0x02bf0e09cbd97aa0957534f07a1b37835cbd79c8593b95a85319769861299320" , f 
"0x37e60b0aa082847c701ea7abdc75c5e3c107decfe7f2fc0cf9e66e541d198523" - ) |] + ) + |] ; [| ( f "0x6f9f39926ddfa6424bcb339b39a2fc0f83c1bd820f6dae4ddb0715706040391f" , f "0x69bd25d62eca0e9d88af45abc6a2ab7e8a82c3242c03ac955f2773bd4c93e71e" - ) |] + ) + |] ; [| ( f "0x7c7d6bfc2e26701e7fe6c2f001719f6aed100854317587bd22bd3079ba9fdc0c" , f "0x5cefcca338f3c27cdad5f3dbd88c272c2457818e47ca783ddb722b713aa58c0b" - ) |] + ) + |] ; [| ( f "0xb1a6042613a8c2f2ce42a05bf165acec0c3ade5b8b3a69d96a4f511e88812315" , f "0x173f289370500d84facc7b58b11f945d0430d26c3a7997dc9dad2bfce8c1bc04" - ) |] + ) + |] ; [| ( f "0x906b570b985456725b0a75e4d04974df3e10915470858fa98cd8a272db9cae0d" , f "0xb8e10759dddc412ba8c807d524a309208a57cb2767d992a686c657a12000bf2d" - ) |] + ) + |] ; [| ( f "0x441d3e6e1bdfd13549afdbfb4a06e0d7ba62bcd2c5e0b9d9d39224b19f767d2a" , f "0x2f22584d28d8c8e696f96883227712d3586e7adefa44905c2c171c3ffa21ee0b" - ) |] + ) + |] ; [| ( f "0xcedf133b6e4f4703556aded76fcae962bd73b08b9234fabb9759dc457939cf1b" , f "0xc08c170679c220831efe311b93c095c12b640555ef222cceb85e97ba200e0329" - ) |] + ) + |] ; [| ( f "0x3eebe369db3cb4eef3bc57fbbca0f5c1f7ec3bf8765065728a6edba1cc58dc1d" , f "0xf3fc6fb8f9db789e76f72ab0e58156e27c7fee8626aa2b18ea2bb951cfbab41f" - ) |] + ) + |] ; [| ( f "0x6c26f8166bb4fd5d70deb7bd1636a9e1a2d5dd7e223cf2962de48520f0e62524" , f "0x7ad15aa3a96d38a69b9edf448ffe91a09ba905b5449ff382e489f00de8d6bd33" - ) |] + ) + |] ; [| ( f "0x9bc7eb21a194c04493dafa2e0ddbf7232942586fb896d0d9123fad73c7cd4133" , f "0x43a7f765ca12e2c612b7aa2c26b7fb767301157e2a8c0d7856eae1b746437b3b" - ) |] + ) + |] ; [| ( f "0xb926867b85df378acb41b07b229e5d5a148d465e3328a78075b9cf160710113c" , f "0xc53c5ecebb2aa0221e090b6b249f7582e404b3df0de2d4861157d0aa3814582a" - ) |] + ) + |] ; [| ( f "0xd67526eff15709e62891b4e179492327762c0f53328f38ffbffa39bbe0499b27" , f "0xb142e6042817eb4513a0882b477b3061f08ba345f4f1d80a8c9bd2ae8f017414" - ) |] + ) + |] ; [| ( f "0xa639d208fa3b73bbe793917f101eb93a8b4ea435845dd5eef78883ce3a93fb0b" 
, f "0x16f890a965a7016cd5da2f26e631f7d16c65cc4cff5a781f25bd61ba50787e14" - ) |] + ) + |] ; [| ( f "0x3154fad0f05cfcf90b9823d700629d8ff054491262e9d1887f7edd3a57f64a18" , f "0xe4bbd59afc3436849ba39e51ba7bad046f91ad9bf9f076287456ec267eff0c3a" - ) |] + ) + |] ; [| ( f "0xc0eba30d9e3fd39970a76277baf68046e356ae2396a2e859911ee2c7b6b1b01c" , f "0x2f7181b7d036142b2994d4d482826e502d9d9806bad10ca7b5842dbd57cdb703" - ) |] + ) + |] ; [| ( f "0x9c848e8afb76b3db17c0a440b6f22ee1b9aee40721ae7cdda013f4e6972b5604" , f "0x13a6c8ed93d5fb3c53149c8e0011f269c988e703b680e156ab57f8ed4aedfb18" - ) |] + ) + |] ; [| ( f "0x628ced2d3efaa4ff3f863ed92a29057e6a4e316463a30bfe9846e75a18667a03" , f "0x7392f50d18af5d3b22c69b1b41c04e8875492ad6974510bd3a37c0205b91a631" - ) |] + ) + |] ; [| ( f "0xecf111746590a552d8dc832b2adab85deced03dc8012a7a41053f7b4e969413e" , f "0xb409b5547c65344b7b83fddd145e90e91ce87453abf5d74596a41bb224d94f1d" - ) |] |] + ) + |] + |] ; [| [| ( f "0xbb4aa388f4ac50a2671de70e8655812364720114dc80391442250b5d1adb0c02" , f "0xe999830c8b9417a7f030559188af3d465ae3e6dfe684de1dbab580347b98553a" - ) |] + ) + |] ; [| ( f "0x8fd6f3436b7616c533a44c935ee7b590f648279c523cebbae1a12c142637912e" , f "0x4d4e667014e141083ff23574605de144854a1735de73c847e6b2eef1360fce1b" - ) |] + ) + |] ; [| ( f "0x819343df9d1cffcf10cbecd013adda49db529398d8c09c77409e78753bc46620" , f "0x1470276b1e6bf0aa1eaa6a0d2289c6f9c35d1e874a94d514912cb6eb684f9c02" - ) |] + ) + |] ; [| ( f "0xdbea8e023cda5c75f0d7e236a877ee0699f9dc926d352e675e1095b337aeea0f" , f "0x704aba4e7f81ed64d60740961edf8f8819d880a596f083478b441e21ed187329" - ) |] + ) + |] ; [| ( f "0x2e9c6b3554b667dc14d0535e2e3ff1e8ece38ab426fb3a66fbac7e668209dd22" , f "0x6798a54c15c8e635cc5df072e9ed4d80f6237bcfbbd5fcf8ff3b733f9c0dfe0c" - ) |] + ) + |] ; [| ( f "0x04860fe7f56b169e347697520342af393b29d17a68d6e3322e588f881fd8e925" , f "0x36619dda97efb7c30ec9d409f9ece5ad29a6d300588dd7fdafff37cc8cf67b30" - ) |] + ) + |] ; [| ( f 
"0x42a30a7ab8785985a0b409066cf5023d0500f6b71003bd97d2609dbd69883800" , f "0x719a03b57a9269b588bac98c64cfd65be56f16a329c9832a7a8844dd43738200" - ) |] + ) + |] ; [| ( f "0x58fdcd8e8a114edbe5c4d89bd56be02d5c767e12a0823d556335afc9afb21f07" , f "0xaa2bd238b6ca3dc63d57a8b604ecf62a03ee7a786f4bbb8ab81e83f13f89172b" - ) |] + ) + |] ; [| ( f "0xd6d0aa97bfd58b10ad1d57044baa1b79831dd617dd2916987921c3efcfb61220" , f "0xb0cb8d2e43a2597a92b2fbcbb79718c5c082c9a0e27888307f1c7300aa130134" - ) |] + ) + |] ; [| ( f "0xab9a8b3813f7bcec0b9bf408c58eb01e5c0e011230f4edbbc8262f1ee2fd7e2d" , f "0xc7942336455056dbd44b8a6d49ece75f170c376926f8d18934598ea2e181af23" - ) |] + ) + |] ; [| ( f "0x22709471e2184afb9cb074aebbf50d3c07a1c3f0b60eb83906b7255721ddd80b" , f "0xf4488f68bfb41651e5ac94042210646664e1ca37dbffd4eac6a325253da6b80d" - ) |] + ) + |] ; [| ( f "0x5bc899b45c4df432f4cee31d4e47533667e3ff760eb4e3308005b19db05d132e" , f "0x08028f4a4299a01628a1cedda9757181b018d7564b5ee9f62fa3a17b050e7307" - ) |] + ) + |] ; [| ( f "0x05dc07266a6744cb949a13e49e093b63ba3b47ca2695994267ca021a09c3493c" , f "0xf482d5c074558ddb06ed67963d1c9c74ed2f95b0fd2844481d4c10e90328cf38" - ) |] + ) + |] ; [| ( f "0x2de2f68ed34ab28de13dadfd874c54fa525df2768ad8a5b64bc7e942fa82711a" , f "0x830985c6a7a70000478c33242fd7af7f468d38d140907c435f072b4880a7d631" - ) |] + ) + |] ; [| ( f "0x53b3834f5253d56135881cbf2546a8b04687c823cebad6df22dc4c8c0cbab12f" , f "0x0942970049b9543c354e7966aea1b2ec4c7cb1c2cc7f50f9b9ead246f8228a3d" - ) |] + ) + |] ; [| ( f "0xb3dbb834351451588330ca29f5341485c2d7f6ccd81ad56d2916afeec9ebbe03" , f "0xf8042e082cd7245ca17f95253b677348519e4b661f6066edb79e6d9e806e7834" - ) |] + ) + |] ; [| ( f "0x1af9520d204c7645d3632d2507dd80b62b496b2dd2284a4c29bb19aa186c7c35" , f "0xa6eac6e5aa3cdd34d3416a2d40e2fe33d87125b0fab7de545881fa1c416f553f" - ) |] + ) + |] ; [| ( f "0x5d34b296693bbb48156333156375224a22e7d60fce9e6c9ae607ab90041ea339" , f "0x3b250532bacae9c590015f076833aac53d45c15a4c60be59232f2729da5a0b36" - ) |] + ) + |] ; [| 
( f "0x9d17c4a3bf8aac703274daf7e9843eddea482599865e58c67ff1da5414f7921c" , f "0x3d390ce4d9ad13d880b39de7678133d7889c027c7835386155811cc6b8874d1d" - ) |] + ) + |] ; [| ( f "0x53e7b73c72b06532318d634844e6f45c5c28d612c21e60260e7994d331aa162b" , f "0xca9c9322b2b534a05c24d808a01c34d5d846a3f10ba85aab5c8d835815d9e822" - ) |] + ) + |] ; [| ( f "0x57b92824e698d9be4dfe2003fb03da615cf5f94235313ae5fe72d0a027f1d021" , f "0x3acbf5411125024f351b81cfd9822dadccfba2ada61fc75374048777884cd310" - ) |] + ) + |] ; [| ( f "0xc371f21c88144d8b1888324a6684d666942aaa5b73bbef11d0d94f03147bba2e" , f "0x6e3dc5fdc32e47322a327f8dec6ecc00fa5d9b506b47776333255cba396c763d" - ) |] + ) + |] ; [| ( f "0x412afa9002a0001d3e1f57a328517ebb87443f7239450855ba8f52549140aa24" , f "0x11cf0e5d532aacb847ae07d7b67aa9276b829a8c72df5acc417b166088125320" - ) |] + ) + |] ; [| ( f "0x74be9221a1cc18f6cbbb4147ca0515296569f4195a4dd7c8c341762cf456ab22" , f "0x014cd9c0ee59da913bd4c0596a844344a40a14583a3fbfc5063263ed0b55ff3e" - ) |] + ) + |] ; [| ( f "0x24447a1dc59a0224681f53ce9ea458b42e2b1915f48e5ba504ac86fe3636301e" , f "0xa051a4af58d86fc4604b3613468b58cbb893648b08d4c48fd9bc767c3e26db2e" - ) |] + ) + |] ; [| ( f "0x8104a261ff6e3de4050bc0d86b0e178df460acade43beb185c5fc093ae819211" , f "0x123e5d7f96940f41a01811108fe87b5613494919fb6583d76406b2b81c9bed01" - ) |] + ) + |] ; [| ( f "0x54f9eb00d937932523247bb32a91a1d67a1fe832b4f78f4e5dd6bae4435a852c" , f "0xbbae7075a1a3469cdd231fa6778deb00b4a6cadf5c80646b75486c9009cb1e3e" - ) |] + ) + |] ; [| ( f "0xa7e246229d18073994acd405b6b400b142bd482f687069d6415e1adc41e29112" , f "0xf42b7bf08d16fa07b036cfa69bcf05e8ac730ec34df32af21f688bac58758812" - ) |] + ) + |] ; [| ( f "0x3b42ab0c7656255b08ef68fb79beeecdcba9db57c35076f3ded3d1ed5c002c3e" , f "0x8841f9324d8f12b5e9f30a8d3fa123c6c16095fb6e66fb276b3279673d5a743c" - ) |] + ) + |] ; [| ( f "0xfeab6dffbe28b9de4a22f1d690f6bdc2a045047726879f65d496e61119f8630c" , f "0xb3158dced8549e709adf1482f29e63097dfbcf52d19bc6ee5f47a1693aade304" - ) |] + ) + |] 
; [| ( f "0xf4050c12d1f0ebb0bba98b483a646974dccbf3c07b8a1af28a7d22d4907a311d" , f "0x4109cd453fc5a94cca1fedb9ee19d9ea55d20a5eaa52a16b51fe60a5c2bd6935" - ) |] + ) + |] ; [| ( f "0xc3e25ecea32b443961accff1f8c5a4c1a498a7164925bfc6ae2904bdaea74137" , f "0xc31eb89f8992a3127d911f3ee4036ebd66eb19d9626b779c1381c0f0a15e513d" - ) |] + ) + |] ; [| ( f "0x0554abc8384bd03d2722fd3494c8d1399c280cfb92c98cfcdbda77cda0c4381a" , f "0x7ee84f767cc1d48fa86da2ea1909bbc3840402c4928505269e26eefda1cbf418" - ) |] + ) + |] ; [| ( f "0x09308e50702927977941347310ff0914b92ab672101bcfc49f02e52db2f7f60e" , f "0x2b69aacfa47c6d64747f12e0d724a6ab522416d2f9a40e02ba53e237968cbb05" - ) |] + ) + |] ; [| ( f "0xe7769d8cec4a06e0260a50766e800100344e311663a65206c006419550c15e02" , f "0x8cb9d76c61e419ea3430c61ace2af9c8fe1922d2e7ea6fefcda295dba5b0993b" - ) |] + ) + |] ; [| ( f "0xab9a5030af31ac1061fa768cfc9fdd8566ae0a9b5191393ea98ba38e5b8d0103" , f "0xdc4bc19f0372a9cc7edef7e1ad8fe630c993f06f2395c5eedbb637ed42ab860e" - ) |] + ) + |] ; [| ( f "0x0b5cc0eb9f0e0b9aa4b42f6c63c3ca99cf99f9dae1e2895b584f99720c2ef827" , f "0x0611ec8f8fd827260a8ff39c31185c3a9b5c3a4ba620780088c699e1422a6807" - ) |] + ) + |] ; [| ( f "0xec0f853f1aa17e4959b9642e0051a867268e9fd6405b3efd81418b3247887825" , f "0xf03fd876432b9dcb92904b1b3e0e9a404a2a63daf93516f9c5a6825803ce8529" - ) |] + ) + |] ; [| ( f "0x6393230721fa57aed856580fa3721b7d04f7a9a196e49af6c71ed0425965b915" , f "0x8b928130d93310f92ed324ccb9d0e5818b16436861ec0fda341bc9d0e74fb40a" - ) |] + ) + |] ; [| ( f "0x7d4140ffb17947839dd17b343a02a0c4ff34db690f5193f88cc41b43768e830d" , f "0xa5c56e7548fb46767549be0ecf32faa92f7dcd6160ff893727359a77e9446202" - ) |] + ) + |] ; [| ( f "0x7fd085bba0287f4e7e2c39518a761088d24915aea3b511abfa573e326d92d721" , f "0xccefbe3a104b2e89fde24563d345005d3b3bc485f1b29744d0e173a62e5afa1e" - ) |] + ) + |] ; [| ( f "0x5d0f9c3d6aa33985d6ee9292323ed87581944ff2adef24ef3e24a7d04073f518" , f "0xae65d1241b77ffdf99fbc0d7cc9798b57cb8859577b8eff64bc4005f0c1c2e02" - ) |] + ) 
+ |] ; [| ( f "0x22926da48efdcf02103488519793c0616a0979e03cddf3a1678e4c3a8199c63a" , f "0x47a3dd3ab09740fe99917f27dabe3f0658486d9621133358f89c0839c1949504" - ) |] + ) + |] ; [| ( f "0x999da97e569e4d22e229dd6450dfcff3bdb7ff778dc89f78c6ebe4632d014822" , f "0xa6cec809560ac872a1f08e616c6a93ff0f3ebab63f0931e3a8ba4cea91737406" - ) |] + ) + |] ; [| ( f "0x61591d70c54252c35901652b1100c486b52b7277e4949efac50b2445d49b630c" , f "0x23097fa55c968df8ecf24acd7e2670234f54cc133908c40cf1d248115713310e" - ) |] + ) + |] ; [| ( f "0xb3f2ef071d80c14bc2061dba3018018b06b3d1c1b6319ec3c93c27472ae9ba3e" , f "0x381ecfc359459f0442db9a6755ded0e377fc5b24932bb8ba883d02f68fde8e3d" - ) |] + ) + |] ; [| ( f "0x7b1628548e7ac34c71898838911b4648acd1a1d45a229e9428efba786ca76e38" , f "0x5e058a6c20e1810cd0388c7c39341ed3512f22db5624410773cc48ec732beb04" - ) |] + ) + |] ; [| ( f "0x352f18e0bbd95a7eb37d9360f5ba43ea1589b2101bb90d42734cbe52acec2205" , f "0x3fc745a2a95fb29eac8b79667b67db13a582d3ce75522f1fd8848904fa1bac26" - ) |] + ) + |] ; [| ( f "0x7b6d30d8c502bf6d4e0e0482db37df579fddfebf2e3f3cd94a97ce38ea1ccb11" , f "0x8e48fbd60acb5fe497826e3563801edbcb9909e27a3d8d506f574b2f1710d80d" - ) |] + ) + |] ; [| ( f "0x1db77e1ce7f0f5885cf64acb133add63b12dbc32e50baad518ce8c765882e02a" , f "0x6b07369425a8359507b391667155ff2ac6efdc0ee09818a104e8b9ba87e4be0e" - ) |] + ) + |] ; [| ( f "0x51e74063572a9fa5371cf89db18b6ce5e4aea450a15cfe38c96f71c274a2d71b" , f "0x28cbeeea13469dfc0caae00a40d9ee389c386ec2d25aac9aa0387984233fc915" - ) |] + ) + |] ; [| ( f "0xbf24ccc09718a6d5cd14d2dcb1322d8a46632208823febddf32d10ca5612280d" , f "0xfb3bb94e60ad9fd279d61d068f30fd1bfca8617b254b606efbfb4d59de0bc51c" - ) |] + ) + |] ; [| ( f "0x8064b1403ebfb5c59fea5bcb52dbb0403d962d876997ada2d51cc7b4a96c142c" , f "0x180eb5cc567ef96d44aec2ecbcf70699b8108f501f440438843684828128e315" - ) |] + ) + |] ; [| ( f "0x88c9c5015bc0727e32f62b3830e9d76e5264077478a369bad72bf19465b75f0a" , f "0x0ad6220d9b0becdb8e6b6756c9bba9ed4e04a1254f4463a0b2c0a3715af42e38" - ) |] 
+ ) + |] ; [| ( f "0xea43d467aa48bd54b21a717601ad2df099b6993784f173e1e90aad3fc8b66a1d" , f "0xb891a77ee450c30498cccb7b05c42fcd5fe87c2a937a9e7c3998df75645af033" - ) |] + ) + |] ; [| ( f "0x6d70b062de13996fccdebea6505fea3344037168a3d2abfc9bf6bfce30dc6106" , f "0x478d8bcde84a18d4c8c2c1132528ed8f9fa3a840ca27a94dd513f57dda9f7536" - ) |] + ) + |] ; [| ( f "0x8e9a9744f23069be550c06668d6fc3a6021e824ffef08b089f0fdf62c338ed21" , f "0xb8d437120432da5de663ac6ffc83b4e3eeb8e41437f993f54216dc3e97fd2415" - ) |] + ) + |] ; [| ( f "0x75b52a802baf4692662ba2e78d05a53affec3c980aa40d42a112303b91978039" , f "0x2c648f063b976fac4d0f47cade41778ce3c9d8a6b09e3cb1d8f17be9f7570a36" - ) |] + ) + |] ; [| ( f "0xbaffccceb2a067ed40686ea30eda4313223b871af5e5d4ec4c6c218e143bae1f" , f "0x5c23f95035e6f30445ff683a5e67b1725e92a7d10dfde3309a88606214f40e1b" - ) |] + ) + |] ; [| ( f "0x47e922be8945724feda520ad921495834521a0868461b8458814d0a69843e121" , f "0x2f6e6f49363ff4b96690b331c82ea9fe7abe5ae213bb330e49e8dfd1b3551f08" - ) |] + ) + |] ; [| ( f "0x09b50fa3ff81bdbd1f0c8137c7261fa0e4a9a60996b5616a8117dd69a7e3ef0f" , f "0x89242c2aecbcb2a651a2865455dbd7631affbffe8ddddb778cedeaf3bfb68521" - ) |] + ) + |] ; [| ( f "0x7e93e5aa414ad04e12f0d864db80d0a544481c7d234ab8793ee0e0ee1d015c01" , f "0x754854f718bcbcc34ed7f21e182d1bf62500cc1aabd64f2cf734c365ffc2e126" - ) |] + ) + |] ; [| ( f "0xc7319fbc15e7fa4a067ea63bf60972bf198a6aae6f1bcc09925c607391bf3601" , f "0x20a7a8f9a56e37a3970065adab1c7500d24f0849a9c482018cf3235adaffda2e" - ) |] + ) + |] ; [| ( f "0x150454ebe5337172a11994a72481bb4194e39c85aa5e3c9d56154774de6bc118" , f "0x6c87dc70a8e97be1b11aac17da3eb9feb953dcf43562ca18ad2046828b222d08" - ) |] + ) + |] ; [| ( f "0x4a5581df5e4cee5c9bac3bdef9126fde8ca3520758caf63ad42a5140281d0f2b" , f "0x487582f2d5e488f5e25bd5bcf108a9bf2e4c589790abacfaffd21f26059f6b20" - ) |] + ) + |] ; [| ( f "0xd47f13ac4c63a523ac3c5a1355cf5389e8a3dc3f0f44385e58e295d63b9fa81f" , f "0x711b569aee83ebcabf4d1ef4b9ed5ccb858d019a64a556f4c90ce80ff57cd20b" - 
) |] + ) + |] ; [| ( f "0x4b61025c7e22262db09da450aa109db3f242f61c42cc0e83ccb564de7d0e8a0e" , f "0x69dfa89258604ee4e73cdc85bc9ab0c5762e75078de3272e762bab7dc32c5a34" - ) |] + ) + |] ; [| ( f "0xbe868b29e32c5b36c4f95159dca1903e5f63d54b9de50e4b7a8841d10786bb2a" , f "0xa74d595585b4267de103e66264ce0d1c5bd888eb237549f5d1a2db62524c8122" - ) |] + ) + |] ; [| ( f "0x1545bea6dcbb9ef1fbcee18bb2c74387fddbbcd84cbb12ee75a24bd0d4e60f1d" , f "0x7eca363b56e957e0f4c6346e73d0b9ed1e87fbe298ae50bdfd9a1ca23f381719" - ) |] + ) + |] ; [| ( f "0xb21c664795208a39750134a7c1d1e08d0f422d2bb57cef7ffc0245b498d0070b" , f "0x45af53b6565de626715e608a1a18453497963fe318e9aebebef08680e1eeb901" - ) |] + ) + |] ; [| ( f "0x401cbce921b91282fb4a5f74080681eb4e4e87bb6d5228f1476b2e5dc3a1d713" , f "0x88f19c39afffb201ce2632f9917f25d05b110c3d9890231dbf6483d91f71bf31" - ) |] + ) + |] ; [| ( f "0x26e800c1c4df35ec1d4331e829d33f56a4826e07e1b36d84b1d9fc9054389b01" , f "0x7ec9ef08fb0ef193dc0f81755c566e45707f6e24fd6659c0539d4d93fcf6ea3a" - ) |] + ) + |] ; [| ( f "0x73c4283198f930537a1d519abdb5eb62c6266b0cdea2ba95ece1fe7b8d726620" , f "0x8851579746b4d3711a198f8c3b64768d9d6128fd6a340a05726a504ab4cc5c3e" - ) |] + ) + |] ; [| ( f "0xd56a38cf01b9ed03a5e227fb9efbd00e604e923ba02000e306c300d275ad7426" , f "0x1b75eefbec4352c0c367623fc523a8148b18ffa66bf588cda74ca3e9f3549420" - ) |] + ) + |] ; [| ( f "0xa318520d6a848a6651859c27966ac7a986e5b0b27c409bc9526fc735e35a7822" , f "0x54baa96ac1b2ef510469b5e3552087a305e7118657c420f24da4400a47b63937" - ) |] + ) + |] ; [| ( f "0x6ad3cdd851bf1c70541b07928241170b219341c71781008922335ebf403fad0d" , f "0x3b21e72a85ee312c901e388409e9bff68b6aa6b73d2833feff15b25b7064342f" - ) |] + ) + |] ; [| ( f "0x4d892ca98e6e0b088352bbe7fea10a05d93cf956546a2e16ba3eabcfc3092e11" , f "0x5edc801d55f880c540e9960201bfaf2afe872de061f1e99845558676f4677530" - ) |] + ) + |] ; [| ( f "0x353bd19c34273a24215cbb61d8dbfcee79f5dd28a9fa09f185b6f4ad734b2236" , f 
"0x37128957d3c1f1063fe05bf4a8f1aa141c0a1bbc467d0df9541c6dde38008c26" - ) |] + ) + |] ; [| ( f "0xd1d00ffc803ae0666036febcede69ce9993d9d97bc6747fa5e79cfe15e016636" , f "0x57df62cf18550c3bb3b4d07374714f73ed5d6c5bae5da69033404342214e1d1a" - ) |] + ) + |] ; [| ( f "0xe5478aef565c25d5887aa61d16b81b4dbd43633c5316d4ebda6d0fd668327603" , f "0x31820f0259cf95032d61c135a4c4e6149908d40e89e2a399a6e6c2e92d7c663a" - ) |] + ) + |] ; [| ( f "0x9e547e450cb5c721cbf273fda6339e2ebcf5959b7d57b606a76e872de2e6913c" , f "0x562284fd088cc3dd6c0a8bc2262156734f72368c05b2d13d9e655cfeee814726" - ) |] + ) + |] ; [| ( f "0xe9f9bed2527300c04058ef767fee68c8a07d1185fad12cc69dbb3a2db3ff5205" , f "0xae673517f786eb7f2d47f71d22a1708e15b54b087e1845fd04737bda87e0fa0a" - ) |] + ) + |] ; [| ( f "0xefc77d15f2e21ecfb803a820115c6160115d9991287e89bbf9bf8e04f1b09b30" , f "0x49b0c3a69968d9d6b139c7cde56e6686aaf7e800514b8822cbe5c35adfc5fd16" - ) |] + ) + |] ; [| ( f "0x796772b283d8c114c27c038804fd271eefefab39e1cf719a988ee15efdeafb39" , f "0x5f0928789b50c371a2e4b127721ab5c9e49ec37b1aab2501cc47597d967bd713" - ) |] + ) + |] ; [| ( f "0x8372e07cb095954967ef9d41db7b9320c60753555633c937d4611eaf1a406e15" , f "0x947fa2d5209fdb4e2c7b9816c1096113d1ef87f6b4c5925fe5d866b8cb68d43f" - ) |] + ) + |] ; [| ( f "0xaca798512f7a43e469d8b9e426300d1664ded90ec77f115c6ac796ef28ff5e2d" , f "0x5a887fc5f7503e3c45d0b4f352fd9856bfe26cf661d324ab88b0d48f9a2b0a13" - ) |] + ) + |] ; [| ( f "0x40065e0615a88554306fba68ede7ff500070fe3d39d6d68d601f47c15b5a9f00" , f "0x952a015edf3aee8711d7863906a80917fc122b65d1dd98f37a31e9050062db1d" - ) |] + ) + |] ; [| ( f "0xde2b01bcaedb1066bfc4b056d2d6bd57bb2f0e9174b38902fc02019760386a07" , f "0x5f8db42c47d347b1f6de04929dd453f70d55ce5557d1db64344f6b3c1a6dfa20" - ) |] + ) + |] ; [| ( f "0xb9e8e091dc3f1da3d86082e98ed5a8949c74bdc16562c8b99abf7ce894f9cf37" , f "0xdb4658dfb1950b24b491e6caaa5f4368c43b5c67d97b79860af33982ec5e7935" - ) |] + ) + |] ; [| ( f "0x577688113aa72efce652cce8b462babe6cb131b728af460f50fc1a013a822d17" 
, f "0x3185e0477385a14a189a1e74420f02801e3e79a8c6944951937c0c3887eb3a19" - ) |] + ) + |] ; [| ( f "0xeb4aa00dad163f4ecaeeed53ae1e75086bd1484552d7c993f2388faa03f73f07" , f "0x27cf2f8e4be583f8886aa98d7c48c62cfa79d0ea8cecc72fc27ec22a3837ae19" - ) |] + ) + |] ; [| ( f "0xd1dd47bc4108ce7a9ac06a9643c93b22d939e038c8ff8e15849c26e67f25c80c" , f "0xdabf018f2623e554e4e89e7f12abe1b5b69161342ef16aa2d21bb4c62210c22a" - ) |] + ) + |] ; [| ( f "0x83ae3b83b90c523857f10f6284bac5462e9348c3f29612eb3e7844c189adf11a" , f "0x549d3e8b916be567be7a9a6d888cb112bc04bb26f9e316b696c8e5ae6ba62036" - ) |] + ) + |] ; [| ( f "0x22e0728d099ce63981550f840abb884b700ad6f5717b8746c8f7aca0cc2b5d20" , f "0x5cfbd38bc62c1249d052db89ac23e54b9316bcdf7216cba2833cc64eb2d0a80c" - ) |] + ) + |] ; [| ( f "0xd0fe0b6962a4ca2aff6d204e1a6bcf87dcdfeff67762387dd732ea996686e328" , f "0xeee2dea286807024447c942504eabc49d8a1c6ae3a5e39d06c6bacc494b27a0f" - ) |] + ) + |] ; [| ( f "0x3a27c00d2f8fb3151b1e1541a3e1b07675e3e845bffd0e881af40a62c1478927" , f "0x0c0a989a07ebb54984c209d7a54ab1ee25ba2590f30409aa8881215aa4d64c15" - ) |] + ) + |] ; [| ( f "0x801344e8ca859542d1f61ccf2fdb3d778e24ef1c70c87f8b6c833458ce797a3a" , f "0x1352f996af6eb9edb923c52e70fd8408e9ae9d2edc8d49755e59b99aa6139e06" - ) |] + ) + |] ; [| ( f "0xe4a5c6bbfddabd87bf71fe8c588d213aeb59632ba1ad487ac2189e6b9ee72b09" , f "0x9543e2010b581fe2311027d641828448205701aed2d56715ade224a00716830d" - ) |] + ) + |] ; [| ( f "0xca393ef75305593f5edc9e1170ad774add9238b6f8beb086cfa8149efb049c25" , f "0xe8da3c0af6d0437fb5f001c17fc52d8f367b01b7e96ef90314aabf08d463f911" - ) |] + ) + |] ; [| ( f "0xe8eb7148b56ce168f18572388da867e518a383139d1e75e3dafdc5eebd61cb3c" , f "0x7597d19b8905fa70ee0300d13d8d5e021440f9cae342a11d9ac64fe83a2ba023" - ) |] + ) + |] ; [| ( f "0xb77034cf59a3335687a76143dd78abf94d6abd3a2db59c83726d23da68b5b512" , f "0x7979a28c54fe78db180662a4e4294c96ed48c3d65514db1ac9123aeee27ff136" - ) |] + ) + |] ; [| ( f 
"0x238aa4c17db783db69596bfd6bb731d607b36a2ec1c99975e8bb2fac40aa4e38" , f "0x84907deaedc7577a2c245dbdedfe2d5acc3651a0792214b2cbd88f2d3645ae35" - ) |] + ) + |] ; [| ( f "0x9c2dd564860b4397f422a030a0588d74c720d98cb9db3e3d5253edf76343a714" , f "0x9a9202a7116ef6b983178cf95d149a0b9e29ad227896f9f2ef421e1e319f153d" - ) |] + ) + |] ; [| ( f "0x978a4ac25ba3f038d0241824ec563d46e7ece19b1aba5b1285995b438529aa2e" , f "0x3632cd1d73dfba9f524f89a1595f110d081f3847228144ba77ad6a4f9b61470d" - ) |] + ) + |] ; [| ( f "0xc3cb56b8c835287db6eab52cf7d39779b386925c74c6b18a73cac14e3cb7c81e" , f "0xe2c3961b5047b909bf6ecabe3493ac786f888bcddc4d8d3d0ed55ccbdd583c21" - ) |] + ) + |] ; [| ( f "0xb7f2d4b619aed046e10d9e12b3ee931d7aef3bfbbb74a7a7264703bc6a747b1b" , f "0xc8dea675ebbb5552b65294fd70ef577871305dc6b108b0e958f2853f42eb8f24" - ) |] + ) + |] ; [| ( f "0x481679de7c33818eb8136d9a2194cc735b74b8a01bd43e94469a9897b5c33818" , f "0xf4fa305d1f2f6ae93d6c3f93c077208d83fa635d7e2d3835abf4ed06f4b06c3e" - ) |] + ) + |] ; [| ( f "0x9580d28ef3b1bb4428a30ac7f86d1ad2c6f9e7c623f46ffeb777c75c52eca73b" , f "0x744d6b031884489b8b9b01dd5b4387be07e10325778e908bf8281cb2fe73931f" - ) |] + ) + |] ; [| ( f "0xcf65ad089f940ba05340fad5c66bd1a6af99cda488f96d87dd4aa490cdc8a71c" , f "0x60c4573a009a56f3826c482121fddc85ba0922ffe8dbd9f826f76b20e23da227" - ) |] + ) + |] ; [| ( f "0xa1d15db63abcb2241d162eab66e503ed813789408dc2f46eaf92c8b7c4695a0b" , f "0xd4bc32419eec93b790b9322ce82f98acc4fbbfbd7a185214d085eec0b2d80f13" - ) |] + ) + |] ; [| ( f "0x86aa7fbac3c77087cff521f9c34c44762e5ed7f9ac7e63faea45e945db8a0d2a" , f "0xd2cdfb91362b99df651259f39cfbe339a208b92dbced18a71b96d4f246e9a001" - ) |] + ) + |] ; [| ( f "0xb393fc350517759ba1894c9aade2f5f32635ae6998145f417aebcc753cfaa403" , f "0x458eabd811a2f710140a9e106a5b2659092c980da876a030e916beb8fac7b815" - ) |] + ) + |] ; [| ( f "0xb5e70f7ac99dc48e6630acc097b85e9f48cdfa9ba22e7bd41c1bcf6d83eef324" , f "0x28159f12926799a52647b0c453eadd3844a49f24dcb44df9bdc4b970256db824" - ) |] + ) + |] ; [| 
( f "0x0df2f6a2aac292231bbb57daa47b94b28239e8aec8ff5a795393d7c6fc915d10" , f "0x0c104c54546821e1714ae5a004df49863d12ac9fe67f71790f7a57540f8d1930" - ) |] + ) + |] ; [| ( f "0x615c03ec7d3327bd64a19bffc977549e5ba19c75c37061a6c0a2a281739ec03b" , f "0x1776af2c756cdaeee962844bb8b210308d48e12de765684a2acd300405656e05" - ) |] + ) + |] ; [| ( f "0x5a2a2aeb93cda93f8e24fd1e89ee151b426edd0f13aa050c28db40fd6653fe2a" , f "0x5cd7e8dc445a064e17b4c2339f2eb04627da416c3c59f299e4cace6d34838e3b" - ) |] + ) + |] ; [| ( f "0x2c80a4b32f762ee43574036d2f7c0a8fe4d8305dc5fcb4bd77de42555c8f4b24" , f "0x4eed6a75fbc95adbfbc236f006e6b22568867630ed13d4e7dee308be52910732" - ) |] + ) + |] ; [| ( f "0x6f5d64c3cedaa8707f1d926928fa74ced6e6c1a29f3443dbf3de89d510909f18" , f "0xe54917b51136e8aa1232f170c9d72a073e525abad1389c52eece8b661859f939" - ) |] + ) + |] ; [| ( f "0xcf63866c9228b0441677664193ccf5b09a136602a6704b951f8d6db280aa7517" , f "0x616b64add8f49347e1c736f67022833cfd6308ac7dec3e365952640e0414a115" - ) |] + ) + |] ; [| ( f "0x318b1c0aefae8f29f1f702a90e2648e6f50858bf408be007671aea1bc0f5722e" , f "0xb206fc8c0814e0434ac2e2c5dfc7aab7e1d9e49ba145925ec831ee52a16a3821" - ) |] + ) + |] ; [| ( f "0x68e8d84c5cc86651900905f10011057bed13a3696612d62764262de1a188080f" , f "0x26ef0aba46385390f4f8764657af9f75683b5b7d6e3b63e2de562944fdbe243e" - ) |] + ) + |] ; [| ( f "0x25e22cb93b1b314d1d768ffe16cf8e5692ced6dbff4c2b8e4506a82d0fda2f0e" , f "0xa1456c4489e8710265da8a8d91c76b74ca640220c39e5b234934846910ea8213" - ) |] + ) + |] ; [| ( f "0xa039e8fbc062c421b48b07437c23950e229bac78813ae584a711b0837be76b07" , f "0x3bed77e31f5446355bdbcb93bf68aabc1c84f522c9230708582ed82f1c13402a" - ) |] + ) + |] ; [| ( f "0xcfa4cb4428c601911cc70c180e85631fac720195cf674b520e9e5ac9f9c3890f" , f "0xa9b97201e6cf7c42a4ec0b6be5d1a9980af05d8e8297a676640ccdd3fbd5510d" - ) |] + ) + |] ; [| ( f "0x9b4125d23b40e8f850785d56e45144d7be8b4195887806a8bdcb9ba42f8de218" , f "0xfdb2287b3017f629b41aa05ea0bfdb8559c6b0fd5a9cebc1dc2cbb2151408909" - ) |] + ) + |] 
; [| ( f "0xefa3efb6dfab4749a2eccb929b19ad79b7419e6ec24c8c03aecc4dcde3566329" , f "0x1b766fc78fb91b65671b401c7bff84d0f7fefdbfb4432b29d9f91aab6d9ca711" - ) |] + ) + |] ; [| ( f "0xc084dcefd258c820ad20e38467b03b2433174b20f0141e353c0995774309c334" , f "0x16f3714ffbcd0755131cd7cd2a318332f04ce3dacce869b6f8738d3091a4e214" - ) |] + ) + |] ; [| ( f "0x3f69f25257c77e58fe5572c86df88ffa18812926c938f0634d1cb6829ec62411" , f "0xe77f991e67044a9cfc481e558e653a4c4e867e1e7a3e5fe0656fac4c367f7409" - ) |] |] + ) + |] + |] ; [| [| ( f "0xc5bebe629ec0c38463f54f0a6e3bfac23099e3ee52ae4160ad4236ad49f3ff1b" , f "0x4d5f0ff30b877347094daf97558236713f784dd2935a40caa3bab0801b363810" - ) |] + ) + |] ; [| ( f "0xe4d097ed739fa4cda65a21e843474b539159a5a7de359665f11e31fa8f8f7015" , f "0x4521e7debb85b1bc7d7ca1b0168578de23117078bb036835de5b15b057f62411" - ) |] + ) + |] ; [| ( f "0x117862aef703a9cdfb80eb9ba205c006d711a8ca30b1e75a6af785c5a27f4f22" , f "0x04c7beb9a2634c13f4ad8f86fdd745e0abe81120b335a8d96896e545f0f6ba0a" - ) |] + ) + |] ; [| ( f "0x391deff0480251e3bc590a46055fe8d73f5876040b98a98b5fa3d931cb2c3634" , f "0x306d27f38db5751da3f78b0a36366de919ef669d085597aa2497d76c017d1d00" - ) |] + ) + |] ; [| ( f "0x5efc9899e10bf8b69ee9d7ca70a730cfb1974cf7efda50a589f0b53668a29b14" , f "0x21dacbcf2e54cbbfe9152d3e9df1f60f71eff1a72d7d1c21b805fd8894828312" - ) |] + ) + |] ; [| ( f "0xb0210dc853cc44240ad2e19a032f00d782ec453aed909f787ed2279486337c33" , f "0xa0992c8a7eda1ff7eb2ee483f0adef806a5ea585b017bbcd9632e0b63c50d227" - ) |] + ) + |] ; [| ( f "0xc1a2836095ee82e2907cc99f15ff1336249555a64fc20aa635bc868b294c6f1f" , f "0x69dd55db0d6e61f0f78d2e7d244e97ccbd0e10fc268443a414fec12c6c12d129" - ) |] + ) + |] ; [| ( f "0x021a90e38d802d0e2c4162ea1c4af40c9071a9540d10df52b17a6213c0269e0e" , f "0x03900c2a00aa40e605f5c803a4d77656bc85c5f5563d3f91847cdf232993e415" - ) |] + ) + |] ; [| ( f "0xb11506d1c25fc50fcc64578922893c05528a1bcd91050e9d8975eecf26c19d1b" , f "0x8c6e345a280aec9f411a9f742ed8d066c0110535106039d855597d2c0a29ec16" 
- ) |] + ) + |] ; [| ( f "0xd1761080ff8ce70c4a1ec65c2b7701c750f97569055e4b0046cfb573026fb92c" , f "0x6d4a48317ed6eb41e383b04b2dad3d46d0ef639a6cccf5f0d2a5b98692d8c122" - ) |] + ) + |] ; [| ( f "0x470837bf345ac180f6fdceffbe865cdced0f3816593b48e9975deca27a606217" , f "0xa02530e1cb8b5d8545ee0d57fe0c19f302426ca0b9102f0413734c16f553cc0d" - ) |] + ) + |] ; [| ( f "0x65f4527babfbc3209397941b1c48f3c28737e9c327a4ccfb7331d3a0af8ed737" , f "0x98b07cdd72610010e2f2cbcc6aebfdd38675723c2a378d9eecfd536618e0e638" - ) |] + ) + |] ; [| ( f "0x1f3eea3e7d6606da578337112bee24a5e609900509ea2b049a4f3efcec455024" , f "0x92a3bd0f3d5209f854062320a0c4e0846ac5ff9d59ab2f9eb0fd5b64cd923e33" - ) |] + ) + |] ; [| ( f "0xb47a9798f25c74d8ab1f8cfdaf6510a18b0e3effcd08e29321b584abd5c6593c" , f "0xf494a2b74d17d55eb27ad911055e0028b2b5468e2d66f338b92b8b687c59b807" - ) |] + ) + |] ; [| ( f "0xe7b2a4b175ae1e2b8048bbaa491147cdfadd508e5e399c80b845f37c2524161a" , f "0xa4bb7db780365a46df90bc9beaa010828d6fcd8e80bb7f0c6182f0f32eb3a61d" - ) |] + ) + |] ; [| ( f "0x3e84fc38d2424ddec7ae89e3b12e3d3dbc1ba443521df9a8eabdc2287157120f" , f "0xc8d20df8757c3cf40d980c3d225f3b3c63dff2776a42f02128a944e2c833a210" - ) |] + ) + |] ; [| ( f "0xae33f2b90994b1a16cea908b9259a06a67bbddaf6a537930727da3a89f3add29" , f "0xdc8e33e19465f6c87304f34afa3742e9fcc368e89f0a395586231f06148af60e" - ) |] + ) + |] ; [| ( f "0xedf6d96f1be1209b85af70f25300cdb9e4231aca396705381e6457be393f420a" , f "0x0f87bf24a7d3f30f25bfdc65080e06f883639180faa9cdfce79a63193df4f509" - ) |] + ) + |] ; [| ( f "0xdc0c25a3de4d92b3167312af06440bf767eae82801a168b8510c5dc9b8215f12" , f "0xe4e3a4b8fd70eaf5f433912e139d4e3414357982c78d65e49443aebdc3f56632" - ) |] + ) + |] ; [| ( f "0x116f3e11362f70a303b8684b56f24e88de5903524364835dba930fe64fd2e017" , f "0x2cf7436c38ef2d022005099e4d65de480a7c0e7a7f6db780ea9291aa46f7f93c" - ) |] + ) + |] ; [| ( f "0x8c07bd4e422ab474bf68c65b811a3eb365916e2478746c5240bc5ccaf5b1073d" , f 
"0xeb3bc148f48745ee6ec83c32215a1a924cf7928846e8faa8d1503e8a8656ec35" - ) |] + ) + |] ; [| ( f "0x42e1dbf667fec3ad26e953401fb1b2aedb2ab79b789f73aadabdcc4b057a0b21" , f "0x204f86e25ca92310041a1415aa03e07577346c9cf3df4e5e8fa40fe6ef0c1b0e" - ) |] + ) + |] ; [| ( f "0xc92257f5c93d3ed217985ead391bba6de17961c7567eb6e7b25e0e8fad746e26" , f "0x8f977fd7dc7f3b31ce38dec47b1376165c7e37c3b634107dd1b09f7ef8618a28" - ) |] + ) + |] ; [| ( f "0x14e7e735825c640f824624ba3cd40a92d63ccb3b294b34fe8b059b1fb5651318" , f "0xc95a3d01e6c212328e6cf15baaf33731175a0940637c82d63f92d44f7fbc9910" - ) |] + ) + |] ; [| ( f "0x8c9036fec218f6f7875f492a7ca2412135e1085a5ea37429f86274fcc69e8c37" , f "0x5edfe857e030e5a82ab9361283d6f7f4d033cbaa2fb226d2b14769b5b877f800" - ) |] + ) + |] ; [| ( f "0x008bc1b60a15633ae73dc697f3791aea6449b669eb0173753de7db2633f0cd08" , f "0xb7ce8b95c653244328567bb743f8c60ed7d46502516db807cd8152bf80c2d40a" - ) |] + ) + |] ; [| ( f "0xa83278b4736340f8d7cb7169fee90fe04c5f759aa6dcbc7cfdf168ab871d8c35" , f "0xc936f306b686ebc91ec2c634f80be2e227a1500b1b6dbeb2314bbc1c6243711b" - ) |] + ) + |] ; [| ( f "0xd28e790ad09ddaa2a0d79b329ca89bd51ec1c36fb8ea04c9dafe5892fbd0fc2b" , f "0x67c86f4c2be2afaa41136e3730f2e27ded3b32c232f497622e7b2addc0adea16" - ) |] + ) + |] ; [| ( f "0x009df6bb3f09f3480baae3b547effeab8a9d0e44d76a6abebcb18f6e6ec88f38" , f "0xc9b1e2e1b19968c7537c852106b74527007fe5b590be4b4a80ef937fbcbb9f39" - ) |] + ) + |] ; [| ( f "0xaf1053c850f504688581ad3286c94011d72c913a68cb0b30ec8e218ae61c0400" , f "0x359091bb0166a223363b475614f58307e4f2953ea3748e1561b3a751b848762d" - ) |] + ) + |] ; [| ( f "0x4266411bf825cfefc39e85bccd35c5e9765ddd5709b66135221d7b9cdba1ac33" , f "0xef3f7ad3a64d01abaa8f34c3cd4046a5210913dc5bc92c26bdf12eeacfe0bf38" - ) |] + ) + |] ; [| ( f "0x6560b90172a8cc8f58f5b990ddf4c796cdb781c46d50fd905161f8ca1477de04" , f "0xe38bc046b89e1872129f7494f5414b71d83387b1ae1210f99228ae4472652b1f" - ) |] + ) + |] ; [| ( f "0x75e4f25a3f1f8f8deab6bbebf9da7104868aaf68bbfaf643802c5c69f9f6c93f" 
, f "0x69173511d4fc623e5f2e0fd8c08696e63c2abda29430bd2f4e2d01a2eeb4c917" - ) |] + ) + |] ; [| ( f "0x569a9a892d8edfd3a801bb69469011048e75b0baa28149dda161105d693fdf17" , f "0x2fc52026750e9abc640395c04c169ea3653273c1b16f0c3280f2211a97f69b20" - ) |] + ) + |] ; [| ( f "0x34f3767ab7f451127a33028cd019e642ebe245e169e10734cf820254f4773e05" , f "0x7556710e724d3fddd1ca8ea1220b14c7261c83a6e0d23f9d85eaa7e950aace33" - ) |] + ) + |] ; [| ( f "0x0e6f4896493429a4d12f9939f506931710f88c80598be5bedc17d52cb9335618" , f "0x38195f4465c330eb20bce1e51ea6e547cc53c2ddc5a17371e5ad126789ff421b" - ) |] + ) + |] ; [| ( f "0xac0e0001c690371df574be28dd1c69ea479c7748b39460a49a8566c4cb43912d" , f "0xed7ff46c17848a32e8fecfd0bbf9e7d270114f812809f172387adb1d5b569226" - ) |] + ) + |] ; [| ( f "0xe841126896e045e63265a561dfe26fe2ee2e51bd240dc0969af81cf5845be32b" , f "0x6eacb5a78848a7a0cfe51d639eb19739d542e247d2689403ea2ed5fca02a0c2c" - ) |] + ) + |] ; [| ( f "0xfacb3bb5eb98a4c33de66dbb33e4c7d6f87e17fc47c4da5241c43950d581d31d" , f "0xa99bce671912e9b4208b40112507e111ec136b4cf4a1fb0c5c2aecd260e99139" - ) |] + ) + |] ; [| ( f "0x369f9cac02fbd936d32d139d1e49cac45c7c785665b3080cc2eb8e728854f102" , f "0x288062030098aa29fb3c154e08b74fd2afad3f6194bab725c987962e5845893f" - ) |] + ) + |] ; [| ( f "0x6fb1eabb76aa1a436eac1ef8113e6c6d7e963d38fc8e5118b441a48406e41719" , f "0x4fb92b5cc5aa962f4a8c19b7cf23ccae953f6d2e3f78305cc098e99107ec492e" - ) |] + ) + |] ; [| ( f "0x274047180ab8921752cd931b5ee599b264847f9c57e319c9640d276e5ea9693f" , f "0x42ac9acbf962a21f89b9644d21787876d7e8f9a7ac848e074ca4413e9fab711b" - ) |] + ) + |] ; [| ( f "0x54f88cf95cedbd01b1f1dabb9c51f8d124234ea58e66595535fc99df2b81fe2d" , f "0x7e34aff1b15100ef45d89504db4dc222a2e399f3c25d5b6e49df6057c1c7a601" - ) |] + ) + |] ; [| ( f "0x598817b55936c13e88d317cdb5dde44e3ceb197b4915ed99ef4f0e4104538126" , f "0x22eaf545dbb29b3fec403fb563ea87fc849b830a5daa5ec68a59fcd1d8de8f30" - ) |] + ) + |] ; [| ( f 
"0xec9635244920b4e53683d7aa169ae50b73220a7b3e983b3813652fefcdce4d35" , f "0x2351b7c94f446cc56f655524718e722974cc1095ee3723d74958ea8b4bc4fd1e" - ) |] + ) + |] ; [| ( f "0x0575aeeff490dc34fb1b63fbdbf95b36d1d5dafaaf223c9f6558caaf78faa224" , f "0x6471d6db8afa69a505834717f42c0b2f7e51af9d9f08add73bddd56860764f27" - ) |] + ) + |] ; [| ( f "0xd18b4361ebeb0c74a0d01604715f8d35cb079d63a88678997439daff47eb7623" , f "0xb8ff2a12b39b9e1842e61adb3f68c2cf4f9a4c4bf8c6fc9d233fca99c7bf680d" - ) |] + ) + |] ; [| ( f "0xc9994ab72cbdf7cb7013458015f444d9a1eaaa74d4a6d8eb218ded40509a4910" , f "0xd08bff4533e2be31850e66796b4f741ab608b239d5f63a659fa982909f2d3d2e" - ) |] + ) + |] ; [| ( f "0xf62fca0b42aba4891fd23c16a1d2c7e752d28714d6a377d2737d58915a32fb33" , f "0xad76529423b178dfd743c5359b8b0f9aa8805f258ad1419b14196add9fd78a06" - ) |] + ) + |] ; [| ( f "0x8e2ed23cbb97d946bfcbe0995e154cc56b5f55564bf0e83c3ecb344d36695524" , f "0x653256616e9ad6d414b5521ebdc899ad3122f71e769128aeadcfd9e552b8140a" - ) |] + ) + |] ; [| ( f "0x2714104bd653f0a91dcfb50f30a95ead9669a6c3813acea08340aa8a1d921606" , f "0x6156a0d477174396a62bf7a724ed33c1ffa3bfb585bf506352d4998fb375a32f" - ) |] + ) + |] ; [| ( f "0x95d10bcf9744528e525880eeb7362e8b4824b72d4e36a87181af80441c0c8908" , f "0xf940527803702f32f679f9e2c52fca615da486740e3606cb827cdc33398fec03" - ) |] + ) + |] ; [| ( f "0x53c4488d3a24a8f89d489fe130f44be7ce0106be5eaaee12c295badcdf20f923" , f "0x2ee88d44861c88e84ade903cf2c36e54a89fd435260d7751613c28d4db3c2a36" - ) |] + ) + |] ; [| ( f "0x8a7d9869201c82040fd2fdddecf028cc27b53a87bb4227a4ddf569300f28ed0a" , f "0x3c284bc21d9dca35c89be377f41c6e153d5ff2153618f9b6ee884f5404b46836" - ) |] + ) + |] ; [| ( f "0x9ea8dc8151de12af34a6095ae276818f42500039e862f4f2c57946bf5883253f" , f "0x47b51f4c36d3ff7a4c748a405a8f58f76184d7913cc7291bac100f2022d7e807" - ) |] + ) + |] ; [| ( f "0x7ea82a0148c39b37b9234bd152ef489297cd135054a29d6e9c91cac908dbe80b" , f "0x14ccd064e5be01f32464c04b4895c1f4b009631b24ab2664dfe6b0e670617f2c" - ) |] + ) + |] ; [| 
( f "0x147999234efb559688003970054d19567eb696482ecdda2ea076cce015797524" , f "0xd159d50d3794562b8115d57b09a61fe34e8d75db4a8d675228d608c48e817909" - ) |] + ) + |] ; [| ( f "0x7d97196d50c309df16fdb2a5b74df59a6c3e0b00d8ff4b92c75185b0bd20552f" , f "0x1db57d8f02ae0bd01426b4ea0be5bbc61187815d235febe8ddb52cc8f412b62b" - ) |] + ) + |] ; [| ( f "0xe4d22e32400b0c969db4f782c91b69cf2f0e1bcc61bfcbad820d9453ed937106" , f "0x0cef02948978ba15a508e7bf39900cdb2ceb34e00c05597efd7027630926c01a" - ) |] + ) + |] ; [| ( f "0x5935e31002dd10aa239fddd8806322a984de4e6e2513f7a58ee6de71d98a3f02" , f "0x02db3f87aab4f0c4a05d70dac5f400061d3f838e79c35d676d5a7c6ac5121929" - ) |] + ) + |] ; [| ( f "0x29141ba9a82e87d9814f449a41e1c7918a073856c0c75c1874437ddfc8a8c925" , f "0x92bc9a0d6487e40c22fddd02308855a82f8c0607dec98a7368a2366e18492400" - ) |] + ) + |] ; [| ( f "0x6555d21abd93a3d6f5a3f44ac8c7b9de64f82cc77fd78a2e639b33a96f9a070f" , f "0x649f90d4cf1c205f6486a6101d90b9bc4a289f632711febfc6f5a479a197a931" - ) |] + ) + |] ; [| ( f "0xc5513ff47b2e1d3aefca333f5a60193abf85461a66468e30dbbb67a1e7fea10b" , f "0xde588fe9eda47615c1ff567c8a049b3d75f2c2dc7064e698c55930c573c48c25" - ) |] + ) + |] ; [| ( f "0x912ac13b24a7a7a963c6b2716ef1eed2935f9e13371b2e722adcee040287f216" , f "0x9fd9d2f1a056e5c8cd30b6bb69321ca61576036cac25421e588535c1a519c80f" - ) |] + ) + |] ; [| ( f "0x944d17f288a716eea1af99301136398e6bb2d465b2ef9c24f1d270697163f315" , f "0x29627758fbd0f375e89a9af4142f7eab6c0d66d4b836f36ac9253c459ef8a91c" - ) |] + ) + |] ; [| ( f "0xef599d3f834354a06a060b2201140ead1a57bd5dc138b610840bc40e4520611b" , f "0xfdafab4665a7a603a6e6cba8756d46d7a15c4e84896de759ce2c78b4a6201321" - ) |] + ) + |] ; [| ( f "0x858488e4d1b94a2487b81d7d78c2871d34d7894b0032cb624d739a3a10e7cf27" , f "0x280dc69ef6890163fe7e4d51ff57397b63b3697e903c0603ff9c3330b299b90f" - ) |] + ) + |] ; [| ( f "0xa98f263ce38f3a558fb98e08dbf26dd7b086ff376a461ddd4f42fb54d5ad5d01" , f "0x56aa73ef966b982f4c9c0fe329c6dfea03576de7b6b0f36eed802aab19dafc35" - ) |] + ) + |] 
; [| ( f "0xf8e5b84f0b5f3666ca67983231689b35e334cb6a90c470a7efe479e1b0995307" , f "0x6dcbdf0431b96375ac5a1582acdfa6196d3442300fe6e429fa8922d1ce3ce204" - ) |] + ) + |] ; [| ( f "0xfd3109285d32797b4d82d73938d5047691f94ffb738df9bd083e70e43a01ab12" , f "0x1f61e4bf2371cfe283bfedfdbebc1858db7aba744f021b21a7350c8cdb0a6a04" - ) |] + ) + |] ; [| ( f "0x55f9d14b27aec69fa1b42a95ecdfecc24b3398c49528136bea4db6f4ed26e518" , f "0xd39f012c8987fc099e575c242b199a9bf4daa99b2ee5f20cf17069793a722d2f" - ) |] + ) + |] ; [| ( f "0x55e6a51889fc3f1e5bc00646aeb5355a7b0de9a6d5097169de46f1211028011d" , f "0x8912ee3c6f41c556171c9a88ed6344710e922e35455cee6db54ef5c9b224cc33" - ) |] + ) + |] ; [| ( f "0x6ecd6244988a3f1ece42eb7243352549db606b44426c8ce0b6b59faa5bb15339" , f "0x43c3abf08e0f998746551c746beb30e2cff40c093c88dadb98d831b94290c011" - ) |] + ) + |] ; [| ( f "0xf4315d7baa67e2f4a87b3602f6c82a1bfa6970d72612f2e44a32c7cd7d97592e" , f "0x7826495e1b5219d1c0bc4e856ade3be1a5c8b52d109965287565469ce3c1462c" - ) |] + ) + |] ; [| ( f "0x22b7a411ca26329fbf1c6be75c23420152cdd3fc6190f6bab0cb6bb2617c0810" , f "0x80b908419ede5629d7475dce7189046cfd0b9f2f0cbc91ef271833799b9d2d25" - ) |] + ) + |] ; [| ( f "0x144aaab92d18e11f45ec6c45c9e0af32fd9a9d7576d6c77620e58ccb18fb5c1b" , f "0x4664a0d5dfdb8b6bb9ab3c51e42727e7a08764e9f1d3f38c6cae743386102b27" - ) |] + ) + |] ; [| ( f "0xf7a3bbf0a69302d1e1ca0d505ee3223ec63bb8c5576654713271a9c099a4d315" , f "0xfe2b880646423bd20575cb493d3bdc92d6e797703e6c664eba996bc3f77f762d" - ) |] + ) + |] ; [| ( f "0x9eb94b74d798a10690ed9682cc35d7925e219fdb356fd97ea54b75af25bf7d0e" , f "0x8fa11e1ecde64ac78ed0e269c05acb520442a2c69b525d307db8ac97715ae836" - ) |] + ) + |] ; [| ( f "0x116095ac37ccf47a2bb950cc616ced6f9da551c6077817c6dd266ca1701a8b06" , f "0x84f71bb5733c9ef77d899f49e8eef198c02576b23ecbe2ba67a165d95db04b1b" - ) |] + ) + |] ; [| ( f "0x4516c7aa5dd4a0b22b0ba20dd69c39f5d6330e0e04b040a0445d09f50b11533c" , f "0xb1a23b3af66111e898cbf1da835af1852965edf0adcedec09693581ddf6dba32" - ) |] + ) 
+ |] ; [| ( f "0x5940104aba56c7ed5110b45e7b3044ce97f194a409778f4e4750b804205b4209" , f "0x2a18cfdf79bba31ad21827d499172630e92f8d4c9c38de1b4d2ddb1e95b5042c" - ) |] + ) + |] ; [| ( f "0x8e36bad1699396cbf01eaf446aa6efacbcfe4e3bda33ecec917dc910f0aeab1c" , f "0x3f2477f3a674647998e3c5c112d9d3f8ef01187e1f1c24fb6978b11983a4d916" - ) |] + ) + |] ; [| ( f "0x86a1e9767ecc9424847ddbcc73444523125c6a2eb1d43fd2f1fde2289634eb27" , f "0x529231ea62c5f4f11a4c7c385dc09728acf5f3c1f0841d1587e2d8ff9ec36030" - ) |] + ) + |] ; [| ( f "0x848e5ff34c3260941b941b0926dc3b4d0389fbae412ccf2f8a8587317c72ee11" , f "0xc14cbfe5124a1a9636b9569b0d2e62214327792a1ca40a4195ce81364b684016" - ) |] + ) + |] ; [| ( f "0x3cb7d25ca1dd70060efdf3336a1b50df7f0bd339bf1615a6ddaaf7d5da4fb417" , f "0x16aa446d9169170309f94c89eaa45f9b5d6811fc5bc5f238a9ef869c8908363b" - ) |] + ) + |] ; [| ( f "0x8ea2ec9af18c21a1c50dfe70ec8de339b4fe22256b202b37bb15271152179e06" , f "0xce278d19eb9f34b13f7a5fa4191953d34ce6b28612f6cda8b77134d122289934" - ) |] + ) + |] ; [| ( f "0x6de23ce989dfc666a91b7c34d83c02a4d203b4dd20b9d7701e2151bbf8a51419" , f "0xb3e92a65cd42e54a1a755af2834d22e7d4a461250fd03410e33dbb7c4a054216" - ) |] + ) + |] ; [| ( f "0x817acc943cca2f81118f8494c61e4e452be62143e15f0f179804b811149aa90e" , f "0x52f53e0fdcc3ed5698b4c282490030ac1a62fcc138e3daf9a3f0afca85ccf433" - ) |] + ) + |] ; [| ( f "0x5c4fcbf468fb6fd69da5cc9367f11a0d228ed6a0213ad0504f8551917efe020e" , f "0x552d68d0b8c4643eaa125c1730b445c2c1a70ced31e6b38a136d7624bb88f53e" - ) |] + ) + |] ; [| ( f "0x074af9da3c43333b068797325828f849884581a457586434353dcb6c682c6e04" , f "0xf781097e14c765af18287bffa65e8b174ca79a8346785acecbff0333f6be5801" - ) |] + ) + |] ; [| ( f "0xe297e308ac6bfada3a8b3039ff51bfc9e85e0c1ac7bf910bac87b0088d542a11" , f "0x3f0be84aa78468be542cb52afc5e27e6e5a2f70df9ccf1d4da22bc9c1700662f" - ) |] + ) + |] ; [| ( f "0x3e69e784ae07b2e924a950ac3945f2a4a11b97c6f390672d7bcd035cce6e1b1b" , f "0x7758a3d041e37f874959366e72030f0b91f5b8cf2e6b7ac2cfb8bf25515f5d1a" - ) |] 
+ ) + |] ; [| ( f "0xbbe0f5b35c297cc7beaf4ac00b8e68540a47ab4043c70ff4494cdb3c0b609e35" , f "0x28da3fc41a40e330c0a34e85ea4c9a0702fbfc17cf7b0598cc80817fb1aa5c18" - ) |] + ) + |] ; [| ( f "0xebc23bc84017adaba0729debc6d36cedd0373b9f23ba6020de9280be9442cb3b" , f "0xfc3681299667e7d44cdcb1c294ad10aa4ac942c7a2b74310e3191bebb131e611" - ) |] + ) + |] ; [| ( f "0xb3108e247c4754df03bde192c0e21c7334e813455ba49b126242d42d5f131b12" , f "0xf0e7fbff227c00853adf123746b2198f1d07318087c3b3468295b9a111262e2d" - ) |] + ) + |] ; [| ( f "0x7cdae791c4142d67614ab329b786c546a16e3a585941090503c16b36b57f0c17" , f "0x9eab5f235777eb77380aa0128f8672f8ca9a5173255336e919f8112b079dc71f" - ) |] + ) + |] ; [| ( f "0xd647aed3facddfc7a7fe76e0a2a9957f697d579cd7c7a9b295379ee8673e953d" , f "0x55e9239fa59379c86a9a126b71ac81ff23c9810265d2933602cef4e80a238d3b" - ) |] + ) + |] ; [| ( f "0x0ddc8f322d8012ae9dc109ab7f9015855b6c348de706183328f1d2218e050401" , f "0x5ce12878aaf70104769e2b53d9a24a3ac093da468e3988597039f37ac48c1436" - ) |] + ) + |] ; [| ( f "0xf0460d386bef859c6a053753e506e5ff1a0424f041d63023a284da814b554b3c" , f "0xf52b7aa0e82c5b49320a239464568683cc44214db9f9df3be037282f7e978c23" - ) |] + ) + |] ; [| ( f "0x5b5aafc0d7c62491def44ebf940bbb540715cb7ed1195254327a73133f731113" , f "0x83fb4f775db5ea98107a3bccdeaba0b09f247de71a53e6a26e6f7275d3d4303d" - ) |] + ) + |] ; [| ( f "0x0e88634d799e9ae788acbd94d0e14f723420b38e0df9fae950a7e98f6de94914" , f "0x25af1be76ee12824a9592c30c7e0813e59fcfe3a5934ae83e2adbfd09a106700" - ) |] + ) + |] ; [| ( f "0xe371540df45bfbc76bbcd87ad4f0f28549b738b2c736bde8960c3d5943d4f00f" , f "0x6941cc6c6f0409639690485853f5b2e1fa2cf91d03ed9c2a0d180c83e40ce72f" - ) |] + ) + |] ; [| ( f "0xe1dcea8734b91e3d95ed04d4df2a87648d7596145986312a569bfc52d91d1f3d" , f "0x6acaf8b7003c768e2213d239f0fc8930f4d160ac8eeef6b06ee6f3a9c13a5433" - ) |] + ) + |] ; [| ( f "0xa48df0505065140fb311a59d7f5310c08102002dda1b55a07d1eea7ebb61fb3b" , f "0x3fb0b1f37e3957274db88beaaa577d0bcb684677a1b258b6924420f93c60ae11" - 
) |] + ) + |] ; [| ( f "0x9348d21cc435b9749084d4b8ccfcf08b47b19318fe5af79d6998adb962a1b223" , f "0xcc3370fbc0f9cfb4886536d67602e236bee7807b43b17d96b236ff251dc9ba3a" - ) |] + ) + |] ; [| ( f "0x262355f1e9e9f8857aa5441fc185cc13b7068fa99234342a31a005f7e3849b2e" , f "0x48cb2de784761c393cf976b2d416cfcbd0bd07223cd326b19cc4940e02fda035" - ) |] + ) + |] ; [| ( f "0xed12e4ff0515e979238f856deb3dd46b6dbb91c3057d544b36f689807518952e" , f "0x5cfddc8f4d36699b798d98a9b8776ff385fc1f995f92a138786abff746fdbf36" - ) |] + ) + |] ; [| ( f "0x22efd6b5f0a3000dedb1f727123e1061d69c4b58643a800fce278d4d1346872a" , f "0x73df87671893c892fff793f0d309611edc1353e209eff3ed6f0b8fb1c69f2721" - ) |] + ) + |] ; [| ( f "0x490eaa2f27f1d0db4becb572f9eebc2f558c62f1cc6fbe32809f37f0c5d4dc3e" , f "0xec191bc52ef33a98331f9aa3da80155ff11f4aa559bf60f93f444a4b440fea34" - ) |] + ) + |] ; [| ( f "0xbed5b639ef3504f2e3f5500288403e13f2af1c2e60d267f2ecb2fe843a311037" , f "0xacb8538808378264d230925ef20ff6538a6f97680ba7d00e1373eaed84c58821" - ) |] + ) + |] ; [| ( f "0x79ec518be2b42a426fec67ee95099fbfa03bd500cd02ab61a48529b609ceb50a" , f "0x639fe906f299e3dd868220981cf6139c4b152ae115123f71aec4a3ce8c462301" - ) |] + ) + |] ; [| ( f "0xb8cd67b3d8638f61943891d1d3641fd0e52161602638de3b396f22755ec50728" , f "0x492aca4977fcccf547758de75e37b78745bc89d28fa262503e76b9531fa28f36" - ) |] + ) + |] ; [| ( f "0xe7197e953ee0aadd8175e85b55e6561a19ef4067b53de766bf9c2bd3f62ca02c" , f "0x302952f56b7c1a8d254fb72a702fce063fa1c681513a9817c5a98f3aa27f3505" - ) |] + ) + |] ; [| ( f "0xeecf547ca48301540f07b7f01f747a4d8681b36aad149ca84319ba8c38019522" , f "0x9040039c23cb421960961bc71df24a61caeab5fc9df5ef2f897951713cc54723" - ) |] + ) + |] ; [| ( f "0x53e6e30ac181082ce044a1e1dbb6a2568c59899b86f6aa991c6a7fd81005a63f" , f "0xd4ea4112c0ef9dcdda78364f44cfe1712c0d063f6e27be88eb4b585322d41819" - ) |] + ) + |] ; [| ( f "0x2dc22be11ea7603af156c0df97bca9bb963b79d8b782832c0b14fe4988f53c0a" , f 
"0x8cab558abbf4e89e2c69081ac010ef4c578fafdee330c9ea8946628e71b5f11f" - ) |] + ) + |] ; [| ( f "0x82e2c85d478b348cf14c2c9ac0e7b7f84b2042b16c7cec3dec5aaf44cc5f5709" , f "0x53183a6e8d8ed63c39d6588f42cea18d92ee5d1c36a3afb9ba5df6241591d932" - ) |] + ) + |] ; [| ( f "0x436f1a6c44f70cb39fc6e0eea1b2e0005b54ae70672acf50edd499c2fff03527" , f "0x80bba90ecf2bd62e6f9f20c305dff16dbfd47325ffcf55336be7bb55790ddf04" - ) |] + ) + |] ; [| ( f "0x6c4d90f8b890a33fae572f778c3b625d599b26a17f0ba98873010ee41c789715" , f "0xff526de64ecf455aa98286086382a8a52983a982a423291865929ab34017ef30" - ) |] + ) + |] ; [| ( f "0x078cbc108a99fb4f38b8cf31ad614d979dd81bcc05b59ee2d500b55393573e3e" , f "0x78aaacd3b207110708cf493b2fbaa0170a4366a8ad5b8b25f81b7e62cdf8313f" - ) |] + ) + |] ; [| ( f "0xc9d1bb5640d660aa538331fea0051e436c2a5728ebe533e8789ba41b1345a717" , f "0xebaf2b6de2fa194dced5c3cdea8c13ea641079cf361372f11e9889f7d98c873f" - ) |] + ) + |] ; [| ( f "0x0f3a22582ec96176c9f62c1c32be1f80a3d247177b098ad12983f04baa5cf32a" , f "0x57c7ce8515ae6c4c0a7ad0960077fdf3ab9c0c10907dc07698fd4cc2b2e79726" - ) |] + ) + |] ; [| ( f "0x2afd734982490bf5ca68baa0b543b4e8d7bc36894c75ad6ceb4b7276457efa1d" , f "0x0b4b7161d7595756e703e1282a9a44311fe9cf2ec9ceafd9a2be045aa72be221" - ) |] + ) + |] ; [| ( f "0xbce2b54aad40a51eb020aad0e67a3c978ace98a935afc50917f5f1a77fe78d3d" , f "0x984408eabe5cb40c7cb8127448e91578db0f38570eed7fdce6822748cbe2732d" - ) |] + ) + |] ; [| ( f "0x1a30477717fb14b8cd697cdf7aa6f9d94701ce2c720f62395b4d0d7e4365b13b" , f "0x7c5db889e624e4bf9feb61b7feac69909671ab2b7b9faf71f5ff7dc25be08436" - ) |] + ) + |] ; [| ( f "0x18c4e5f3276836787531b91fa170aa727acda70b7d008ddb1a727a150632a02d" , f "0x8a9171ab0931af092bdb740c04ead279e5f493bdaca9acf39247310a9b3ace18" - ) |] + ) + |] ; [| ( f "0x04ca369f69da6dccad8bfd5be40f7637cb44d47b566ea93a0cf37ebeb0246f26" , f "0x18889b6134f0c735fce2c18958033281d677e39d0cbec93a19c75b5c1d538d0e" - ) |] + ) + |] ; [| ( f "0xb661e25e51f24a15e6d36342aa369935db32b1a67ccd965fdd150879f697691f" 
, f "0x3637f695a1874f0e688a7dd33f460f5674e7d58c97d163e00bd475d775c2b11a" - ) |] |] + ) + |] + |] ; [| [| ( f "0xc8367ce6bea037d756b88d3277927fa9630bd3105e58bc1493c13e417d73512e" , f "0xb5473bc0c686a4e48132b4f390c72c1d2bf58781b3ab95100adca88607381602" - ) |] + ) + |] ; [| ( f "0xa0a62119301dd3213e199970bbfcfa96393e965c2a87ad8026c483ef35efe20a" , f "0xe7043758b1f8fea3110df334aca12d4cb1f3fc953aa3646d9c5025741aea7624" - ) |] + ) + |] ; [| ( f "0xaa110b2d269a824d84febd45de760025775daf7409950e24450f394ddb49850f" , f "0xd0751ae8f6262ababb836dee60b3d2ad670870ed760d064c6e61a36b78cbf40f" - ) |] + ) + |] ; [| ( f "0x31ccd6c3da0584ba6e627b10eb0e7d02ceb7e0a38de56dc08c5012a49165613e" , f "0xeb2224d6ad934b81749f119d13a8618f7a6a0fcaf6226b53342d7f8faab94606" - ) |] + ) + |] ; [| ( f "0xe5f376528d1495891fb00eda0c5f81885d980441e2d777d0a614f66b3311d22d" , f "0x56e6c3885efd8bf116a96eca2ea6835534346d09518abb5a120be709b758d921" - ) |] + ) + |] ; [| ( f "0xf247bfd09898b1ca3c498d6ca946e3f1a51f93c757d55c71e957bef996c73323" , f "0x6df70fca74c4cfa99412bd8ca0f9717f4c715f2f291cad7e3e021c151cc1de27" - ) |] + ) + |] ; [| ( f "0xa16fee332432ac0b845944f4539ef374e7f8e9016d7482067064be21a4d11701" , f "0xbb823db0cf4c8273d97cfc2c1c42c8dd4c1df77cb81ad39112994f4bf669532b" - ) |] + ) + |] ; [| ( f "0xdd51d52d0d342fd0e2d3f16a738e9e6ac4a6eac2a5d71e1ce9801749e164aa22" , f "0x988aa8258ecde5123d349ca66433eab2d01763f98f56e294c3923434f2056213" - ) |] + ) + |] ; [| ( f "0x011b73ee5d96cf4fad4ef70f1906b02b314adebc359bbb3a718d93ea5ab34914" , f "0x7cb849819cc2776a7426935393b38f76058208b77081b831cad92bb8b57ea538" - ) |] + ) + |] ; [| ( f "0x755fbcdd90ad4261bb2c64abddb623e6be1b012c294f2f043a1996330321d414" , f "0xcda7199a7ac18ea207c6fb9e1f03d691b043d22b02fddbe1895ff0624c6d880b" - ) |] + ) + |] ; [| ( f "0x9920935cb6b9d64ed9cea3cdec5ce76cd1e6c1f9acc31d9d960854371de7e835" , f "0x0018e628329840b814c6ae36a501cf578d48ca374c258e6da288bd02c2de5c2d" - ) |] + ) + |] ; [| ( f 
"0xe423b84355a21c38d8bd97366bad8c47b36e419fdb35383c55ed5c387f19551e" , f "0x9d6e3eac1a10b31fb003b3bb9b081b9643676352512fe155fb6079d2a18fa613" - ) |] + ) + |] ; [| ( f "0x13f74947f9af312725bd66f26cc96e977afef0755afad5d012f5a02ea1090014" , f "0xf0b94f068dbc7c6a4a8bfe2f13a230089a51639be1cadd538646ff0a4b4be107" - ) |] + ) + |] ; [| ( f "0x833181c41ac0ecfb8cedfe1fe16e813db565f4674895a1fc26bbfebf560b412f" , f "0x47b031830d917e4c31dad6d043032b787cec04fd070a053a5e499fa3c270b117" - ) |] + ) + |] ; [| ( f "0x498628dcf11af30f410b1667116383aaf8256b82f4b14040832b13d09f2f4839" , f "0x9d1a016a115c7c635ecdeb7f6560c8bba0eca09acc68d97a5c799d3bea22aa36" - ) |] + ) + |] ; [| ( f "0x4c98d044efa7060f9033887bbcdd2d6cf8d8bb56843584d7ce1b612522d4ff14" , f "0xd00747af4aef9385fdf3e00325f8fa1445a2573499a4866a68da2936bc71430f" - ) |] + ) + |] ; [| ( f "0x8d08d73acbf50216fe14555fb1340611c6b5283ea581ba40ac5e126a7622963f" , f "0xfcdc3ecb874d8742d142ca5ecfe2fc0a15e68cb387d58bc86470e1d635627f11" - ) |] + ) + |] ; [| ( f "0x8cfce7888452ca0c3542b0e514a5b4fedcc2594094296909fea212b4b973601e" , f "0x86a00bafe453a664efbc18f69415416e02f3ab1cf628b0c890503acd9714ff03" - ) |] + ) + |] ; [| ( f "0xbb74a7f1f58d229df6ce5cbbd88b12260adfd00fa0092af2b629977e28229f07" , f "0x8c6c478132b34078bb8cd493ee6c989cc5db6ce2964a7dcac678759cb235503b" - ) |] + ) + |] ; [| ( f "0x654f4509f0657bcc6c4557118004e7e0292537d30b89a1653fbf56d851b8a805" , f "0x7c5be0b9edbf5ddf1fe1b333719350c8fc3f23d62abd1a9f3233bb297e002a04" - ) |] + ) + |] ; [| ( f "0x263c1fc9029f84a025c3a9771c804644dbab59f506a32c12272b63be85c14017" , f "0x26c1d3194b9542da7c86f3f613017618ab38f1effaab19ab6c50e17c19882b36" - ) |] + ) + |] ; [| ( f "0x4ac6eae1fa31e13263432be12097258c8d9f0e297e8fa12198f855a9bcc7461b" , f "0x241c5547266e0b3e3ef22fc2c06b0cf0f5b0c3863e51eb398fe44c3f61c22d31" - ) |] + ) + |] ; [| ( f "0x87993e65226d861874bd17b404bfb42121bb936628bd6e1797338fd724e17535" , f "0x803dedb317268047e7622c491c5b48733d13d427d4c34e38efebe45b397fc816" - ) |] + ) + |] ; [| 
( f "0x0a88f04a782bf10869eee3d5a3cd3c7441c322132b6ec0b42aa9906bb793b624" , f "0x45ffe3e4a34217479270334289480e9a80e1488d1be5272d3a1f84e31afbd41d" - ) |] + ) + |] ; [| ( f "0x5740e2bab3d0336aeb8d0c2836987c799483b3149f22b474f076b1a08cd23103" , f "0x67d74eb704eb006e6ca393f10b3613f68a7765d4721eb4ecd7e22af58445e920" - ) |] + ) + |] ; [| ( f "0xd9f448c5844adb54eaebdc0d8a415c940e5dbf0d2e80235b5d923c628329a919" , f "0x2471543ad76538d43dd2776d2f0c5f9e0bd677bcb550ee0d8f68ae85128a5320" - ) |] + ) + |] ; [| ( f "0xbd08307e69e2c52225ecce2a8526da827e0e4ae51f456f2aaef1b1f88f7b2f24" , f "0xed10765e123e97772597b8fa31992c72ec9e367cd679ac1ede82e4c1d6fc9f18" - ) |] + ) + |] ; [| ( f "0xff18c0272e0e2c373c9685a0d1dd00cd157815813e9350e60903dfb14685382f" , f "0x17efe08c328e3c6b81c97f71cdcf85cc27e4f008df47a73dd962c1dd68a65a3c" - ) |] + ) + |] ; [| ( f "0xb70211b45c41ecabf46fc69a7f3d68121ed6481e4b562ca1c2bc54e973cac810" , f "0x453b85f2498e1fd7c4ca60cfcb7e80e412bff8fab3beae5660bba3f947662107" - ) |] + ) + |] ; [| ( f "0x8a4044980fa17b21a392135093e3d2d9e7b9007bf6e95ea10205b033c7f4bc10" , f "0x6161771735b1fe3f64be486fa8962bc4a32aabd8d5a0bc3dc735551b7f394d2f" - ) |] + ) + |] ; [| ( f "0xb648aed367aa5f3c799e8169d83ec9f1e61d99544da0c6ca13b5ab58b3993a2a" , f "0xb4a1d72196890debd5925b8eda0ec3fe5a96e7a82d26d789a90bd7cd74a6c622" - ) |] + ) + |] ; [| ( f "0x1834ef903d345359f68576105b14f2bff5f50013edc89696fd43f4669b6f3f31" , f "0x92e5799b4f7de41c0bc4be9e3b68c63ffb82870304f00147aae0434b94159918" - ) |] + ) + |] ; [| ( f "0x9ea898b5691d045a94160a32d55b079bcdae1c0049ca3e1d58c0445a27d66c1a" , f "0xb1d5e61f6ab1336a162e7050e1b1c74bc015f7f51bd2c5b5ccd679e78b1da90c" - ) |] + ) + |] ; [| ( f "0xfabe0ca14e4aec03cd9241701176fea66837a37fbe7de2ec7b0f58f78379fd21" , f "0xe0d1b717c2344bceb72299cf476ec1fec93d38df506d9f462f4b2149d7e8552b" - ) |] + ) + |] ; [| ( f "0x9c2f674c215bf1d976cc6d5902ecd96bd81c143efa278c0ecfe2c6e8e07b9a25" , f "0x0acef6f81f6fd03a1245eb69257c3d11f7141fc5d8face9af2685e58883bfc3c" - ) |] + ) + |] 
; [| ( f "0x66ad1a841a6498e5f8a196185fe8cf3c6b7d09680098915368ed148273d2bb28" , f "0xf4a9bebb7155aeea3a6e8502d60055d9c1d144194563b3ec30a69868019cc314" - ) |] + ) + |] ; [| ( f "0x2dc63ae6dc3a3472ee2c0de04547edaa57ab0c7be51099f85bec21f4230c200b" , f "0xe526e279440c10af019dbf2b565557ea798aa914740188034a0b420699748425" - ) |] + ) + |] ; [| ( f "0x7b86d9eb3621a0834b1c45f70640d86023924aad876d2857b5e4b996f1356d03" , f "0x135a477d492fcdb5a9e50dd2985e3177246ce9dc74044224033e06dce0062423" - ) |] + ) + |] ; [| ( f "0x7b2b086ad8757898e28f033eb6ebb8cf5ad22b12d5451d8dec24e55aa49a073c" , f "0x1c679f54d0df910b43324703ee2fdcac8e2bfdd0465d816fff591e5ef9976b36" - ) |] + ) + |] ; [| ( f "0x9e83f09546238078c821a2e306a7461eb551cc06921e4b2c9c2f8ea7a694e330" , f "0x34bcfab4cb47d97b7f16b1d7368017dc8a1d0ddbe7922b4892996d0d73533433" - ) |] + ) + |] ; [| ( f "0x1cb5dacb751641ff26b1f0c7f145fe73b28166aa13b57d2c17f5e95de178d704" , f "0x54cb65dfbacda81f4565c4ba0c83d89de6e7f805445834d8ca631b7556e34d13" - ) |] + ) + |] ; [| ( f "0x47e9a67ca58deaa3048aea6192c309a5e5604687efb38bcfb75de3534f806927" , f "0x2f81d29a4a3791a00f7b635acd13c3714242a5c1bb919516b313d492654bd417" - ) |] + ) + |] ; [| ( f "0x57775d6f1fb459af25b59e12d72335ad5126e45e41ecfd931d1809da7f603622" , f "0xe1cd7d81e3ae8d3b04a9b5278ae7ddf6b617d2efcd17e2b6f825f8384092450b" - ) |] + ) + |] ; [| ( f "0x8a188452f00d123510f9020d6a9d0cb03abb8cfc03f830f2217645d5a0a90c0f" , f "0x45fe28280ef1910f8b15938cabcf17ed729ea0bb3292d8e613e33fb4016c6c1a" - ) |] + ) + |] ; [| ( f "0x3c2adb40586b462fb702a36f38d480a392c6ef1894e2a2c86bfcc11f14ae3235" , f "0x7514a5fa86974ca4639e15f085d0ed72b129df4c93ec8c2de10213bc8c32ab1a" - ) |] + ) + |] ; [| ( f "0xc83de0b072c3462ba5055cc25c7449373c2c6a2b3f373c34af303ab4fce1d634" , f "0x232a4cea93c11c4c3c8ebc9b800e435d863df234360dbd593c868bf76e551322" - ) |] + ) + |] ; [| ( f "0xcc60145874768b55780bd49a8c5b72b621e7a08231c898e279aecdaa1df6a015" , f "0x0973e7e94538b6bef4ff93ed817151ec93437c2eb2d95f0bf4f600c295c24200" - ) |] + ) 
+ |] ; [| ( f "0x49e33c92ca57d2f4dbd16ff6bfa39832a614f5e90eae2813a2b755e3f07fcc39" , f "0xb04d063383201e45c79cf2df05db68135946976e865f8239c4764214c6f50b1f" - ) |] + ) + |] ; [| ( f "0x66c3f56a4eb84622fa6a204a401d95c00e618db21f3b1bc89512d8c04328413e" , f "0x79399c18c3129954e3d6b16d18e6fad69cf86c7bdadf62fab5ded4a2b151f100" - ) |] + ) + |] ; [| ( f "0x9ee9775535afb3fd25e9f64e9ca8400661ac0131dda010d65dec5a2aa76dbc05" , f "0x30f15a42dcc83816c628fb103eb5def39671362364941fddca9398321e455210" - ) |] + ) + |] ; [| ( f "0x45556fd2b50b3716a353e2e50f04408ae9bceadafca7c4e617fe3c045f2ee20f" , f "0x093f40238c59360fcf00d1cf6935e46f527e8d69aa45fe7e3fc6a38ed2310d22" - ) |] + ) + |] ; [| ( f "0xb3927c8c4e047490f49b5a274fd514ebaf1ab7a4d6ffcbf895a59e288ef26a03" , f "0x521ec1e4890b46670a4e13741b93feed4ca73331c6c25cde2543c424eb7a6424" - ) |] + ) + |] ; [| ( f "0xda4b8525802af8e7e82b9153651eb7411ee702b91048f2ef0fd9012a2b58742f" , f "0x99787fe08ed1424c76bd6a10092cedb013fc78482877c37f1567666a84c17034" - ) |] + ) + |] ; [| ( f "0xf825d7b06779fcae071d244f27f741658e94fef82a41c7a25a9ddbd1aa8bde3f" , f "0x843e54f5118ce6eeea5afa9acc16ae1ae8b0fe966d60e928a0716be7de50693f" - ) |] + ) + |] ; [| ( f "0x400c338105a2e4ed1c3c5533141bbe1252bcc114a1320a661b190648c831ba0b" , f "0x4ff8738d7601708063692a031e11c3678ecf37da77d619918f614f46ab80242e" - ) |] + ) + |] ; [| ( f "0x6f6080f188eb8dcbfcd22def4ee63cd7a6d805f338ad83dd8fa5837be897152b" , f "0x18ae8b4cb275f5249008ffec5f697eba91f02771a70655bb7190a6a218e31502" - ) |] + ) + |] ; [| ( f "0x21366dee695e9117c36732cd1524453ed31980779ecc17399af6b1839148a904" , f "0x95ab97395d840501e550428307bb8e6f17aa10bf9ab4d2947f9b56b71444143e" - ) |] + ) + |] ; [| ( f "0x7840a08ef5212483269cc793ebd4eee6cf5ca3427454fce8927689762e0b153b" , f "0xc17e86f5be359a0eb78ca5824b2838a60c93d65cff1f5627dc7022ec62654f1e" - ) |] + ) + |] ; [| ( f "0x03f509fd882edba5e87d2bf72dbb14b7968e2b3a0138ce55faf3127b1e8f2726" , f "0x0691136cf4f0ede1b7738b94a950fb7b35baddf80a0296a6b6f6f5b929fe4235" - ) |] 
+ ) + |] ; [| ( f "0x2af5dee3966a069b6d57aa37e6a2cd66c44bac113ed9863353acd45a173ebf2a" , f "0x0aa5e8bd4f33bca1077883d45b99934c4b88f6fb85973371b554210a4b024032" - ) |] + ) + |] ; [| ( f "0x29e48027b239cf03df3c58a9f723dd4f8cedefbc81d5ac3ae16c9e24dff3942e" , f "0x78786c95b486a7bda4d7c2c0eea8194e4324c2165b3fdcad3f10bea4a780852b" - ) |] + ) + |] ; [| ( f "0x529e72b0aa6e2fd1f05aaebc55ec883d07b8a1ba0803dd851ef6a066dfaf4a1d" , f "0x1faa5d898243f8d6a3c2bd8a3f377fc7cfb02f9bf39916798dde92ba331ffc24" - ) |] + ) + |] ; [| ( f "0x25a4238e15e14aaacbedd596a10311b37cb2f051e5c8f9d4e2cd2f0427c10b21" , f "0x36eaf7c52a2edc07a929a2dec698e44755b14670b581f9812e4544be70194902" - ) |] + ) + |] ; [| ( f "0x185d4896181e7cbef7cca0475600229311f327a922138750cd507ec6dabde214" , f "0x761af5c9b8c493a1574f15b2416846ac6de0798d68edce5999921496b17f6103" - ) |] + ) + |] ; [| ( f "0xb409485f52acc25e17c16589bc5729a7acb81ebc4e57e680fc6069400b79d711" , f "0x7d5af5d66bbc23d63e1b3a57ad7df70021d64a9c5cc7f6750468717cf390ac07" - ) |] + ) + |] ; [| ( f "0x0a6289bac575263bde8628a3b430616dcba5e5dff00992ac155392fc7af13c0e" , f "0xcf313b2b6479030ae110952c71ea0aa10301ef6fcc3b64aa33ab8153504b9b25" - ) |] + ) + |] ; [| ( f "0x3e1a1e3d97614506244cc944e8047580f3c76b46f947f9c4a2c09d5f2581821a" , f "0x354c3cdbab44814c2c23dd84077160f6c8f5b21677c37505e0fadda23f74f22e" - ) |] + ) + |] ; [| ( f "0xf5b7cddad100a202e9002695897fda5cc24cafc53780ab674a0d63f70080f72f" , f "0x22cd0b164fc8465a182ff9c03c13a09cfc26390ec60f15070b21ba07b8446b29" - ) |] + ) + |] ; [| ( f "0x19776ca35d60e90e022c381dc3bd9ba2cf81a0921ea447de8e29ad4ed1da1323" , f "0x3636318961ade6b9b98a504b6caa9abe321cc1c0a520eebe9c6a20fc7662e22d" - ) |] + ) + |] ; [| ( f "0x46e3c584593fd054a144f45f2fa4347d1680b2c849c76fd056e75d8744e6c323" , f "0xf10bb65c327c96cdb19ecbb80521971a92e16517681589e1890beb0c696b522f" - ) |] + ) + |] ; [| ( f "0xf22229463be98319f3af898a22a936dfe4555e6c8c11b6f4b4edd3387f560a2f" , f "0xdf84219444a446b07bc62a69320e75adc473e42898a0d092b2f944d5d33ac836" - 
) |] + ) + |] ; [| ( f "0x4d236e256bf3d4285d0ef5f7b6e5d8802a5bc4fad1e413ffc689d2b60857b42f" , f "0x1a4dbd58f7f2a1039db02a2825a6365bde65aba39e6ffc67f9df0f315a601b15" - ) |] + ) + |] ; [| ( f "0xb4436447a2e21b2c2f131a54a0f91421a9bda7a139b4fdac2245863c9ba3a43e" , f "0x4166ccc55f14f758fe33afe5cabecc1d4636c006772a0d264e8f3f2e419bdb15" - ) |] + ) + |] ; [| ( f "0xb894d75d97e3fc2e2a67f5b28571c3683a9e845fff5097de004ea28fe4535227" , f "0x7ac4b496caf099dfa4060a95d98109b098dc0a703230377ab44d61319960be11" - ) |] + ) + |] ; [| ( f "0x7876484d40555cbc5a4cd488be6ff0da934d739a2fbbbb58e9d3a314243d1f30" , f "0xa9881547a0456f9ad1a5bf40b74956f89b55d496c8f86b38c6d41282e8c0e934" - ) |] + ) + |] ; [| ( f "0x385466d93e452ca56dd51bcb5faf26182d00405e61e78d36c39b8fc478958c2e" , f "0x107f6d337cbd0466081e8a848d448d620f2a3803bc930f71efbd222406b9be1d" - ) |] + ) + |] ; [| ( f "0xca73a9bd1fd9643f97e9e09bf435e06242b7261452408ada3e8f1629d50c4b20" , f "0xc589375f4b9312947fdb4a712c3e61a7b3d197fae742f10255e6eecb9f227e13" - ) |] + ) + |] ; [| ( f "0x489b9cf9ff68ce58ebf576a492a9a981bbeda0fac0f6726aa890802416859b12" , f "0x119e728fd829ef5174deaac93d583d9dce2078a4cc9bcd4c67aa8c11d2fd663a" - ) |] + ) + |] ; [| ( f "0xd2c60c9402e6b9fac4baa07a15d80aa666cd024f56bfae10ff195f5bea95391a" , f "0xa43cb0b9426258d698155b30bdeb44846d385e28a803f08f286b6df60765e512" - ) |] + ) + |] ; [| ( f "0xc5d86697e630653c21f01ea5c4ac2372de73f49de4bfa25e89ccb39997353c15" , f "0x3b75efd1a6d8ecf1d3f4f4149a3f6c85f1d650109255f571341d8f5ad5e66939" - ) |] + ) + |] ; [| ( f "0xa72cd2b8e22d8784806050a3174a5491d2c6ecce17701e4b225d4debed7f8b10" , f "0xc7d2490020e922469c07d0f74a0cf96076e9f1e690bddba2cdaabc690e81592c" - ) |] + ) + |] ; [| ( f "0xc6eb8141f13e341af92d6a04cd0c007e2a7d12271dfa36d4595010580ef77135" , f "0xf5f375ebb23f3d5eb378fd4db316687ca42841b6bb37734136b354b47c3e6d1e" - ) |] + ) + |] ; [| ( f "0x1ab2c7578934f2e687b0e1540973c3dc0bc7a6e3ae2c948fc1864b85f52cfc22" , f 
"0x8d69a1a9864b9739a414d1372c2e483ffebbc66e6f9c6d82db28fa04beede528" - ) |] + ) + |] ; [| ( f "0x6c32b7f3a1c8b249b93aa2aa730deb33d5a960c9022720582e5bc1739104fa30" , f "0xc2fba44b1b4166345dc33e2be52de8c01021fc67b0d164f0c6eab1d7942b480d" - ) |] + ) + |] ; [| ( f "0x99dc1ca4c744a1a7d3f770f805de43055f25372409ea8cb11580cb299e6dc604" , f "0xfa63b30efa07b86ee2b69708c41e491ff33c9f7c5d45057b112578ef6c098c1f" - ) |] + ) + |] ; [| ( f "0x56ee9f82774e6d8670da73f291765dc87c821f7c51bfcb0ff210939a4332f033" , f "0x24e931d55d27ed1694abf6b072fd1f0aee758bc3c348e5382461182e6db7ec33" - ) |] + ) + |] ; [| ( f "0x58133394940f36bd99c52179d5912355437f5c84ef85204e1ad525e96fc4ec0b" , f "0xafa011cbdd8f57da49f609e0b42db78319dc42aaf5710289003ac2e8f79dea16" - ) |] + ) + |] ; [| ( f "0xa003d232f9a71f2441ed3f9acb5089070129f1fc301dc531d0daef7d879c0939" , f "0xf6bb150914c69fd6bf400611d7869dd742edd3e0c5f132aecf2fb7b90d693d02" - ) |] + ) + |] ; [| ( f "0xfa3863b3c2e8f0f9b2d9e594c0a60fb5a899d00c54e24b08ca42c6f82cddd535" , f "0x0d9e0125d1c08f1a024ed7d22b09d77951fb23d378be67f60a5573b7fd0c503b" - ) |] + ) + |] ; [| ( f "0x629fdf19a8f2fea9ad886b75475ed34033cd8faf161cdc2465886977e411a32b" , f "0xd803b2a5c0759c18243c3dd57b60e3489af7252f1832497072183cf842884e2d" - ) |] + ) + |] ; [| ( f "0x04cbb2da02cec1ca836245306e1eb66786a1e7aa4aaea7d6d22e1c5916063f01" , f "0xc14f4c4cd9b65f8dbf5f5ef3312167eab15e05eb7a8034cb001cb4c3cc6c7435" - ) |] + ) + |] ; [| ( f "0x0606b3cfe86cfa66552c5135277af897840c20f40f41b02c62b0150df90e0f1f" , f "0xec8c6961dc8e1d378259ae2c38d5d048d2f2fc7c9f01f5cf9b8c576c9bca9900" - ) |] + ) + |] ; [| ( f "0xdfa94356265e4b580a99d15cae124891b32e0ae57c41afe84a2ac7a264298a31" , f "0x8732bea4e5219ad62497563f934aa5ed5fdfac8e0e34efc5f69246591d11ab2b" - ) |] + ) + |] ; [| ( f "0x9e46d7c85dffa35440c84733e883885a065e28e3c3a25e9c699c355090db2323" , f "0xe90a57e509902a50e1835197f32443d41c537982654fd6d2f4b935d2e907ea05" - ) |] + ) + |] ; [| ( f "0xa36eb9f980a78030cdc05d61b0080db8544cee56be0e834c7a6bb718880d090a" 
, f "0xbc4fb2317d19311c31d3845401764062251e61002aa08428652df4e312a40900" - ) |] + ) + |] ; [| ( f "0xbae1f346fad12e8af62300390a952919667f767e74fec009647ba34e8cdf641b" , f "0x46a7562ba28379b9d174c6217ae6373484c555d26323288649a44ac67ef93f35" - ) |] + ) + |] ; [| ( f "0x061cfc82009506144a08c108a47df30af67c8398715bc325ef66d44eee811216" , f "0x188beedaf55b5c571145e89a98d3f42714226f5b3b0d67e3603580b1afe1ab03" - ) |] + ) + |] ; [| ( f "0xab2a91dc1231e26fa9924ca9ffecc7f155eb50a806da45554be73dade3d5fd02" , f "0x5a86aaa4ca73e9cb9110f1cd2d073c31bcc2aff229610f058b458db2a854b328" - ) |] + ) + |] ; [| ( f "0x6f9906f5bd75e777236ee6a2e99232be3b3fc204e48789e80f9bdff5d80d5412" , f "0x7ff819909bafa1dd6b1080a5ecc8da6fe9d0958f3697f4f8b332d39eb6f96505" - ) |] + ) + |] ; [| ( f "0x9826212e9dd7572f016183b2302385ada713de7ce2f956782d4f19bc583cd339" , f "0xc1e03d378051b63ffeb091baccf1e9afc10f49d80f2949ecfa75d24807d22800" - ) |] + ) + |] ; [| ( f "0x540f3da08513b96cb044465f3f305d9809a58644c0ac1aa2441fbd2c08630503" , f "0x60153169d474f2b3e3de3286f1b7ce4686b4232c4656e052ee69a3be6fe54b02" - ) |] + ) + |] ; [| ( f "0x44c7aba59000373f67b2848ee7259f83adcf4eef3578fa1f895425cab0b5ff0a" , f "0x5d8d6a868734d13cf6b65fcbe15c873915dfad82f2c9a6650527c7f2f48ce914" - ) |] + ) + |] ; [| ( f "0xd6652db23d2c4b54663d85b5a66c9cd904c224a20ec7de47334b1ac8215ac727" , f "0x9eeef5d0b1a7da426b696f480cdd3a7365eb1f625c5811fdf5c8ad2ab0219604" - ) |] + ) + |] ; [| ( f "0x513ed72b1eccbfc26a1b33143494a86da9fc9cfa581e69e86901907119930237" , f "0x1a7216ddb808a1a7fa84a66cbf8809e448fc6fec24842f9064bb2a3df905d337" - ) |] + ) + |] ; [| ( f "0x9a9b2daea8db44830a73a1a2d54fba35e18f044d2f002a360f827ed6e4857512" , f "0x1d6ee8f0d09611a36edbebd8108dbfbbcd306c3194cb5f368a9a73a7bf516d32" - ) |] + ) + |] ; [| ( f "0x7b67b03d95a5188c3111bf3112103260ff42d1e79f56dca622eb703129afcd19" , f "0xb3b2e2fe62a2e0038c6fed765a244bfdb19e642072bc55c00ee205aeb4d8e20b" - ) |] + ) + |] ; [| ( f 
"0xb263c7ce099844a71f8788763e42d96e5cc55f303497ce7e02c93ce4104bad33" , f "0x74ded278299bdff84a72c6b442cefbd5b70731c676117018141ef555f0b5341e" - ) |] + ) + |] ; [| ( f "0x300e31fd275c30ff3598c7cbd796908405a895693dade38299723612d58a733e" , f "0x8a5718a09377803bd837c69ed2f9331ae4fb545ab0d4800bf32a7b7a35376510" - ) |] + ) + |] ; [| ( f "0x52c76b3b45ab95f4b37c4bcdf276335066a67b69b80566b7a77ff440c176fc3a" , f "0xe207eef99f4b84f376b2025ec60d9ade5e23c6ad43dcd555ed63d69cfba43a31" - ) |] + ) + |] ; [| ( f "0xb6a431439c8e67da9e78e7148bcb9ada4cdb7d2521aee475c7a6f73eec533801" , f "0x5b15e968a24e1e018f1afb3afb35fc715e0e4a5b94a59713b468d24ef0f1a313" - ) |] + ) + |] ; [| ( f "0xd456ef5312972fdf0e0971818c688e5728a169dc6c6336acfe707a37c8a81904" , f "0xff9579eba10b32ca80ef2adfa7438af43f648ce907c4831b0c550222b269d42b" - ) |] + ) + |] ; [| ( f "0x93d220e2943e9fffd216b1ec14df693713b2be5c77e1b5f22c246f6c313dd31d" , f "0x33f0437a114d46bce0640641f994ad1956fa9d8ad0f3ab959980d56f4b333336" - ) |] + ) + |] ; [| ( f "0x808e53b0b7fca0fce7702c7473fe58afded45ca4f05481ac81c2a151faafe302" , f "0x0734e853433b127f2375f16d07497b50470da27ee3e566955d49cb5f450def27" - ) |] + ) + |] ; [| ( f "0x71a576a8d94290c3f4c2ab733fdbe584a370c27a77941877f51a91313b26242f" , f "0xf75a638394bc4e90e8820436ae6ede4e77d34d3b1e7c53408e440784abc2af15" - ) |] + ) + |] ; [| ( f "0x9a4deb256ac71ad061bfdee3bbe68e43a3b5031a2c48b87251e2f26d5ddbb314" , f "0x75fbcd054a56e8d27a754f06b42b0b76eb1cc6899aec7a5f679dfe53ddb14814" - ) |] + ) + |] ; [| ( f "0xdea82194b14b241007e91c7b6cedac3dd146ab4f25425bee6bdb87c8fd5e762a" , f "0x93041f816019877cda2f2fa00e752c3551304e4f91bb7846122e4a8fbf601a3e" - ) |] + ) + |] ; [| ( f "0xc3471909166d3d5b6d98d3cbfe2904d07ccbe3223e7f9bc2c6a3c4e6aba07e26" , f "0x9eea52f8d5794fe72702e13a20e29153c2b856546310db139bf9ee65a11c200d" - ) |] + ) + |] ; [| ( f "0x8d2becb380d1029128312bbd4d47276f4aba3f1f199c9242e31d63f8f929ee0c" , f "0xf0ecba57150337d4e253ba538331ec19e56093677e11078fcb9d796b562ee720" - ) |] + ) + |] ; [| 
( f "0x0596ec521001b27ab72c7c896365209bf13f4a8a0acb832a7d1a4eddee7cf01f" , f "0x92f811ef699204fb4ecba0ae764f1167a4fa5cc937fe50e0ebc3acee2f132431" - ) |] + ) + |] ; [| ( f "0x69534af81df8414c3112c2d3c8c6f91c0772c558d1fc7dad6e705aa680a93e29" , f "0xb4ec46c608f15a26d15e9e2c0a962de583dacaad72e2b391eb95a267f7bcc121" - ) |] + ) + |] ; [| ( f "0x58306ff45b9a48a98d14dde32bd180230a20d1fd8c97c64bd7b31920b5ec9c31" , f "0x80dff5131846606721b06979a4aecfb4e4b2dbedf58da03c0d8c76f0d64a702a" - ) |] + ) + |] ; [| ( f "0x9eb630438e5e0e26839a158fa4b6128ff8583a2c8a9fe4c0b56dc673bc50fc2b" , f "0x8c532c75fba6148be6dff25a0afe4e1b6e896ee23113854e73b7ef1686c1032b" - ) |] + ) + |] ; [| ( f "0xcc1b8c411e9b64a32e9fa09bb7c8698ed2c50f67bbb37777f9309ac6ba02872b" , f "0x80bafc3ea7aa2b2200cf9e4a947839442d33adaa9861de44e70b09186781673e" - ) |] + ) + |] ; [| ( f "0xc1ea0cec23a25aab979c3c719a6db5e392fe595a84f1cf340b30b2e25179bf01" , f "0x4218c186e2ed790a1110f0dbd1eb1fab367bc51f49709a6ec764d722c18b192f" - ) |] + ) + |] ; [| ( f "0x587543a5dd47f338761461c2095709b74d4c199db6f3696ce0fa884e9a58e632" , f "0x8f73b61ebfbb84ea8171b4ccde01769982c2a1ac4b6d0769ac1e9f5385ac6c23" - ) |] + ) + |] ; [| ( f "0xe970ed714c9d11c1c3e02a551d18363ee6082ae90cecc3d13e9f59827c74e313" , f "0xe75b0ec4875f37182f8e19d28a64bc673256a92b9a1b1f9b7db6f8ac656b8808" - ) |] + ) + |] ; [| ( f "0xa23628fcf352ab9313998a8fff1b21597157989211f8cd924e053f27fec5140c" , f "0x00da2812a0dc38e6b3501ef4f1a21a22e8f0b2af68a55ba89e452464b6a69b3f" - ) |] + ) + |] ; [| ( f "0x77e4f0ba674fc27ed29881bd687c91e8db9e2e9a4999d226762112f0259b7d1a" , f "0xd4f0db0c6b10c9a9b6db0126cfb647182ed3497c6fea9ef7d02e62cdfcdeed17" - ) |] |] |] + ) + |] + |] + |] end diff --git a/src/lib/zexe_backend/pasta/vesta_based_plonk.ml b/src/lib/zexe_backend/pasta/vesta_based_plonk.ml index a5381a4e52b..4b40349b7f9 100644 --- a/src/lib/zexe_backend/pasta/vesta_based_plonk.ml +++ b/src/lib/zexe_backend/pasta/vesta_based_plonk.ml @@ -36,7 +36,7 @@ module R1CS_constraint_system = let 
params = Sponge.Params.( map pasta_p ~f:(fun x -> - Field.of_bigint (Bigint256.of_decimal_string x) )) + Field.of_bigint (Bigint256.of_decimal_string x))) end) module Var = Var @@ -48,9 +48,10 @@ let lagrange : int -> Marlin_plonk_bindings.Pasta_fp_urs.Poly_comm.t array = Precomputed.Lagrange_precomputations.( vesta.(index_of_domain_log2 domain_log2)) ~f:(fun unshifted -> - { Poly_comm.unshifted= + { Poly_comm.unshifted = Array.map unshifted ~f:(fun c -> Or_infinity.Finite c) - ; shifted= None } ) ) + ; shifted = None + })) let with_lagrange f (vk : Verification_key.t) = f (lagrange vk.domain.log_size_of_group) vk @@ -95,7 +96,7 @@ module Proof = Plonk_dlog_proof.Make (struct let batch_verify = with_lagranges (fun lgrs vks ts -> - Async.In_thread.run (fun () -> batch_verify lgrs vks ts) ) + Async.In_thread.run (fun () -> batch_verify lgrs vks ts)) let create_aux ~f:create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = @@ -120,15 +121,10 @@ module Proof = Plonk_dlog_proof.Make (struct let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms = create_aux pk primary auxiliary prev_chals prev_comms - ~f:(fun pk - ~primary_input - ~auxiliary_input - ~prev_challenges - ~prev_sgs - -> + ~f:(fun pk ~primary_input ~auxiliary_input ~prev_challenges ~prev_sgs -> Async.In_thread.run (fun () -> create pk ~primary_input ~auxiliary_input ~prev_challenges - ~prev_sgs ) ) + ~prev_sgs)) let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = create_aux pk primary auxiliary prev_chals prev_comms ~f:create diff --git a/src/lib/zexe_backend/zexe_backend_common/bigint.ml b/src/lib/zexe_backend/zexe_backend_common/bigint.ml index 5e21e29eb23..0bd8d5db62a 100644 --- a/src/lib/zexe_backend/zexe_backend_common/bigint.ml +++ b/src/lib/zexe_backend/zexe_backend_common/bigint.ml @@ -46,7 +46,7 @@ end module Make (B : Bindings) (M : sig - val length_in_bytes : int + val length_in_bytes : int end) : Intf with type t = B.t = struct include B diff --git 
a/src/lib/zexe_backend/zexe_backend_common/curve.ml b/src/lib/zexe_backend/zexe_backend_common/curve.ml index d63659521c2..0f0c3f4756b 100644 --- a/src/lib/zexe_backend/zexe_backend_common/curve.ml +++ b/src/lib/zexe_backend/zexe_backend_common/curve.ml @@ -62,18 +62,17 @@ end module Make (BaseField : Field_intf) (ScalarField : sig - type t + type t end) (Params : sig val a : BaseField.t val b : BaseField.t end) (C : Input_intf - with module BaseField := BaseField - and module ScalarField := ScalarField) = + with module BaseField := BaseField + and module ScalarField := ScalarField) = struct - include ( - C : module type of C with type t = C.t with module Affine := C.Affine ) + include (C : module type of C with type t = C.t with module Affine := C.Affine) let one = one () @@ -97,7 +96,7 @@ struct module T = struct type t = BaseField.Stable.Latest.t * BaseField.Stable.Latest.t [@@deriving - version {asserted}, equal, bin_io, sexp, compare, yojson, hash] + version { asserted }, equal, bin_io, sexp, compare, yojson, hash] end include T @@ -127,15 +126,15 @@ struct let%test "cannot deserialize invalid points" = (* y^2 = x^3 + a x + b - pick c at random - let (x, y) = (c^2, c^3) + pick c at random + let (x, y) = (c^2, c^3) - Then the above equation becomes - c^6 = c^6 + (a c^2 + b) + Then the above equation becomes + c^6 = c^6 + (a c^2 + b) - a c^3 + b is almost certainly nonzero (and for our curves, with a = 0, it always is) - so this point is almost certainly (and for our curves, always) invalid - *) + a c^3 + b is almost certainly nonzero (and for our curves, with a = 0, it always is) + so this point is almost certainly (and for our curves, always) invalid + *) let invalid = let open BaseField in let c = random () in diff --git a/src/lib/zexe_backend/zexe_backend_common/dlog_plonk_based_keypair.ml b/src/lib/zexe_backend/zexe_backend_common/dlog_plonk_based_keypair.ml index d5962f469e4..d4a82d5ac26 100644 --- 
a/src/lib/zexe_backend/zexe_backend_common/dlog_plonk_based_keypair.ml +++ b/src/lib/zexe_backend/zexe_backend_common/dlog_plonk_based_keypair.ml @@ -74,8 +74,7 @@ module type Inputs_intf = sig end module Verifier_index : sig - type t = - (Scalar_field.t, Urs.t, Poly_comm.Backend.t) Plonk_verifier_index.t + type t = (Scalar_field.t, Urs.t, Poly_comm.Backend.t) Plonk_verifier_index.t val create : Index.t -> t end @@ -86,8 +85,9 @@ module Make (Inputs : Inputs_intf) = struct open Inputs type t = - { index: Index.t - ; cs: (Gate_vector.t, Scalar_field.t) Plonk_constraint_system.t } + { index : Index.t + ; cs : (Gate_vector.t, Scalar_field.t) Plonk_constraint_system.t + } let name = sprintf "%s_%d_v3" name (Pickles_types.Nat.to_int Rounds.n) @@ -95,9 +95,7 @@ module Make (Inputs : Inputs_intf) = struct let urs_info = Set_once.create () in let urs = ref None in let degree = 1 lsl Pickles_types.Nat.to_int Rounds.n in - let set_urs_info specs = - Set_once.set_exn urs_info Lexing.dummy_pos specs - in + let set_urs_info specs = Set_once.set_exn urs_info Lexing.dummy_pos specs in let load () = match !urs with | Some urs -> @@ -121,9 +119,9 @@ module Make (Inputs : Inputs_intf) = struct | None -> Or_error.errorf "Could not read the URS from disk; its format did \ - not match the expected format" ) ) + not match the expected format")) (fun _ urs path -> - Or_error.try_with (fun () -> Urs.write urs path) ) + Or_error.try_with (fun () -> Urs.write urs path)) in let u = match Key_cache.Sync.read specs store () with @@ -137,7 +135,7 @@ module Make (Inputs : Inputs_intf) = struct | On_disk _ -> true | S3 _ -> - false )) + false)) store () urs in urs @@ -158,14 +156,14 @@ module Make (Inputs : Inputs_intf) = struct let h = List.hd_exn x in let t = List.last_exn x in Gate_vector.wrap gates - {row= conv t.row; col= t.col} - {row= conv h.row; col= h.col} ) ; + { row = conv t.row; col = t.col } + { row = conv h.row; col = h.col }) ; let index = Index.create gates (Set_once.get_exn 
cs.public_input_size [%here]) (load_urs ()) in - {index; cs} + { index; cs } let vk t = Verifier_index.create t.index diff --git a/src/lib/zexe_backend/zexe_backend_common/dlog_urs.ml b/src/lib/zexe_backend/zexe_backend_common/dlog_urs.ml index 075c4b8b3ed..7e5733f6c23 100644 --- a/src/lib/zexe_backend/zexe_backend_common/dlog_urs.ml +++ b/src/lib/zexe_backend/zexe_backend_common/dlog_urs.ml @@ -30,9 +30,7 @@ module Make (Inputs : Inputs_intf) = struct let urs_info = Set_once.create () in let urs = ref None in let degree = 1 lsl Pickles_types.Nat.to_int Rounds.n in - let set_urs_info specs = - Set_once.set_exn urs_info Lexing.dummy_pos specs - in + let set_urs_info specs = Set_once.set_exn urs_info Lexing.dummy_pos specs in let load () = match !urs with | Some urs -> @@ -50,7 +48,7 @@ module Make (Inputs : Inputs_intf) = struct (fun () -> name) (fun () ~path -> Or_error.try_with (fun () -> Urs.read path)) (fun _ urs path -> - Or_error.try_with (fun () -> Urs.write urs path) ) + Or_error.try_with (fun () -> Urs.write urs path)) in let u = match Key_cache.Sync.read specs store () with @@ -64,7 +62,7 @@ module Make (Inputs : Inputs_intf) = struct | On_disk _ -> true | S3 _ -> - false )) + false)) store () urs in urs diff --git a/src/lib/zexe_backend/zexe_backend_common/endoscale_round.ml b/src/lib/zexe_backend/zexe_backend_common/endoscale_round.ml index 346afbaa6a3..4f9a635a910 100644 --- a/src/lib/zexe_backend/zexe_backend_common/endoscale_round.ml +++ b/src/lib/zexe_backend/zexe_backend_common/endoscale_round.ml @@ -5,46 +5,49 @@ module H_list = Snarky_backendless.H_list module Stable = struct module V1 = struct type 'a t = - { b2i1: 'a - ; xt: 'a - ; b2i: 'a - ; xq: 'a - ; yt: 'a - ; xp: 'a - ; l1: 'a - ; yp: 'a - ; xs: 'a - ; ys: 'a } + { b2i1 : 'a + ; xt : 'a + ; b2i : 'a + ; xq : 'a + ; yt : 'a + ; xp : 'a + ; l1 : 'a + ; yp : 'a + ; xs : 'a + ; ys : 'a + } [@@deriving sexp, fields, hlist] end end] let typ g = Snarky_backendless.Typ.of_hlistable - [g; g; g; g; 
g; g; g; g; g; g] + [ g; g; g; g; g; g; g; g; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist -let map {b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys} ~f = - { b2i1= f b2i1 - ; xt= f xt - ; b2i= f b2i - ; xq= f xq - ; yt= f yt - ; xp= f xp - ; l1= f l1 - ; yp= f yp - ; xs= f xs - ; ys= f ys } +let map { b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys } ~f = + { b2i1 = f b2i1 + ; xt = f xt + ; b2i = f b2i + ; xq = f xq + ; yt = f yt + ; xp = f xp + ; l1 = f l1 + ; yp = f yp + ; xs = f xs + ; ys = f ys + } let map2 t1 t2 ~f = - { b2i1= f t1.b2i1 t2.b2i1 - ; xt= f t1.xt t2.xt - ; b2i= f t1.b2i t2.b2i - ; xq= f t1.xq t2.xq - ; yt= f t1.yt t2.yt - ; xp= f t1.xp t2.xp - ; l1= f t1.l1 t2.l1 - ; yp= f t1.yp t2.yp - ; xs= f t1.xs t2.xs - ; ys= f t1.ys t2.ys } + { b2i1 = f t1.b2i1 t2.b2i1 + ; xt = f t1.xt t2.xt + ; b2i = f t1.b2i t2.b2i + ; xq = f t1.xq t2.xq + ; yt = f t1.yt t2.yt + ; xp = f t1.xp t2.xp + ; l1 = f t1.l1 t2.l1 + ; yp = f t1.yp t2.yp + ; xs = f t1.xs t2.xs + ; ys = f t1.ys t2.ys + } diff --git a/src/lib/zexe_backend/zexe_backend_common/field.ml b/src/lib/zexe_backend/zexe_backend_common/field.ml index b7425a089b8..48e78e059dc 100644 --- a/src/lib/zexe_backend/zexe_backend_common/field.ml +++ b/src/lib/zexe_backend/zexe_backend_common/field.ml @@ -134,9 +134,9 @@ end module Make (F : Input_intf) : S_with_version - with type Stable.V1.t = F.t - and module Bigint = F.Bigint - and module Vector = F.Vector = struct + with type Stable.V1.t = F.t + and module Bigint = F.Bigint + and module Vector = F.Vector = struct include F let size = size () @@ -145,7 +145,7 @@ module Make (F : Input_intf) : module Stable = struct module V1 = struct - type t = F.t [@@deriving version {asserted}] + type t = F.t [@@deriving version { asserted }] include Binable.Of_binable (Bigint) @@ -224,7 +224,7 @@ module Make (F : Input_intf) : let of_bits bs = List.fold (List.rev bs) ~init:zero ~f:(fun acc b -> let acc = add acc acc in - if b then 
add acc one else acc ) + if b then add acc one else acc) let%test_unit "sexp round trip" = let t = random () in @@ -274,5 +274,6 @@ module Make (F : Input_intf) : (Quickcheck.Generator.list_with_length Int.(size_in_bits - 1) Bool.quickcheck_generator) - ~f:(fun bs -> [%test_eq: bool list] (bs @ [false]) (to_bits (of_bits bs))) + ~f:(fun bs -> + [%test_eq: bool list] (bs @ [ false ]) (to_bits (of_bits bs))) end diff --git a/src/lib/zexe_backend/zexe_backend_common/plonk_constraint_system.ml b/src/lib/zexe_backend/zexe_backend_common/plonk_constraint_system.ml index 7472c707de3..96f1955c83f 100644 --- a/src/lib/zexe_backend/zexe_backend_common/plonk_constraint_system.ml +++ b/src/lib/zexe_backend/zexe_backend_common/plonk_constraint_system.ml @@ -28,18 +28,19 @@ end module Gate_spec = struct type ('row, 'f) t = - { kind: Plonk_gate.Kind.t - ; row: 'row - ; lrow: 'row - ; lcol: Plonk_gate.Col.t - ; rrow: 'row - ; rcol: Plonk_gate.Col.t - ; orow: 'row - ; ocol: Plonk_gate.Col.t - ; coeffs: 'f array } + { kind : Plonk_gate.Kind.t + ; row : 'row + ; lrow : 'row + ; lcol : Plonk_gate.Col.t + ; rrow : 'row + ; rcol : Plonk_gate.Col.t + ; orow : 'row + ; ocol : Plonk_gate.Col.t + ; coeffs : 'f array + } let map_rows t ~f = - {t with row= f t.row; lrow= f t.lrow; rrow= f t.rrow; orow= f t.orow} + { t with row = f t.row; lrow = f t.lrow; rrow = f t.rrow; orow = f t.orow } end module Hash_state = struct @@ -58,42 +59,43 @@ module Plonk_constraint = struct module T = struct type ('v, 'f) t = - | Basic of {l: 'f * 'v; r: 'f * 'v; o: 'f * 'v; m: 'f; c: 'f} - | Poseidon of {state: 'v array array} - | EC_add of {p1: 'v * 'v; p2: 'v * 'v; p3: 'v * 'v} - | EC_scale of {state: 'v Scale_round.t array} - | EC_endoscale of {state: 'v Endoscale_round.t array} + | Basic of { l : 'f * 'v; r : 'f * 'v; o : 'f * 'v; m : 'f; c : 'f } + | Poseidon of { state : 'v array array } + | EC_add of { p1 : 'v * 'v; p2 : 'v * 'v; p3 : 'v * 'v } + | EC_scale of { state : 'v Scale_round.t array } + | 
EC_endoscale of { state : 'v Endoscale_round.t array } [@@deriving sexp] let map (type a b f) (t : (a, f) t) ~(f : a -> b) = let fp (x, y) = (f x, f y) in match t with - | Basic {l; r; o; m; c} -> + | Basic { l; r; o; m; c } -> let p (x, y) = (x, f y) in - Basic {l= p l; r= p r; o= p o; m; c} - | Poseidon {state} -> - Poseidon {state= Array.map ~f:(fun x -> Array.map ~f x) state} - | EC_add {p1; p2; p3} -> - EC_add {p1= fp p1; p2= fp p2; p3= fp p3} - | EC_scale {state} -> - EC_scale {state= Array.map ~f:(fun x -> Scale_round.map ~f x) state} - | EC_endoscale {state} -> + Basic { l = p l; r = p r; o = p o; m; c } + | Poseidon { state } -> + Poseidon { state = Array.map ~f:(fun x -> Array.map ~f x) state } + | EC_add { p1; p2; p3 } -> + EC_add { p1 = fp p1; p2 = fp p2; p3 = fp p3 } + | EC_scale { state } -> + EC_scale + { state = Array.map ~f:(fun x -> Scale_round.map ~f x) state } + | EC_endoscale { state } -> EC_endoscale - {state= Array.map ~f:(fun x -> Endoscale_round.map ~f x) state} + { state = Array.map ~f:(fun x -> Endoscale_round.map ~f x) state } let eval (type v f) (module F : Snarky_backendless.Field_intf.S with type t = f) (eval_one : v -> f) (t : (v, f) t) = match t with (* cl * vl + cr * vr + co * vo + m * vl*vr + c = 0 *) - | Basic {l= cl, vl; r= cr, vr; o= co, vo; m; c} -> + | Basic { l = cl, vl; r = cr, vr; o = co, vo; m; c } -> let vl = eval_one vl in let vr = eval_one vr in let vo = eval_one vo in let open F in let res = List.reduce_exn ~f:add - [mul cl vl; mul cr vr; mul co vo; mul m (mul vl vr); c] + [ mul cl vl; mul cr vr; mul co vo; mul m (mul vl vr); c ] in if not (equal zero res) then ( Core.eprintf @@ -117,7 +119,7 @@ module Plonk_constraint = struct end module Position = struct - type t = {row: Row.t; col: Plonk_gate.Col.t} + type t = { row : Row.t; col : Plonk_gate.Col.t } end module Internal_var = Core_kernel.Unique_id.Int () @@ -149,17 +151,18 @@ module V = struct end type ('a, 'f) t = - { equivalence_classes: Position.t list V.Table.t 
+ { equivalence_classes : Position.t list V.Table.t (* How to compute each internal variable (as a linaer combination of other variables) *) - ; internal_vars: (('f * V.t) list * 'f option) Internal_var.Table.t - ; mutable rows_rev: V.t option array list - ; mutable gates: - [`Finalized | `Unfinalized_rev of (Row.t, 'f) Gate_spec.t list] - ; mutable next_row: int - ; mutable hash: Hash_state.t - ; mutable constraints: int - ; public_input_size: int Core_kernel.Set_once.t - ; mutable auxiliary_input_size: int } + ; internal_vars : (('f * V.t) list * 'f option) Internal_var.Table.t + ; mutable rows_rev : V.t option array list + ; mutable gates : + [ `Finalized | `Unfinalized_rev of (Row.t, 'f) Gate_spec.t list ] + ; mutable next_row : int + ; mutable hash : Hash_state.t + ; mutable constraints : int + ; public_input_size : int Core_kernel.Set_once.t + ; mutable auxiliary_input_size : int + } module Hash = Core.Md5 @@ -170,7 +173,7 @@ let zk_rows = 2 module Make (Fp : Field.S) (Gates : Gate_vector_intf with type field := Fp.t) (Params : sig - val params : Fp.t Params.t + val params : Fp.t Params.t end) = struct open Core @@ -191,7 +194,7 @@ struct Bytes.set int_buf i (Char.of_int_exn ((index lsr (8 * i)) land 255)) done ; - H.feed_bytes acc int_buf ) + H.feed_bytes acc int_buf) in let cvars xs = List.concat_map xs ~f:(fun x -> @@ -200,46 +203,46 @@ struct Snarky_backendless.Cvar.to_constant_and_terms x ~equal ~add ~mul ~zero ~one) in - Option.value_map c ~default:[] ~f:(fun c -> [(c, 0)]) @ ts ) + Option.value_map c ~default:[] ~f:(fun c -> [ (c, 0) ]) @ ts) |> lc in match constr with | Snarky_backendless.Constraint.Equal (v1, v2) -> let t = H.feed_string t "equal" in - cvars [v1; v2] t + cvars [ v1; v2 ] t | Snarky_backendless.Constraint.Boolean b -> let t = H.feed_string t "boolean" in - cvars [b] t + cvars [ b ] t | Snarky_backendless.Constraint.Square (x, z) -> let t = H.feed_string t "square" in - cvars [x; z] t + cvars [ x; z ] t | 
Snarky_backendless.Constraint.R1CS (a, b, c) -> let t = H.feed_string t "r1cs" in - cvars [a; b; c] t + cvars [ a; b; c ] t | Plonk_constraint.T constr -> ( - match constr with - | Basic {l; r; o; m; c} -> - let t = H.feed_string t "basic" in - let pr (s, x) acc = fp s acc |> cvars [x] in - t |> pr l |> pr r |> pr o |> fp m |> fp c - | Poseidon {state} -> - let t = H.feed_string t "poseidon" in - let row a = cvars (Array.to_list a) in - Array.fold state ~init:t ~f:(fun acc a -> row a acc) - | EC_add {p1; p2; p3} -> - let t = H.feed_string t "ec_add" in - let pr (x, y) = cvars [x; y] in - t |> pr p1 |> pr p2 |> pr p3 - | EC_scale {state} -> - let t = H.feed_string t "ec_scale" in - Array.fold state ~init:t - ~f:(fun acc {xt; b; yt; xp; l1; yp; xs; ys} -> - cvars [xt; b; yt; xp; l1; yp; xs; ys] acc ) - | EC_endoscale {state} -> - let t = H.feed_string t "ec_endoscale" in - Array.fold state ~init:t - ~f:(fun acc {b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys} -> - cvars [b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys] acc ) ) + match constr with + | Basic { l; r; o; m; c } -> + let t = H.feed_string t "basic" in + let pr (s, x) acc = fp s acc |> cvars [ x ] in + t |> pr l |> pr r |> pr o |> fp m |> fp c + | Poseidon { state } -> + let t = H.feed_string t "poseidon" in + let row a = cvars (Array.to_list a) in + Array.fold state ~init:t ~f:(fun acc a -> row a acc) + | EC_add { p1; p2; p3 } -> + let t = H.feed_string t "ec_add" in + let pr (x, y) = cvars [ x; y ] in + t |> pr p1 |> pr p2 |> pr p3 + | EC_scale { state } -> + let t = H.feed_string t "ec_scale" in + Array.fold state ~init:t + ~f:(fun acc { xt; b; yt; xp; l1; yp; xs; ys } -> + cvars [ xt; b; yt; xp; l1; yp; xs; ys ] acc) + | EC_endoscale { state } -> + let t = H.feed_string t "ec_endoscale" in + Array.fold state ~init:t + ~f:(fun acc { b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys } -> + cvars [ b2i1; xt; b2i; xq; yt; xp; l1; yp; xs; ys ] acc) ) | _ -> failwith "Unsupported constraint" @@ -261,8 +264,7 @@ struct x in let 
compute ((lc, c) : (Fp.t * V.t) list * Fp.t option) = - List.fold lc ~init:(Option.value c ~default:Fp.zero) - ~f:(fun acc (s, x) -> + List.fold lc ~init:(Option.value c ~default:Fp.zero) ~f:(fun acc (s, x) -> let x = match x with | External x -> @@ -270,7 +272,7 @@ struct | Internal x -> find internal_values x in - Fp.(acc + (s * x)) ) + Fp.(acc + (s * x))) in List.iteri (List.rev sys.rows_rev) ~f:(fun i_after_input row -> let i = i_after_input + public_input_size in @@ -284,7 +286,7 @@ struct let lc = find sys.internal_vars v in let value = compute lc in res.(i).(j) <- value ; - Hashtbl.set internal_values ~key:v ~data:value ) ) ; + Hashtbl.set internal_values ~key:v ~data:value)) ; for r = 0 to zk_rows - 1 do for c = 0 to 2 do res.(num_rows - 1 - r).(c) <- Fp.random () @@ -300,15 +302,16 @@ struct let digest t = Hash_state.digest t.hash let create () = - { public_input_size= Set_once.create () - ; internal_vars= Internal_var.Table.create () - ; gates= `Unfinalized_rev [] (* Gates.create () *) - ; rows_rev= [] - ; next_row= 0 - ; equivalence_classes= V.Table.create () - ; hash= Hash_state.empty - ; constraints= 0 - ; auxiliary_input_size= 0 } + { public_input_size = Set_once.create () + ; internal_vars = Internal_var.Table.create () + ; gates = `Unfinalized_rev [] (* Gates.create () *) + ; rows_rev = [] + ; next_row = 0 + ; equivalence_classes = V.Table.create () + ; hash = Hash_state.empty + ; constraints = 0 + ; auxiliary_input_size = 0 + } (* TODO *) let to_json _ = `List [] @@ -328,11 +331,11 @@ struct let prev = match V.Table.find sys.equivalence_classes key with | Some x -> ( - match List.hd x with Some x -> x | None -> {row; col} ) + match List.hd x with Some x -> x | None -> { row; col } ) | None -> - {row; col} + { row; col } in - V.Table.add_multi sys.equivalence_classes ~key ~data:{row; col} ; + V.Table.add_multi sys.equivalence_classes ~key ~data:{ row; col } ; prev let wire sys key row col = wire' sys key (Row.After_public_input row) col @@ -345,22 
+348,23 @@ struct let g = Gates.create () in let n = Set_once.get_exn sys.public_input_size [%here] in (* First, add gates for public input *) - let pub = [|Fp.one; Fp.zero; Fp.zero; Fp.zero; Fp.zero|] in + let pub = [| Fp.one; Fp.zero; Fp.zero; Fp.zero; Fp.zero |] in let pub_input_gate_specs_rev = ref [] in for row = 0 to n - 1 do let lp = wire' sys (V.External (row + 1)) (Row.Public_input row) L in let lp_row = Row.to_absolute ~public_input_size:n lp.row in (* Add to the gate vector *) pub_input_gate_specs_rev := - { Gate_spec.kind= Generic + { Gate_spec.kind = Generic ; row - ; lrow= lp_row - ; lcol= lp.col - ; rrow= row - ; rcol= R - ; orow= row - ; ocol= O - ; coeffs= pub } + ; lrow = lp_row + ; lcol = lp.col + ; rrow = row + ; rcol = R + ; orow = row + ; ocol = O + ; coeffs = pub + } :: !pub_input_gate_specs_rev done ; let offset_row = Row.to_absolute ~public_input_size:n in @@ -374,29 +378,32 @@ struct List.init zk_rows ~f:(fun i -> let row = Row.After_public_input (n + sys.next_row + i) in offset - { kind= Generic + { kind = Generic ; row - ; lrow= row - ; lcol= L - ; rrow= row - ; rcol= R - ; orow= row - ; ocol= O - ; coeffs= zeroes } ) + ; lrow = row + ; lcol = L + ; rrow = row + ; rcol = R + ; orow = row + ; ocol = O + ; coeffs = zeroes + }) in List.rev_append !pub_input_gate_specs_rev (rev_map_append gates random_rows ~f:offset) in List.iter all_gates - ~f:(fun {kind; row; lrow; lcol; rrow; rcol; orow; ocol; coeffs} -> + ~f:(fun { kind; row; lrow; lcol; rrow; rcol; orow; ocol; coeffs } -> Gates.add g { kind - ; wires= + ; wires = { row - ; l= {row= lrow; col= lcol} - ; r= {row= rrow; col= rcol} - ; o= {row= orow; col= ocol} } - ; c= coeffs } ) ; + ; l = { row = lrow; col = lcol } + ; r = { row = rrow; col = rcol } + ; o = { row = orow; col = ocol } + } + ; c = coeffs + }) ; g let finalize t = ignore (finalize_and_get_gates t : Gates.t) @@ -405,13 +412,13 @@ struct Sequence.of_list terms |> Sequence.fold ~init:(c0, i0, [], 0) ~f:(fun (acc, i, ts, n) (c, 
j) -> if Int.equal i j then (Fp.add acc c, i, ts, n) - else (c, j, (acc, i) :: ts, n + 1) ) + else (c, j, (acc, i) :: ts, n + 1)) let canonicalize x = let c, terms = Fp.( - Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul - ~zero:(of_int 0) ~equal ~one:(of_int 1)) + Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero:(of_int 0) + ~equal ~one:(of_int 1)) x in let terms = @@ -433,18 +440,19 @@ struct | `Finalized -> failwith "add_row called on finalized constraint system" | `Unfinalized_rev gates -> - sys.gates - <- `Unfinalized_rev - ( { kind= t - ; row= After_public_input sys.next_row - ; lrow= l.row - ; lcol= l.col - ; rrow= r.row - ; rcol= r.col - ; orow= o.row - ; ocol= o.col - ; coeffs= c } - :: gates ) ; + sys.gates <- + `Unfinalized_rev + ( { kind = t + ; row = After_public_input sys.next_row + ; lrow = l.row + ; lcol = l.col + ; rrow = r.row + ; rcol = r.col + ; orow = o.row + ; ocol = o.col + ; coeffs = c + } + :: gates ) ; sys.next_row <- sys.next_row + 1 ; sys.rows_rev <- row :: sys.rows_rev @@ -455,37 +463,37 @@ struct | Some lx -> wire sys lx next_row L | None -> - {row= After_public_input next_row; col= L} + { row = After_public_input next_row; col = L } in let rp = match r with | Some rx -> wire sys rx next_row R | None -> - {row= After_public_input next_row; col= R} + { row = After_public_input next_row; col = R } in let op = match o with | Some ox -> wire sys ox next_row O | None -> - {row= After_public_input next_row; col= O} + { row = After_public_input next_row; col = O } in - add_row sys [|l; r; o|] Generic lp rp op c + add_row sys [| l; r; o |] Generic lp rp op c let completely_reduce sys (terms : (Fp.t * int) list) = (* just adding constrained variables without values *) let rec go = function | [] -> assert false - | [(s, x)] -> + | [ (s, x) ] -> (s, V.External x) | (ls, lx) :: t -> let lx = V.External lx in let rs, rx = go t in - let s1x1_plus_s2x2 = create_internal sys [(ls, lx); (rs, rx)] in + let s1x1_plus_s2x2 = 
create_internal sys [ (ls, lx); (rs, rx) ] in add_generic_constraint ~l:lx ~r:rx ~o:s1x1_plus_s2x2 - [|ls; rs; Fp.(negate one); Fp.zero; Fp.zero|] + [| ls; rs; Fp.(negate one); Fp.zero; Fp.zero |] sys ; (Fp.one, s1x1_plus_s2x2) in @@ -494,8 +502,8 @@ struct let reduce_lincom sys (x : Fp.t Snarky_backendless.Cvar.t) = let constant, terms = Fp.( - Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul - ~zero:(of_int 0) ~equal ~one:(of_int 1)) + Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero:(of_int 0) + ~equal ~one:(of_int 1)) x in let terms = @@ -514,22 +522,24 @@ struct match terms with | [] -> assert false - | [(ls, lx)] -> ( - match constant with - | None -> - (ls, `Var (V.External lx)) - | Some c -> - (* res = ls * lx + c *) - let res = create_internal ~constant:c sys [(ls, External lx)] in - add_generic_constraint ~l:(External lx) ~o:res - [|ls; Fp.zero; Fp.(negate one); Fp.zero; c|] - (* Could be here *) - sys ; - (Fp.one, `Var res) ) + | [ (ls, lx) ] -> ( + match constant with + | None -> + (ls, `Var (V.External lx)) + | Some c -> + (* res = ls * lx + c *) + let res = + create_internal ~constant:c sys [ (ls, External lx) ] + in + add_generic_constraint ~l:(External lx) ~o:res + [| ls; Fp.zero; Fp.(negate one); Fp.zero; c |] + (* Could be here *) + sys ; + (Fp.one, `Var res) ) | (ls, lx) :: tl -> let rs, rx = completely_reduce sys tl in let res = - create_internal ?constant sys [(ls, External lx); (rs, rx)] + create_internal ?constant sys [ (ls, External lx); (rs, rx) ] in (* res = ls * lx + rs * rx + c *) add_generic_constraint ~l:(External lx) ~r:rx ~o:res @@ -537,7 +547,8 @@ struct ; rs ; Fp.(negate one) ; Fp.zero - ; (match constant with Some x -> x | None -> Fp.zero) |] + ; (match constant with Some x -> x | None -> Fp.zero) + |] (* Could be here *) sys ; (Fp.one, `Var res) ) @@ -565,16 +576,16 @@ struct | `Var x -> if Fp.equal s Fp.one then x else - let sx = create_internal sys [(s, x)] in + let sx = create_internal sys [ (s, x) ] in (* 
s * x - sx = 0 *) add_generic_constraint ~l:x ~o:sx - [|s; Fp.zero; Fp.(negate one); Fp.zero; Fp.zero|] + [| s; Fp.zero; Fp.(negate one); Fp.zero; Fp.zero |] sys ; sx | `Constant -> let x = create_internal sys ~constant:s [] in add_generic_constraint ~l:x - [|Fp.one; Fp.zero; Fp.zero; Fp.zero; Fp.negate s|] + [| Fp.one; Fp.zero; Fp.zero; Fp.zero; Fp.negate s |] sys ; x in @@ -587,16 +598,16 @@ struct sl^2 * xl * xl - so * xo = 0 *) add_generic_constraint ~l:xl ~r:xl ~o:xo - [|Fp.zero; Fp.zero; Fp.negate so; Fp.(sl * sl); Fp.zero|] + [| Fp.zero; Fp.zero; Fp.negate so; Fp.(sl * sl); Fp.zero |] sys | `Var xl, `Constant -> add_generic_constraint ~l:xl ~r:xl - [|Fp.zero; Fp.zero; Fp.zero; Fp.(sl * sl); Fp.negate so|] + [| Fp.zero; Fp.zero; Fp.zero; Fp.(sl * sl); Fp.negate so |] sys | `Constant, `Var xo -> (* sl^2 = so * xo *) add_generic_constraint ~o:xo - [|Fp.zero; Fp.zero; so; Fp.zero; Fp.negate (Fp.square sl)|] + [| Fp.zero; Fp.zero; so; Fp.zero; Fp.negate (Fp.square sl) |] sys | `Constant, `Constant -> assert (Fp.(equal (square sl) so)) ) @@ -608,33 +619,33 @@ struct - s1 s2 (x1 x2) + s3 x3 = 0 *) add_generic_constraint ~l:x1 ~r:x2 ~o:x3 - [|Fp.zero; Fp.zero; s3; Fp.(negate s1 * s2); Fp.zero|] + [| Fp.zero; Fp.zero; s3; Fp.(negate s1 * s2); Fp.zero |] sys | `Var x1, `Var x2, `Constant -> add_generic_constraint ~l:x1 ~r:x2 - [|Fp.zero; Fp.zero; Fp.zero; Fp.(s1 * s2); Fp.negate s3|] + [| Fp.zero; Fp.zero; Fp.zero; Fp.(s1 * s2); Fp.negate s3 |] sys | `Var x1, `Constant, `Var x3 -> (* s1 x1 * s2 = s3 x3 *) add_generic_constraint ~l:x1 ~o:x3 - [|Fp.(s1 * s2); Fp.zero; Fp.negate s3; Fp.zero; Fp.zero|] + [| Fp.(s1 * s2); Fp.zero; Fp.negate s3; Fp.zero; Fp.zero |] sys | `Constant, `Var x2, `Var x3 -> add_generic_constraint ~r:x2 ~o:x3 - [|Fp.zero; Fp.(s1 * s2); Fp.negate s3; Fp.zero; Fp.zero|] + [| Fp.zero; Fp.(s1 * s2); Fp.negate s3; Fp.zero; Fp.zero |] sys | `Var x1, `Constant, `Constant -> add_generic_constraint ~l:x1 - [|Fp.(s1 * s2); Fp.zero; Fp.zero; Fp.zero; 
Fp.negate s3|] + [| Fp.(s1 * s2); Fp.zero; Fp.zero; Fp.zero; Fp.negate s3 |] sys | `Constant, `Var x2, `Constant -> add_generic_constraint ~r:x2 - [|Fp.zero; Fp.(s1 * s2); Fp.zero; Fp.zero; Fp.negate s3|] + [| Fp.zero; Fp.(s1 * s2); Fp.zero; Fp.zero; Fp.negate s3 |] sys | `Constant, `Constant, `Var x3 -> add_generic_constraint ~o:x3 - [|Fp.zero; Fp.zero; s3; Fp.zero; Fp.(negate s1 * s2)|] + [| Fp.zero; Fp.zero; s3; Fp.zero; Fp.(negate s1 * s2) |] sys | `Constant, `Constant, `Constant -> assert (Fp.(equal s3 Fp.(s1 * s2))) ) @@ -644,7 +655,7 @@ struct | `Var x -> (* -x + x * x = 0 *) add_generic_constraint ~l:x ~r:x - [|Fp.(negate one); Fp.zero; Fp.zero; Fp.one; Fp.zero|] + [| Fp.(negate one); Fp.zero; Fp.zero; Fp.one; Fp.zero |] sys | `Constant -> assert (Fp.(equal s (s * s))) ) @@ -656,43 +667,43 @@ struct *) if not (Fp.equal s1 s2) then add_generic_constraint ~l:x1 ~r:x2 - [|s1; Fp.(negate s2); Fp.zero; Fp.zero; Fp.zero|] + [| s1; Fp.(negate s2); Fp.zero; Fp.zero; Fp.zero |] sys (* TODO: optimize by not adding generic costraint but rather permuting the vars *) else add_generic_constraint ~l:x1 ~r:x2 - [|s1; Fp.(negate s2); Fp.zero; Fp.zero; Fp.zero|] + [| s1; Fp.(negate s2); Fp.zero; Fp.zero; Fp.zero |] sys | `Var x1, `Constant -> add_generic_constraint ~l:x1 - [|s1; Fp.zero; Fp.zero; Fp.zero; Fp.negate s2|] + [| s1; Fp.zero; Fp.zero; Fp.zero; Fp.negate s2 |] sys | `Constant, `Var x2 -> add_generic_constraint ~r:x2 - [|Fp.zero; s2; Fp.zero; Fp.zero; Fp.negate s1|] + [| Fp.zero; s2; Fp.zero; Fp.zero; Fp.negate s1 |] sys | `Constant, `Constant -> assert (Fp.(equal s1 s2)) ) - | Plonk_constraint.T (Basic {l; r; o; m; c}) -> + | Plonk_constraint.T (Basic { l; r; o; m; c }) -> (* 0 - = l.s * l.x - + r.s * r.x - + o.s * o.x - + m * (l.x * r.x) - + c - = - l.s * l.s' * l.x' - + r.s * r.s' * r.x' - + o.s * o.s' * o.x' - + m * (l.s' * l.x' * r.s' * r.x') - + c - = - (l.s * l.s') * l.x' - + (r.s * r.s') * r.x' - + (o.s * o.s') * o.x' - + (m * l.s' * r.s') * l.x' r.x' - + c 
- *) + = l.s * l.x + + r.s * r.x + + o.s * o.x + + m * (l.x * r.x) + + c + = + l.s * l.s' * l.x' + + r.s * r.s' * r.x' + + o.s * o.s' * o.x' + + m * (l.s' * l.x' * r.s' * r.x') + + c + = + (l.s * l.s') * l.x' + + (r.s * r.s') * r.x' + + (o.s * o.s') * o.x' + + (m * l.s' * r.s') * l.x' r.x' + + c + *) (* TODO: This is sub-optimal *) let c = ref c in let red_pr (s, x) = @@ -705,17 +716,17 @@ struct (s', Some (Fp.(s * s'), x)) in (* l.s * l.x - + r.s * r.x - + o.s * o.x - + m * (l.x * r.x) - + c - = - l.s * l.s' * l.x' - + r.s * r.x - + o.s * o.x - + m * (l.x * r.x) - + c - = + + r.s * r.x + + o.s * o.x + + m * (l.x * r.x) + + c + = + l.s * l.s' * l.x' + + r.s * r.x + + o.s * o.x + + m * (l.x * r.x) + + c + = *) let l_s', l = red_pr l in let r_s', r = red_pr r in @@ -731,9 +742,9 @@ struct failwith "Must use non-constant cvar in plonk constraints" in add_generic_constraint ?l:(var l) ?r:(var r) ?o:(var o) - [|coeff l; coeff r; coeff o; m; !c|] + [| coeff l; coeff r; coeff o; m; !c |] sys - | Plonk_constraint.T (Poseidon {state}) -> + | Plonk_constraint.T (Poseidon { state }) -> let reduce_state sys (s : Fp.t Snarky_backendless.Cvar.t array array) : V.t array array = Array.map ~f:(Array.map ~f:reduce_to_v) s @@ -742,7 +753,7 @@ struct let add_round_state array ind = let prev = Array.mapi array ~f:(fun i x -> - wire sys x sys.next_row (index_to_col i) ) + wire sys x sys.next_row (index_to_col i)) in add_row sys (Array.map array ~f:(fun x -> Some x)) @@ -754,18 +765,18 @@ struct if i = Array.length state - 1 then let prev = Array.mapi perm ~f:(fun i x -> - wire sys x sys.next_row (index_to_col i) ) + wire sys x sys.next_row (index_to_col i)) in add_row sys (Array.map perm ~f:(fun x -> Some x)) Zero prev.(0) prev.(1) prev.(2) - [|Fp.zero; Fp.zero; Fp.zero; Fp.zero; Fp.zero|] - else add_round_state perm i ) + [| Fp.zero; Fp.zero; Fp.zero; Fp.zero; Fp.zero |] + else add_round_state perm i) state - | Plonk_constraint.T (EC_add {p1; p2; p3}) -> + | Plonk_constraint.T (EC_add 
{ p1; p2; p3 }) -> let red = - Array.map [|p1; p2; p3|] ~f:(fun (x, y) -> - (reduce_to_v x, reduce_to_v y) ) + Array.map [| p1; p2; p3 |] ~f:(fun (x, y) -> + (reduce_to_v x, reduce_to_v y)) in let y = Array.mapi @@ -784,7 +795,7 @@ struct (Array.map red ~f:(fun (x, _) -> Some x)) Add2 x.(0) x.(1) x.(2) [||] ; () - | Plonk_constraint.T (EC_scale {state}) -> + | Plonk_constraint.T (EC_scale { state }) -> let i = ref 0 in let add_ecscale_round (round : V.t Scale_round.t) = let xt = wire sys round.xt sys.next_row L in @@ -797,20 +808,20 @@ struct let xt1 = wire sys round.xt (sys.next_row + 2) R in let ys = wire sys round.ys (sys.next_row + 2) O in add_row sys - [|Some round.xt; Some round.b; Some round.yt|] + [| Some round.xt; Some round.b; Some round.yt |] Vbmul1 xt b yt [||] ; add_row sys - [|Some round.xp; Some round.l1; Some round.yp|] + [| Some round.xp; Some round.l1; Some round.yp |] Vbmul2 xp l1 yp [||] ; add_row sys - [|Some round.xs; Some round.xt; Some round.ys|] + [| Some round.xs; Some round.xt; Some round.ys |] Vbmul3 xs xt1 ys [||] in Array.iter ~f:(fun round -> add_ecscale_round round ; incr i) (Array.map state ~f:(Scale_round.map ~f:reduce_to_v)) ; () - | Plonk_constraint.T (EC_endoscale {state}) -> + | Plonk_constraint.T (EC_endoscale { state }) -> let add_endoscale_round (round : V.t Endoscale_round.t) = let b2i1 = wire sys round.b2i1 sys.next_row L in let xt = wire sys round.xt sys.next_row R in @@ -824,18 +835,18 @@ struct let xq1 = wire sys round.xq (sys.next_row + 3) R in let ys = wire sys round.ys (sys.next_row + 3) O in add_row sys - [|Some round.b2i1; Some round.xt; None|] + [| Some round.b2i1; Some round.xt; None |] Endomul1 b2i1 xt - {row= After_public_input sys.next_row; col= O} + { row = After_public_input sys.next_row; col = O } [||] ; add_row sys - [|Some round.b2i; Some round.xq; Some round.yt|] + [| Some round.b2i; Some round.xq; Some round.yt |] Endomul2 b2i xq yt [||] ; add_row sys - [|Some round.xp; Some round.l1; Some round.yp|] + 
[| Some round.xp; Some round.l1; Some round.yp |] Endomul3 xp l1 yp [||] ; add_row sys - [|Some round.xs; Some round.xq; Some round.ys|] + [| Some round.xs; Some round.xq; Some round.ys |] Endomul4 xs xq1 ys [||] in Array.iter diff --git a/src/lib/zexe_backend/zexe_backend_common/plonk_dlog_proof.ml b/src/lib/zexe_backend/zexe_backend_common/plonk_dlog_proof.ml index 7ec44faa5a0..9dbc2f11ba0 100644 --- a/src/lib/zexe_backend/zexe_backend_common/plonk_dlog_proof.ml +++ b/src/lib/zexe_backend/zexe_backend_common/plonk_dlog_proof.ml @@ -112,7 +112,7 @@ module Challenge_polynomial = struct [%%versioned module Stable = struct module V1 = struct - type ('g, 'fq) t = {challenges: 'fq array; commitment: 'g} + type ('g, 'fq) t = { challenges : 'fq array; commitment : 'g } [@@deriving version, bin_io, sexp, compare, yojson] let to_latest = Fn.id @@ -141,7 +141,7 @@ module Make (Inputs : Inputs_intf) = struct end] type ('g, 'fq) t_ = ('g, 'fq) Challenge_polynomial.t = - {challenges: 'fq array; commitment: 'g} + { challenges : 'fq array; commitment : 'g } end type message = Challenge_polynomial.t list @@ -163,20 +163,22 @@ module Make (Inputs : Inputs_intf) = struct let to_latest = Fn.id type 'a creator = - messages:( G.Affine.t - , G.Affine.t Or_infinity.t ) - Dlog_plonk_types.Messages.Stable.V1.t - -> openings:( G.Affine.t - , Fq.t - , Fq.t Dlog_plonk_types.Pc_array.t ) - Dlog_plonk_types.Openings.Stable.V1.t + messages: + ( G.Affine.t + , G.Affine.t Or_infinity.t ) + Dlog_plonk_types.Messages.Stable.V1.t + -> openings: + ( G.Affine.t + , Fq.t + , Fq.t Dlog_plonk_types.Pc_array.t ) + Dlog_plonk_types.Openings.Stable.V1.t -> 'a let map_creator c ~f ~messages ~openings = f (c ~messages ~openings) let create ~messages ~openings = let open Dlog_plonk_types.Proof in - {messages; openings} + { messages; openings } end end] end) @@ -188,8 +190,7 @@ module Make (Inputs : Inputs_intf) = struct end with type t := t ) - [%%define_locally - Stable.Latest.(create)] + [%%define_locally 
Stable.Latest.(create)] let g t f = G.Affine.of_backend (f t) @@ -204,11 +205,12 @@ module Make (Inputs : Inputs_intf) = struct G.Affine.t * G.Affine.t = (g (fst t), g (snd t)) in - { Dlog_plonk_types.Openings.Bulletproof.lr= Array.map ~f:gpair t.lr - ; z_1= t.z1 - ; z_2= t.z2 - ; delta= g t.delta - ; sg= g t.sg } + { Dlog_plonk_types.Openings.Bulletproof.lr = Array.map ~f:gpair t.lr + ; z_1 = t.z1 + ; z_2 = t.z2 + ; delta = g t.delta + ; sg = g t.sg + } let of_backend (t : Backend.t) : t = let proof = opening_proof_of_backend t.proof in @@ -216,14 +218,15 @@ module Make (Inputs : Inputs_intf) = struct (fst t.evals, snd t.evals) |> Tuple_lib.Double.map ~f:(fun e -> let open Evaluations_backend in - { Dlog_plonk_types.Evals.l= e.l - ; r= e.r - ; o= e.o - ; z= e.z - ; t= e.t - ; f= e.f - ; sigma1= e.sigma1 - ; sigma2= e.sigma2 } ) + { Dlog_plonk_types.Evals.l = e.l + ; r = e.r + ; o = e.o + ; z = e.z + ; t = e.t + ; f = e.f + ; sigma1 = e.sigma1 + ; sigma2 = e.sigma2 + }) in let wo x = match Poly_comm.of_backend_without_degree_bound x with @@ -241,16 +244,18 @@ module Make (Inputs : Inputs_intf) = struct in create ~messages: - { l_comm= wo t.messages.l_comm - ; r_comm= wo t.messages.r_comm - ; o_comm= wo t.messages.o_comm - ; z_comm= wo t.messages.z_comm - ; t_comm= w t.messages.t_comm } - ~openings:{proof; evals} - - let eval_to_backend {Dlog_plonk_types.Evals.l; r; o; z; t; f; sigma1; sigma2} - : Evaluations_backend.t = - {l; r; o; z; t; f; sigma1; sigma2} + { l_comm = wo t.messages.l_comm + ; r_comm = wo t.messages.r_comm + ; o_comm = wo t.messages.o_comm + ; z_comm = wo t.messages.z_comm + ; t_comm = w t.messages.t_comm + } + ~openings:{ proof; evals } + + let eval_to_backend + { Dlog_plonk_types.Evals.l; r; o; z; t; f; sigma1; sigma2 } : + Evaluations_backend.t = + { l; r; o; z; t; f; sigma1; sigma2 } let vec_to_array (type t elt) (module V : Snarky_intf.Vector.S with type t = t and type elt = elt) @@ -258,28 +263,33 @@ module Make (Inputs : Inputs_intf) = 
struct Array.init (V.length v) ~f:(V.get v) let to_backend' (chal_polys : Challenge_polynomial.t list) primary_input - ({ messages= {l_comm; r_comm; o_comm; z_comm; t_comm} - ; openings= {proof= {lr; z_1; z_2; delta; sg}; evals= evals0, evals1} } : + ({ messages = { l_comm; r_comm; o_comm; z_comm; t_comm } + ; openings = + { proof = { lr; z_1; z_2; delta; sg }; evals = evals0, evals1 } + } : t) : Backend.t = let g x = G.Affine.to_backend (Or_infinity.Finite x) in let pcw t = Poly_comm.to_backend (`With_degree_bound t) in let pcwo t = Poly_comm.to_backend (`Without_degree_bound t) in let lr = Array.map lr ~f:(fun (x, y) -> (g x, g y)) in - { messages= - { l_comm= pcwo l_comm - ; r_comm= pcwo r_comm - ; o_comm= pcwo o_comm - ; z_comm= pcwo z_comm - ; t_comm= pcw t_comm } - ; proof= {lr; delta= g delta; z1= z_1; z2= z_2; sg= g sg} - ; evals= (eval_to_backend evals0, eval_to_backend evals1) - ; public= primary_input - ; prev_challenges= + { messages = + { l_comm = pcwo l_comm + ; r_comm = pcwo r_comm + ; o_comm = pcwo o_comm + ; z_comm = pcwo z_comm + ; t_comm = pcw t_comm + } + ; proof = { lr; delta = g delta; z1 = z_1; z2 = z_2; sg = g sg } + ; evals = (eval_to_backend evals0, eval_to_backend evals1) + ; public = primary_input + ; prev_challenges = Array.of_list_map chal_polys - ~f:(fun {Challenge_polynomial.commitment; challenges} -> + ~f:(fun { Challenge_polynomial.commitment; challenges } -> ( challenges - , { Marlin_plonk_bindings.Types.Poly_comm.shifted= None - ; unshifted= [|Or_infinity.Finite commitment|] } ) ) } + , { Marlin_plonk_bindings.Types.Poly_comm.shifted = None + ; unshifted = [| Or_infinity.Finite commitment |] + } )) + } let to_backend chal_polys primary_input t = to_backend' chal_polys (List.to_array primary_input) t @@ -289,14 +299,14 @@ module Make (Inputs : Inputs_intf) = struct match (message : message option) with Some s -> s | None -> [] in let challenges = - List.map chal_polys ~f:(fun {Challenge_polynomial.challenges; _} -> - challenges ) 
+ List.map chal_polys ~f:(fun { Challenge_polynomial.challenges; _ } -> + challenges) |> Array.concat in let commitments = Array.of_list_map chal_polys - ~f:(fun {Challenge_polynomial.commitment; _} -> - G.Affine.to_backend (Finite commitment) ) + ~f:(fun { Challenge_polynomial.commitment; _ } -> + G.Affine.to_backend (Finite commitment)) in let res = Backend.create pk primary auxiliary challenges commitments in of_backend res @@ -306,14 +316,14 @@ module Make (Inputs : Inputs_intf) = struct match (message : message option) with Some s -> s | None -> [] in let challenges = - List.map chal_polys ~f:(fun {Challenge_polynomial.challenges; _} -> - challenges ) + List.map chal_polys ~f:(fun { Challenge_polynomial.challenges; _ } -> + challenges) |> Array.concat in let commitments = Array.of_list_map chal_polys - ~f:(fun {Challenge_polynomial.commitment; _} -> - G.Affine.to_backend (Finite commitment) ) + ~f:(fun { Challenge_polynomial.commitment; _ } -> + G.Affine.to_backend (Finite commitment)) in let%map.Async.Deferred res = Backend.create_async pk primary auxiliary challenges commitments @@ -325,7 +335,7 @@ module Make (Inputs : Inputs_intf) = struct let vks_and_v = Array.of_list_map ts ~f:(fun (vk, t, xs, m) -> let p = to_backend' (Option.value ~default:[] m) (conv xs) t in - (vk, p) ) + (vk, p)) in Backend.batch_verify (Array.map ~f:fst vks_and_v) diff --git a/src/lib/zexe_backend/zexe_backend_common/poly_comm.ml b/src/lib/zexe_backend/zexe_backend_common/poly_comm.ml index f44a08df73b..38a03e75cfb 100644 --- a/src/lib/zexe_backend/zexe_backend_common/poly_comm.ml +++ b/src/lib/zexe_backend/zexe_backend_common/poly_comm.ml @@ -57,14 +57,15 @@ module Make (Inputs : Inputs_intf) = struct (commitment : (Base_field.t * Base_field.t) Or_infinity.t Dlog_plonk_types.Poly_comm.With_degree_bound.t) : Backend.t = - {shifted= Some commitment.shifted; unshifted= commitment.unshifted} + { shifted = Some commitment.shifted; unshifted = commitment.unshifted } let 
without_degree_bound_to_backend (commitment : (Base_field.t * Base_field.t) Dlog_plonk_types.Poly_comm.Without_degree_bound.t) : Backend.t = - { shifted= None - ; unshifted= Array.map ~f:(fun x -> Or_infinity.Finite x) commitment } + { shifted = None + ; unshifted = Array.map ~f:(fun x -> Or_infinity.Finite x) commitment + } let to_backend (t : t) : Backend.t = let t = @@ -85,18 +86,19 @@ module Make (Inputs : Inputs_intf) = struct | None -> assert false | Some shifted -> - `With_degree_bound {With_degree_bound.unshifted= t.unshifted; shifted} + `With_degree_bound + { With_degree_bound.unshifted = t.unshifted; shifted } let of_backend_without_degree_bound (t : Backend.t) = let open Dlog_plonk_types.Poly_comm in match t with - | {unshifted; shifted= None} -> + | { unshifted; shifted = None } -> `Without_degree_bound (Array.map unshifted ~f:(function | Infinity -> assert false | Finite g -> - g )) + g)) | _ -> assert false end diff --git a/src/lib/zexe_backend/zexe_backend_common/scale_round.ml b/src/lib/zexe_backend/zexe_backend_common/scale_round.ml index ff0cdf42b0b..50087429d7b 100644 --- a/src/lib/zexe_backend/zexe_backend_common/scale_round.ml +++ b/src/lib/zexe_backend/zexe_backend_common/scale_round.ml @@ -3,25 +3,35 @@ open Core_kernel [%%versioned module Stable = struct module V1 = struct - type 'a t = {xt: 'a; b: 'a; yt: 'a; xp: 'a; l1: 'a; yp: 'a; xs: 'a; ys: 'a} + type 'a t = + { xt : 'a; b : 'a; yt : 'a; xp : 'a; l1 : 'a; yp : 'a; xs : 'a; ys : 'a } [@@deriving sexp, fields, hlist] end end] let typ g = - Snarky_backendless.Typ.of_hlistable [g; g; g; g; g; g; g; g] + Snarky_backendless.Typ.of_hlistable [ g; g; g; g; g; g; g; g ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist -let map {xt; b; yt; xp; l1; yp; xs; ys} ~f = - {xt= f xt; b= f b; yt= f yt; xp= f xp; l1= f l1; yp= f yp; xs= f xs; ys= f ys} +let map { xt; b; yt; xp; l1; yp; xs; ys } ~f = + { xt = f xt + ; b = f b + ; yt = f yt + ; xp = f xp + ; l1 
= f l1 + ; yp = f yp + ; xs = f xs + ; ys = f ys + } let map2 t1 t2 ~f = - { xt= f t1.xt t2.xt - ; b= f t1.b t2.b - ; yt= f t1.yt t2.yt - ; xp= f t1.xp t2.xp - ; l1= f t1.l1 t2.l1 - ; yp= f t1.yp t2.yp - ; xs= f t1.xs t2.xs - ; ys= f t1.ys t2.ys } + { xt = f t1.xt t2.xt + ; b = f t1.b t2.b + ; yt = f t1.yt t2.yt + ; xp = f t1.xp t2.xp + ; l1 = f t1.l1 t2.l1 + ; yp = f t1.yp t2.yp + ; xs = f t1.xs t2.xs + ; ys = f t1.ys t2.ys + } diff --git a/src/nonconsensus/snark_params/snark_params_nonconsensus.ml b/src/nonconsensus/snark_params/snark_params_nonconsensus.ml index 8ba463a96c9..d81ff573dda 100644 --- a/src/nonconsensus/snark_params/snark_params_nonconsensus.ml +++ b/src/nonconsensus/snark_params/snark_params_nonconsensus.ml @@ -1,10 +1,8 @@ (* snark_params_nonconsensus.ml *) -[%%import -"/src/config.mlh"] +[%%import "/src/config.mlh"] -[%%ifdef -consensus_mechanism] +[%%ifdef consensus_mechanism] [%%error "Snark_params_nonconsensus should not be compiled if there's a consensus \ @@ -14,23 +12,19 @@ consensus_mechanism] open Snarkette -[%%if -curve_size = 255] +[%%if curve_size = 255] (* only size we should be building nonconsensus code for *) [%%else] -[%%show -curve_size] +[%%show curve_size] -[%%error -"invalid value for \"curve_size\""] +[%%error "invalid value for \"curve_size\""] [%%endif] -[%%inject -"ledger_depth", ledger_depth] +[%%inject "ledger_depth", ledger_depth] module Field = struct open Core_kernel @@ -57,8 +51,7 @@ module Field = struct let unpack t = to_bits t let project bits = - Core_kernel.Option.value_exn ~message:"project: invalid bits" - (of_bits bits) + Core_kernel.Option.value_exn ~message:"project: invalid bits" (of_bits bits) end module Tock = struct @@ -88,13 +81,12 @@ module Inner_curve = struct let y2 = (x * square x) + (Coefficients.a * x) + Coefficients.b in if is_square y2 then Some (sqrt y2) else None - [%%define_locally - C.(of_affine, to_affine, to_affine_exn, one, ( + ), negate)] + [%%define_locally C.(of_affine, to_affine, 
to_affine_exn, one, ( + ), negate)] module Scalar = struct (* though we have bin_io, not versioned here; this type exists for Private_key.t, where it is versioned-asserted and its serialization tested - *) + *) type t = Pasta.Fq.t [@@deriving bin_io_unversioned, sexp] type _unused = unit constraint t = Tock.Field.t