diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index f91113725..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,18 +0,0 @@ -version: 2 - -jobs: - build: - docker: - - image: holochain/holonix:latest - steps: - - checkout - - run: - name: pnpm setup - command: nix-shell --run 'npm i -g pnpm' - no_output_timeout: 30s - - run: - name: sim2h server - command: nix-shell --run 'npm run dht:sim2h' - background: true - no_output_timeout: 20m - - run: nix-shell --run 'pnpm install && npm run build && npm run test:integration:test' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 065cd45fe..1fbce59fe 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ name: Release on: push: tags: - - ^[0-9]+\.[0-9]+\.[0-9]+.* + - '[0-9]+.[0-9]+.[0-9]+**' # on: # push: @@ -62,26 +62,23 @@ jobs: - name: Prepare Nix environment run: nix-shell --command "echo Completed" - name: Install PNPM and dependencies - run: nix-shell --command "npm i pnpm && npx pnpm install --no-frozen-lockfile" - - name: Set up release bundle from template + run: nix-shell --command "pnpm install --no-frozen-lockfile" + - name: Build WASM, dnas, happs run: | - cp -a bundles_templates/* bundles/ RELEASE="${GITHUB_REF#refs/tags/}" - sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/full_suite_release_template/happ.yaml - cat bundles/full_suite_release_template/happ.yaml - - name: Build WASM, dnas, happs - run: nix-shell --run 'npm run build:crates' + RELEASE_DOWNLOAD_URL="https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}" + nix-shell --run 'pnpm run build:holochain:release' - name: Build explorer UI and webhapp package - run: nix-shell --run 'npm run build:webhapp' + run: nix-shell --run 'pnpm run build:webhapp' - name: upload bundles env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - gh release upload "${GITHUB_REF#refs/tags/}" "webhapp/hrea.webhapp" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "bundles/full_suite_release_template/hrea_suite.happ" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/agent/hrea_agent.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/agreement/hrea_agreement.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/observation/hrea_observation.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/planning/hrea_planning.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/proposal/hrea_proposal.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/specification/hrea_specification.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/web-app/hrea.webhapp" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/app/full_suite/hrea_suite.happ" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agent/hrea_agent.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agreement/hrea_agreement.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/observation/hrea_observation.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/planning/hrea_planning.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/proposal/hrea_proposal.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/specification/hrea_specification.dna" --clobber diff 
--git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..c42cc0698 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,74 @@ +name: Checks + +on: [push, pull_request] + +jobs: + checks: + timeout-minutes: 55 + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-11] + fail-fast: false + + steps: + - name: Fetch source code + uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + .cargo/bin/ + .cargo/registry/index/ + .cargo/registry/cache/ + .cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Setup Xcode version + if: ${{ runner.os == 'macOS' }} + uses: maxim-lobanov/setup-xcode@v1.2.3 + with: + xcode-version: latest-stable + + - name: Check macOS version + if: ${{ runner.os == 'macOS' }} + run: sw_vers + + - name: Set up nix + uses: cachix/install-nix-action@v16 + with: + nix_path: nixpkgs=channel:nixos-21.05 + extra_nix_config: | + substituters = https://cache.nixos.org https://cache.holo.host https://ci-builds.cachix.org https://holochain-ci.cachix.org + trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.holo.host-1:lNXIXtJgS9Iuw4Cu6X0HINLu9sTfcjEntnrgwMQIMcE= cache.holo.host-2:ZJCkX3AUYZ8soxTLfTb60g+F3MkWD7hkH9y8CgqwhDQ= ci-builds.cachix.org-1:fxB0+h/MMlCpXf6hFsQM31YpHbaQoRmcNPNHwDUkXA4= holochain-ci.cachix.org-1:5IUSkZc0aoRS53rfkvH9Kid40NpyjwCMCzwRTXy+QN8= + + - uses: cachix/cachix-action@v10 + with: + name: ci-builds + authToken: "${{ secrets.CACHIX_TOKEN }}" + + - name: Inspect nix.conf + run: cat ~/.config/nix/nix.conf + + - name: Install recent bash, and set as NIX_BUILD_SHELL # needed by macos, which has an older bash incompatible with nix + if: ${{ runner.os == 'macOS' }} + run: echo "NIX_BUILD_SHELL=$(nix-build -A bashInteractive '')/bin/bash" >> $GITHUB_ENV + + - name: Prepare Nix environment + run: nix-shell --command "echo Completed" + + - name: Install pnpm dependencies + run: nix-shell --pure --run 'pnpm install' + + - name: Build WASM and typescript + run: nix-shell --pure --run 'npm run build' + + - name: Run integration tests + run: nix-shell --pure --run 'npm run test:integration' diff --git a/.gitignore b/.gitignore index e85d26ea2..7b9a2bb08 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,7 @@ .hc .hc_live* .DS_Store -Cargo.lock +#Cargo.lock bundle.json .cargo/ @@ -10,13 +10,9 @@ node_modules/ # Rust build files /target/ # Holochain build files -/happs/**/*.dna -/bundles/**/.hc -/bundles/**/*.happ -/webhapp/**/*.webhapp - -# Manifest Files Under Automation -/bundles/*_template/* +/bundles/dna/* +/bundles/app/* +/webhapp/web-app/*.webhapp # https://github.com/maxlath/backup-github-repo /repo-backup diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 4d96eaae1..000000000 --- a/.travis.yml +++ /dev/null @@ -1,26 +0,0 @@ -language: nix -before_script: nix-shell --run 'npm i -g pnpm' && nix-shell --run 'npm run dht:sim2h &' -script: nix-shell --run 'pnpm install --network-concurrency 1 && npm run build && npm run test:integration:test' -git: - quiet: true - depth: false -# disable default submodule behaviour - submodules: false -# replace submodule URIs with HTTPS (public) ones, then clone -before_install: - - sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules - - git submodule update --init - - pushd 
thirdparty/happ-agent-registration - - sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules - - git submodule update --init - - popd -branches: - only: - - master - # :NOTE: pull request builds are on, so this takes care of feature branches - - /^(release|hotfix)\/.*/ -# cache: -# directories: -# - "/tmp/holochain/target" -# yarn: true -# cargo: true diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000..d0c2abdf4 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1994 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "bit-set" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +dependencies = [ + "libc", + "num-integer", + "num-traits", + "serde", + "time", + "winapi", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "convert_case" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56e810098d091a972466b55679bde45c8a5c1b034b8dad3e4ac5486296bd097" +dependencies = [ + "clap", + "strum", + "strum_macros", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "darling" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version", + "syn", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "gcollections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f551fdf23ef80329f754919669147a71c67b6cfe3569cd93b6fabdd62044377" +dependencies = [ + "bit-set", + "num-integer", + "num-traits", + "trilean", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" + +[[package]] +name = "hc_zome_agent_registration" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" +dependencies = [ + "hc_zome_agent_registration_lib", + "hc_zome_agent_registration_rpc", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_agent_registration_hrea" +version = "0.1.0" +dependencies = [ + "hc_zome_agent_registration", +] + +[[package]] +name = "hc_zome_agent_registration_lib" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" +dependencies = [ + "hc_zome_agent_registration_storage", + "hdk", +] + +[[package]] +name = "hc_zome_agent_registration_rpc" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" +dependencies = [ + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_agent_registration_storage" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" +dependencies = [ + "hc_zome_agent_registration_storage_consts", + "hdk", +] + +[[package]] +name = "hc_zome_agent_registration_storage_consts" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" + +[[package]] +name = "hc_zome_dna_auth_resolver" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hc_zome_dna_auth_resolver_rpc", + "hc_zome_dna_auth_resolver_storage", + "hdk", + "serde", +] + +[[package]] +name = 
"hc_zome_dna_auth_resolver_hrea" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_lib" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hc_zome_dna_auth_resolver_rpc", + "hc_zome_dna_auth_resolver_storage", + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_rpc" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "holo_hash", + "holochain_serialized_bytes", + "holochain_zome_types", + "serde", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_storage" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_rea_actions" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", + "vf_actions", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_lib", + "hc_zome_rea_agreement_rpc", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_agreement_index_agreement" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_agreement_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hc_zome_rea_agreement_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_agreement_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hc_zome_rea_agreement_storage_consts", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_commitment" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_lib", + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_commitment_storage", + "hc_zome_rea_commitment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_commitment_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "paste", + "serde", +] + +[[package]] +name = "hc_zome_rea_commitment_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_commitment_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_commitment_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_commitment_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_commitment_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_event" +version = 
"0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_lib", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_zome_api", + "hc_zome_rea_process_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_economic_event_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_storage_consts", + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_fulfillment_storage_consts", + "hc_zome_rea_process_storage_consts", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_event_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_event_zome_api", + "hc_zome_rea_economic_resource_lib", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_storage_consts", + "hdk", + "hdk_records", + "hdk_relay_pagination", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_economic_event_rpc" +version = "0.1.0" +dependencies = [ + "hdk_relay_pagination", + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_event_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_event_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_event_zome_api" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_resource_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_resource_lib", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_zome_api", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_economic_resource_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_resource_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_economic_resource_zome_api", + "hc_zome_rea_process_storage", + "hc_zome_rea_process_storage_consts", + "hc_zome_rea_resource_specification_storage_consts", + "hdk", + "hdk_records", + "hdk_relay_pagination", + "hdk_semantic_indexes_client_lib", + "paste", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_rpc" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "holochain_serialized_bytes", + "serde", + 
"serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_resource_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_resource_zome_api" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_fulfillment_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib_destination" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib_origin" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hc_zome_rea_fulfillment_storage_consts", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_fulfillment_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib_destination", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib_origin", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_fulfillment_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_fulfillment_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_intent" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_lib", + "hc_zome_rea_intent_rpc", + "hc_zome_rea_intent_storage", + "hc_zome_rea_intent_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_intent_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + 
+[[package]] +name = "hc_zome_rea_intent_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hc_zome_rea_intent_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_intent_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_intent_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_intent_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_process" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_intent_storage_consts", + "hc_zome_rea_process_lib", + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage_consts", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_intent_storage_consts", + "hc_zome_rea_process_lib", + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage_consts", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_process_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_lib", + "hc_zome_rea_process_specification_rpc", + "hc_zome_rea_process_specification_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_process_specification_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_rpc", + "hc_zome_rea_process_specification_storage", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_process_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_lib", + "hc_zome_rea_proposal_rpc", + "hc_zome_rea_proposal_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposal_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hdk", + 
"hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposal_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hc_zome_rea_proposal_storage", + "hc_zome_rea_proposal_storage_consts", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposal_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposal_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposal_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposed_intent" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_lib", + "hc_zome_rea_proposed_intent_rpc", + "hc_zome_rea_proposed_intent_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hc_zome_rea_proposed_intent_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposed_to" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_lib", + "hc_zome_rea_proposed_to_rpc", + "hc_zome_rea_proposed_to_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_to_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_to_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hc_zome_rea_proposed_to_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposed_to_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_to_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_to_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_resource_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_lib", + "hc_zome_rea_resource_specification_rpc", + "hc_zome_rea_resource_specification_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_resource_specification_index_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + 
"hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hc_zome_rea_resource_specification_storage", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_satisfaction_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib_destination" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib_origin" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_satisfaction_lib", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_satisfaction_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib_destination", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib_origin", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_satisfaction_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_satisfaction_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_unit" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_lib", + "hc_zome_rea_unit_rpc", + "hdk", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_rpc", + "hc_zome_rea_unit_storage", + "hc_zome_rea_unit_storage_consts", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = 
"hc_zome_rea_unit_rpc" +version = "0.1.0" +dependencies = [ + "hdk_records", + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_storage_consts" +version = "0.1.0" + +[[package]] +name = "hdk" +version = "0.0.124" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3ac26c39ff5c824046f6c3192dfcf91eb19111569ae67547f4c86e810bba2cd" +dependencies = [ + "hdk_derive", + "holo_hash", + "holochain_wasmer_guest", + "holochain_zome_types", + "paste", + "serde", + "serde_bytes", + "thiserror", + "tracing", + "tracing-core", +] + +[[package]] +name = "hdk_derive" +version = "0.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff276a067b2a6cc6c8e11f40b72151ffdab235f4e9746ab0d91f99a56bbbd50d" +dependencies = [ + "holochain_zome_types", + "paste", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "hdk_records" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver_lib", + "hdk", + "hdk_rpc_errors", + "hdk_semantic_indexes_zome_rpc", + "hdk_uuid_types", + "holo_hash", + "serde", + "serde_maybe_undefined", + "thiserror", +] + +[[package]] +name = "hdk_relay_pagination" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", +] + +[[package]] +name = "hdk_rpc_errors" +version = "0.1.0" +dependencies = [ + "hdk", + "holo_hash", + "serde", + "thiserror", +] + +[[package]] +name = "hdk_semantic_indexes_client_lib" +version = "0.1.0" +dependencies = [ + "hdk", + "hdk_records", + "hdk_semantic_indexes_zome_rpc", + "paste", + "serde", +] + +[[package]] +name = "hdk_semantic_indexes_zome_derive" +version = "0.1.0" +dependencies = [ + "convert_case 0.1.0", + "darling", + "quote", + "syn", +] + +[[package]] +name = "hdk_semantic_indexes_zome_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver_lib", + "hdk", + "hdk_records", + "hdk_relay_pagination", + "hdk_rpc_errors", + "hdk_semantic_indexes_zome_rpc", + "serde", + "serde_maybe_undefined", +] + +[[package]] +name = "hdk_semantic_indexes_zome_rpc" +version = "0.1.0" +dependencies = [ + "hdk_rpc_errors", + "hdk_uuid_types", + "holochain_serialized_bytes", + "serde", +] + +[[package]] +name = "hdk_uuid_types" +version = "0.1.0" +dependencies = [ + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "holo_hash" +version = "0.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d0ccc68b3c7b63b82fc41ec03d4e921ab144102c6af763f8ce729709ca54ad2" +dependencies = [ + "holochain_serialized_bytes", + "kitsune_p2p_dht_arc", + "serde", + "serde_bytes", + "thiserror", +] + +[[package]] +name = "holochain_serialized_bytes" +version = "0.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9805b3e01e7b5c144782a0823db4dc895fec18a9ccd45a492ce7c7bf157a9e38" +dependencies = [ + "holochain_serialized_bytes_derive", + 
"rmp-serde", + "serde", + "serde-transcode", + "serde_bytes", + "serde_json", + "thiserror", +] + +[[package]] +name = "holochain_serialized_bytes_derive" +version = "0.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1077232d0c427d64feb9e138fa22800e447eafb1810682d6c13beb95333cb32c" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "holochain_wasmer_common" +version = "0.0.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6de9bda7e1b991ce453ef55601405e43d7ef0cafb0108ed0b4755a1398dae05" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_bytes", + "thiserror", +] + +[[package]] +name = "holochain_wasmer_guest" +version = "0.0.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "becdd2a6c662ac81a1c1aeae04eb39a8c6d987d79415fc9f6fff609bb106a90e" +dependencies = [ + "holochain_serialized_bytes", + "holochain_wasmer_common", + "parking_lot", + "serde", + "tracing", +] + +[[package]] +name = "holochain_zome_types" +version = "0.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3941d7e413be0a0298dfd9b31c4d9a87b0d89aca3eadbd00b379f2d4f5478d40" +dependencies = [ + "chrono", + "holo_hash", + "holochain_serialized_bytes", + "holochain_wasmer_common", + "kitsune_p2p_timestamp", + "paste", + "serde", + "serde_bytes", + "subtle", + "thiserror", + "tracing", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "intervallum" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ccecd834666f695ecec3ff0d5fc32e32c91abea91a28fd0aceb4b35a82cee1" +dependencies = [ + "bit-set", + "gcollections", + "num-integer", + "num-traits", + "trilean", +] + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "kitsune_p2p_dht_arc" +version = "0.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3203925b13eb83d95825025eb1f4a97f5a4e049e5d8e480783bc30b5a1b3cd2f" +dependencies = [ + "derive_more", + "gcollections", + "intervallum", + "num-traits", + "serde", +] + +[[package]] +name = "kitsune_p2p_timestamp" +version = "0.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1528a9d36c33444a8d70b3264e72df64351d41afd76ceebf0d2103ca81dad6c" +dependencies = [ + "chrono", + "derive_more", + "serde", + "thiserror", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.120" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ad5c14e80759d0939d013e6ca49930e59fc53dd8e5009132f76240c179380c09" + +[[package]] +name = "lock_api" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rmp" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f55e5fa1446c4d5dd1f5daeed2a4fe193071771a2636274d0d7a3b082aa7ad6" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "rmp-serde" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723ecff9ad04f4ad92fe1c8ca6c20d2196d9286e9c60727c4cb5511629260e9d" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "semver" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-transcode" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590c0e25c2a5bb6e85bf5c1bce768ceb86b316e7a01bdf07d2cb4ec2271990e2" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_bytes" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_maybe_undefined" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bd81eb48f4c437cadc685403cad539345bf703d78e63707418431cecd4522b" + +[[package]] +name = "strum_macros" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87c85aa3f8ea653bfd3ddf25f7ee357ee4d204731f6aa9ad04002306f6e2774c" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "1.0.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd69e719f31e88618baa1eaa6ee2de5c9a1c004f1e9ecdb58e8352a13f20a01" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" +dependencies = [ + "libc", + "wasi", + "winapi", +] + +[[package]] +name = "tracing" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa31669fa42c09c34d94d8165dd2012e8ff3c66aca50f3bb226b68f216f2706c" +dependencies = [ + "lazy_static", + "valuable", +] + +[[package]] +name = "trilean" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683ba5022fe6dbd7133cad150478ccf51bdb6d861515181e5fc6b4323d4fa424" + +[[package]] +name = "unicode-segmentation" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" + +[[package]] +name = "unicode-width" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "vf_actions" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "vf_attributes_hdk" +version = "0.1.0" +dependencies = [ + "chrono", + "hdk_semantic_indexes_zome_rpc", + "hdk_uuid_types", + "holo_hash", + "holochain_serialized_bytes", + "holochain_zome_types", + "serde", +] + +[[package]] +name = "vf_measurement" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 
+dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index e335a5396..74c3ad1c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ members= [ "lib/hdk_semantic_indexes/rpc", "lib/hdk_semantic_indexes/zome", "lib/hdk_semantic_indexes/zome_derive", - "lib/hdk_type_serialization_macros", + "lib/hdk_uuid_types", "lib/vf_actions", "lib/serde_maybe_undefined", "lib/vf_measurement", diff --git a/README.md b/README.md index 74882345f..8381ff8de 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ These are fully distributed, agent-centric applications. There are no transactio - [Storage constants \(database internals\)](#storage-constants-database-internals) - [Library modules](#library-modules) - [`hdk_records`](#hdk_records) - - [`hdk_type_serialization_macros`](#hdk_type_serialization_macros) + - [`hdk_uuid_types`](#hdk_uuid_types) - [`serde_maybe_undefined`](#serde_maybe_undefined) - [Other names](#other-names) - [License](#license) @@ -134,11 +134,15 @@ There are a few sets of `*.yaml` configuration files used by Holochain in its build processes. -[**`bundles/`**](bundles/) contains configuration files which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application. +[**`bundles/`**](bundles/) contains configuration files for: -These bundles are used by the project scripts to run the application locally from this repository. There are also [**`bundles_templates/`**](bundles_templates/) which are used by the Github releases process to build pre-packaged binaries for end-user installation into the [Holochain Launcher](https://github.com/holochain/launcher); in combination with the [**`webhapp/`**](webhapp/) configuration which also packages & associates a user interface. If you aren't developing hREA yourself this is a much easier way to setup the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the Holochain Launcher. +- `dna_templates`, which group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" (compiled WASM files) into Holochain DNAs; +- `app_templates`, which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application; and +- `web-app`, which binds a 'hApp bundle' with a (zipped) JavaScript single-page web application that talks to the Holochain backend. +These bundles are used by the project scripts to run the application locally from this repository, and to build for release. The `*_templates` are first copied to non-`_template` locations and some substitutions are made; see `scripts/package-dnas.sh`.
In development mode, zome WASMs are referenced; in release mode everything is bundled together into a much larger file, which is too big for the Holochain development Sandbox or Tryorama test runner. + +If you aren't developing hREA yourself, the bundled release is a much easier way to set up the app: simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the [Holochain Launcher](https://github.com/holochain/launcher). DNAs are the highest-level units of functionality available in the system. One is available for each of the [modules in the hREA framework](https://github.com/holo-rea/ecosystem/wiki/Modules-in-the-HoloREA-framework). @@ -215,7 +219,7 @@ The Rust crates in [**`lib/`**](lib/) provide some abstract functionality and ty Manages CRUD and indexing operations for entries, including DNA-local and remote-DNA indexing capabilities. Leverages [DNA Auth Resolver](https://github.com/holochain-open-dev/dna-auth-resolver/) to grant capabilities for cross-DNA API calls. -#### `hdk_type_serialization_macros` +#### `hdk_uuid_types` Exports an `addressable_identifier!()` macro which wraps a primitive type implementing `Into` in a struct scoping it to a `DnaHash`; as well as `dna_scoped_string!()` which does the same for `String`. diff --git a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t index e17beec55..af5619797 100644 --- a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_<%= h.changeCase.snake(zome_name) %>_storage = { path = "../storage" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t index e94d95493..e701e087d 100644 --- a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t @@ -9,7 +9,7 @@ edition = "2018" [dependencies] # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_records = { path = "../../../lib/hdk_records" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/new-index-zome/index.js b/_templates/init-zome/new-index-zome/index.js index 356e37ffd..1354ed580 100644 --- a/_templates/init-zome/new-index-zome/index.js +++ b/_templates/init-zome/new-index-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `happs/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. 
`bundles/dna/observation`)', required: true, }, { type: 'input', diff --git a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t index a2f11b184..ed173043f 100644 --- a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_records = { path = "../../../../../lib/hdk_records" } vf_attributes_hdk = { path = "../../../../../lib/vf_attributes_hdk" } diff --git a/_templates/init-zome/new-zome/index.js b/_templates/init-zome/new-zome/index.js index 6b040f920..a7111c72c 100644 --- a/_templates/init-zome/new-zome/index.js +++ b/_templates/init-zome/new-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `happs/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. `bundles/dna/observation`)', required: true, }, { type: 'input', diff --git a/_templates/init-zome/new-zome/zome_Cargo.toml.t b/_templates/init-zome/new-zome/zome_Cargo.toml.t index ddad4de57..7c85ee105 100644 --- a/_templates/init-zome/new-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_<%= h.changeCase.snake(zome_name) %>_defs = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/defs" } hc_zome_<%= h.changeCase.snake(zome_name) %>_rpc = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/rpc" } diff --git a/apps/holorea-graphql-explorer/package.json b/apps/holorea-graphql-explorer/package.json index 6c6784894..d9df7e435 100644 --- a/apps/holorea-graphql-explorer/package.json +++ b/apps/holorea-graphql-explorer/package.json @@ -24,7 +24,7 @@ "rimraf": "^3.0.2" }, "scripts": { - "start": "REACT_APP_HC_APP_ID='hrea_obs_agent' REACT_APP_HC_CONN_URL='ws://localhost:4000' BROWSER=none react-scripts start", + "start": "REACT_APP_HC_APP_ID='hrea_suite' REACT_APP_HC_CONN_URL='ws://localhost:4000' BROWSER=none react-scripts start", "build": "rimraf ui.zip && rimraf build && react-scripts build && cd ./build && bestzip ../ui.zip * ", "test": "react-scripts test", "eject": "react-scripts eject" diff --git a/apps/holorea-graphql-explorer/src/App.tsx b/apps/holorea-graphql-explorer/src/App.tsx index 848968ff7..7d026baff 100644 --- a/apps/holorea-graphql-explorer/src/App.tsx +++ b/apps/holorea-graphql-explorer/src/App.tsx @@ -6,7 +6,7 @@ import GraphiQL, { Fetcher } from 'graphiql' // @ts-ignore import GraphiQLExplorer from 'graphiql-explorer' -import bindSchema, { openConnection, DNAMappings, CellId } from '@valueflows/vf-graphql-holochain' +import bindSchema, { autoConnect } from '@valueflows/vf-graphql-holochain' import 'graphiql/graphiql.css' import './App.css' @@ -28,11 +28,6 @@ interface State { explorerIsOpen: boolean, } -type ActualInstalledCell = { // :TODO: remove this when fixed in tryorama - cell_id: CellId; - role_id: string; -} - class App extends Component { _graphiql?: GraphiQL state = { @@ -49,38 +44,11 @@ class App extends Component { } async connect () { - let dnaMappings: DNAMappings - - // this is allowed to be undefined, but if it - // is, it will fall back to assuming a - // Holochain Launcher environment - let connectionUrl = process.env.REACT_APP_HC_CONN_URL as string - 
- const { connectionPromise, socketURI } = openConnection(connectionUrl); - const conn = await connectionPromise - const appInfo = await conn.appInfo({ installed_app_id: (process.env.REACT_APP_HC_APP_ID as string) }) - if (!appInfo) { - throw new Error(`appInfo call failed for Holochain app '${process.env.REACT_APP_HC_APP_ID}' - ensure the name is correct and that the agent's app installation has not failed`) - } - - dnaMappings = (appInfo['cell_data'] as unknown[] as ActualInstalledCell[]).reduce((mappings, { cell_id, role_id }) => { - const hrea_cell_match = role_id.match(/hrea_(\w+)_\d+/) - if (!hrea_cell_match) { return mappings } - - mappings[hrea_cell_match[1] as keyof DNAMappings] = cell_id - return mappings - }, {} as DNAMappings) - console.log('Connecting to detected Holochain cells:', dnaMappings) - - const schema = await bindSchema({ - dnaConfig: dnaMappings, - conductorUri: socketURI - }) - // @ts-ignore not sure why this is error/red (Connor) + let { dnaConfig, conductorUri } = await autoConnect() + const schema = await bindSchema({ dnaConfig, conductorUri }) const link = new SchemaLink({ schema }) this.setState({ - // @ts-ignore not sure why this is error/red (Connor) schema, link, fetcher: ((operation: any) => { diff --git a/bundles_templates/full_suite_release_template/happ.yaml b/bundles/app_templates/full_suite/happ.yaml similarity index 68% rename from bundles_templates/full_suite_release_template/happ.yaml rename to bundles/app_templates/full_suite/happ.yaml index d05f52ffe..5e4dcc279 100644 --- a/bundles_templates/full_suite_release_template/happ.yaml +++ b/bundles/app_templates/full_suite/happ.yaml @@ -7,40 +7,40 @@ roles: strategy: create deferred: false dna: - url: "/hrea_specification.dna" + bundled: "/specification/hrea_specification.dna" clone_limit: 0 - id: hrea_observation_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_observation.dna" + bundled: "/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_planning_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_planning.dna" + bundled: "/planning/hrea_planning.dna" clone_limit: 0 - id: hrea_agreement_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_agreement.dna" + bundled: "/agreement/hrea_agreement.dna" clone_limit: 0 - id: hrea_proposal_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_proposal.dna" + bundled: "/proposal/hrea_proposal.dna" clone_limit: 0 - id: hrea_agent_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_agent.dna" + bundled: "/agent/hrea_agent.dna" clone_limit: 0 diff --git a/bundles/dna_templates/agent/dna.yaml b/bundles/dna_templates/agent/dna.yaml new file mode 100644 index 000000000..58d9b5007 --- /dev/null +++ b/bundles/dna_templates/agent/dna.yaml @@ -0,0 +1,7 @@ +manifest_version: "1" +name: "hrea_agent" +uuid: "" +properties: null +zomes: + - name: agent_registration + path: "/target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" diff --git a/bundles/dna_templates/agreement/dna.yaml b/bundles/dna_templates/agreement/dna.yaml new file mode 100644 index 000000000..1d1ba421c --- /dev/null +++ b/bundles/dna_templates/agreement/dna.yaml @@ -0,0 +1,25 @@ +manifest_version: "1" +name: "hrea_agreement" +uuid: "" +properties: + agreement: + index_zome: agreement_index + agreement_index: + record_storage_zome: agreement + remote_auth: + permissions: + - extern_id: index_agreement_economic_events + allowed_method: [agreement_index, index_agreement_economic_events] + - 
extern_id: index_agreement_commitments + allowed_method: [agreement_index, index_agreement_commitments] +zomes: + + # application zomes + - name: agreement + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" + - name: agreement_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" + + # utility zomes + - name: remote_auth + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml new file mode 100644 index 000000000..f7a2ecf12 --- /dev/null +++ b/bundles/dna_templates/observation/dna.yaml @@ -0,0 +1,80 @@ +manifest_version: "1" +name: "hrea_observation" +uuid: "" +properties: + process: + index_zome: process_index + process_index: + record_storage_zome: process + economic_event: + index_zome: economic_event_index + process_index_zome: process_index + economic_resource_index_zome: economic_resource_index + economic_resource_zome: economic_resource + economic_event_index: + record_storage_zome: economic_event + economic_resource: + index_zome: economic_resource_index + economic_resource_index: + record_storage_zome: economic_resource + fulfillment: + index_zome: fulfillment_index + economic_event_index_zome: economic_event_index + fulfillment_index: + record_storage_zome: fulfillment + satisfaction: + index_zome: satisfaction_index + economic_event_index_zome: economic_event_index + satisfaction_index: + record_storage_zome: satisfaction + remote_auth: + permissions: + - extern_id: index_process_committed_inputs + allowed_method: [process_index, index_process_committed_inputs] + - extern_id: index_process_committed_outputs + allowed_method: [process_index, index_process_committed_outputs] + - extern_id: index_process_intended_inputs + allowed_method: [process_index, index_process_intended_inputs] + - extern_id: index_process_intended_outputs + allowed_method: [process_index, index_process_intended_outputs] + + - extern_id: create_fulfillment + allowed_method: [fulfillment, fulfillment_created] + - extern_id: update_fulfillment + allowed_method: [fulfillment, fulfillment_updated] + - extern_id: delete_fulfillment + allowed_method: [fulfillment, fulfillment_deleted] + + - extern_id: create_satisfaction + allowed_method: [satisfaction, satisfaction_created] + - extern_id: update_satisfaction + allowed_method: [satisfaction, satisfaction_updated] + - extern_id: delete_satisfaction + allowed_method: [satisfaction, satisfaction_deleted] +zomes: + # application zomes + - name: economic_event + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" + - name: economic_resource + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" + - name: process + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" + - name: fulfillment + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" + - name: satisfaction + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" + + - name: economic_event_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" + - name: economic_resource_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" + - name: process_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" + - name: fulfillment_index + path: 
"/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" + - name: satisfaction_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" + + # utility zomes + - name: remote_auth + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna_templates/planning/dna.yaml b/bundles/dna_templates/planning/dna.yaml new file mode 100644 index 000000000..0201e49c1 --- /dev/null +++ b/bundles/dna_templates/planning/dna.yaml @@ -0,0 +1,52 @@ +manifest_version: "1" +name: "hrea_planning" +uuid: "" +properties: + commitment: + index_zome: commitment_index + commitment_index: + record_storage_zome: commitment + intent: + index_zome: intent_index + intent_index: + record_storage_zome: intent + fulfillment: + index_zome: fulfillment_index + commitment_index_zome: commitment_index + fulfillment_index: + record_storage_zome: fulfillment + satisfaction: + index_zome: satisfaction_index + intent_index_zome: intent_index + commitment_index_zome: commitment_index + commitment_zome: commitment + satisfaction_index: + record_storage_zome: satisfaction + remote_auth: + permissions: + - extern_id: index_intent_proposed_in + allowed_method: [intent_index, index_intent_proposed_in] +zomes: + + # application zomes + - name: commitment + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" + - name: intent + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" + - name: fulfillment + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" + - name: satisfaction + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" + + - name: commitment_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" + - name: intent_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" + - name: fulfillment_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" + - name: satisfaction_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" + + # utility zomes + - name: remote_auth + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna_templates/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml new file mode 100644 index 000000000..6f627e219 --- /dev/null +++ b/bundles/dna_templates/proposal/dna.yaml @@ -0,0 +1,31 @@ +manifest_version: "1" +name: "hrea_proposal" +uuid: "" +properties: + proposal: + index_zome: proposal_index + proposal_index: + record_storage_zome: proposal + proposed_intent: + index_zome: proposed_intent_index + proposal_index_zome: proposal_index + proposed_intent_index: + record_storage_zome: proposed_intent + proposed_to: + index_zome: proposed_to_index + proposal_index_zome: proposal_index + proposed_to_index: + record_storage_zome: proposed_to +zomes: + - name: proposal + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" + - name: proposal_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" + - name: proposed_intent + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" + - name: proposed_intent_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" + - name: proposed_to + path: 
"/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" + - name: proposed_to_index + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" diff --git a/happs/specification/dna.yaml b/bundles/dna_templates/specification/dna.yaml similarity index 52% rename from happs/specification/dna.yaml rename to bundles/dna_templates/specification/dna.yaml index 85967edbd..3f15dda3c 100644 --- a/happs/specification/dna.yaml +++ b/bundles/dna_templates/specification/dna.yaml @@ -13,16 +13,16 @@ properties: zomes: # application zomes - name: action - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" - name: process_specification - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" - name: resource_specification - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" - name: resource_specification_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" - name: unit - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" # utility zomes - name: remote_auth - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/full_suite/happ.yaml b/bundles/full_suite/happ.yaml deleted file mode 100644 index 0fa45f978..000000000 --- a/bundles/full_suite/happ.yaml +++ /dev/null @@ -1,25 +0,0 @@ -manifest_version: "1" -name: hrea_suite -description: Complete configuration of all modules in the hREA suite. -roles: - - id: hrea_observation_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../happs/observation/hrea_observation.dna" - clone_limit: 0 - - id: hrea_planning_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../happs/planning/hrea_planning.dna" - clone_limit: 0 - - id: hrea_agreement_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../happs/agreement/hrea_agreement.dna" - clone_limit: 0 diff --git a/bundles/obs_and_agent/happ.yaml b/bundles/obs_and_agent/happ.yaml deleted file mode 100644 index 322695fe9..000000000 --- a/bundles/obs_and_agent/happ.yaml +++ /dev/null @@ -1,18 +0,0 @@ -manifest_version: "1" -name: hrea_obs_agent -description: Observation and agent modules (eg. 
anonymized public supply chain) -roles: - - id: hrea_observation_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../happs/observation/hrea_observation.dna" - clone_limit: 0 - - id: hrea_agent_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../happs/agent/hrea_agent.dna" - clone_limit: 0 diff --git a/bundles/web-app/web-happ.yaml b/bundles/web-app/web-happ.yaml new file mode 100644 index 000000000..b21a80def --- /dev/null +++ b/bundles/web-app/web-happ.yaml @@ -0,0 +1,7 @@ +--- +manifest_version: "1" +name: hrea +ui: + bundled: "../../apps/holorea-graphql-explorer/ui.zip" +happ_manifest: + bundled: "../app/full_suite/hrea_suite.happ" diff --git a/default.nix b/default.nix index 2b53242f2..3dcdbf638 100644 --- a/default.nix +++ b/default.nix @@ -1,9 +1,9 @@ let - holonixRev = "2f7b8047d6314f64fca34394a52d465c18b2f4d5"; + holonixRev = "52158409f9b76b442e592e8f06632b0e57a6c365"; holonixPath = builtins.fetchTarball "https://github.com/holochain/holonix/archive/${holonixRev}.tar.gz"; holonix = import (holonixPath) { - holochainVersionId = "v0_0_123"; + holochainVersionId = "v0_0_127"; }; nixpkgs = holonix.pkgs; in nixpkgs.mkShell { @@ -12,5 +12,6 @@ in nixpkgs.mkShell { # :TODO: binaryen, wasm-opt? # Additional packages go here nodejs-16_x + nodePackages.pnpm ]; } diff --git a/docs/README.md b/docs/README.md index 19b725f83..c15896155 100644 --- a/docs/README.md +++ b/docs/README.md @@ -3,7 +3,7 @@ - [Quick start](#quick-start) - - [Install Required Binaries](#install-required-binaries) + - [Install required binaries](#install-required-binaries) - [Setup the project](#setup-the-project) - [Running](#running) - [Contributing](#contributing) @@ -16,6 +16,8 @@ - [File headers](#file-headers) - [Known issues](#known-issues) - [Gotchas](#gotchas) +- [Publishing](#publishing) + - [Publishing Node packages](#publishing-node-packages) - [Multi-project setup](#multi-project-setup) @@ -73,7 +75,7 @@ Scripts in this repository respond to the following env vars: Execution parameters: - `HOLOCHAIN_APP_PORT` sets the websocket port for the app interface when running the conductor in a development sandbox. See the `dht:conductor` script in `package.json`. -- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. It is called to finalise packaging the DNA bundles in `happs/` and to run the dev environment conductor. +- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. It is called to finalise packaging the bundles in `bundles/` and to run the dev environment conductor. Build parameters: @@ -83,15 +85,15 @@ Test parameters: - `TRYORAMA_HOLOCHAIN_PATH` determines the path to the `holochain` binary which will ultimately execute all tests. If unset, `holochain` will be presumed to be on the user's `$PATH`. - `GRAPHQL_DEBUG=1` will enable debug output for the parameters transmitted and received by the GraphQL connection used in tests. -- `WASM_LOG=debug` `RUST_LOG=error` `RUST_BACKTRACE=1` are all set when executing the integration test suite. +- `WASM_LOG=debug` `RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error,"` `RUST_BACKTRACE=1` are all set when executing the integration test suite. ### Debugging Most of the time during development, you won't want to run the whole test suite but rather just those tests you're currently working on. 
The usual workflow when developing a module in isolation is: -1. `npm run build:crates` from the repository root to rebuild the module(s) you are working on. -2. `WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 npx tape test/**/*.js` from the `test` directory to run specific tests, substituting a path to an individual file. Note the [env vars](#environment-variables) used here are needed to obtain debug output from the zome code. +1. `npm run build:holochain:dev` from the repository root to rebuild the module(s) you are working on. +2. `WASM_LOG=debug RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error," RUST_BACKTRACE=1 npx tape test/**/*.js` from the `test` directory to run specific tests, substituting a path to an individual file. Note the [env vars](#environment-variables) used here are needed to obtain debug output from the zome code. Getting debug output printed to the screen depends on where you are logging from. @@ -107,6 +109,13 @@ Getting debug output printed to the screen depends on where you are logging from Debug output from the Holochain conductor can be noisy, which is why all test scripts coded in `package.json` pipe the test output to [faucet](https://github.com/substack/faucet). Remember that you can always add nonsense strings to your debug output and pipe things into `| grep 'XXXX'` instead of `| npx faucet` if you need to locate something specific and the text is overwhelming. +Another way to reduce noise from the Holochain conductor logs, if you want to go down to debug-level logs, is to use a config +var like the following for `RUST_LOG`, which `holochain` will respect: +``` +RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error" +``` +You can [learn more here](https://rust-lang-nursery.github.io/rust-cookbook/development_tools/debugging/config_log.html). + ### Advanced execution If you look at the commands in `package.json` you will see that they are namespaced into groups of functionality. You can also see which commands depend on each other. Most of the time it will be more efficient to understand the command structure and run individual commands than it will be to boot the whole system together. @@ -182,6 +191,20 @@ You can configure your editor to automatically add new header comment blocks to - These errors are often encountered when confusing cross-DNA link fields for same-DNA links. Check that you are using the appropriate helpers for the link type (`_index` vs `_remote_index` helpers). +## Publishing + +### Publishing Node packages + +The JavaScript API client modules are published to NPM with PNPM. **You must use PNPM** to publish these, since packages contain PNPM-specific workspace metadata that NPM does not know how to deal with. + +- Ensure all packages requiring publication have their `version` field in `package.json` updated to reflect the next version to be published. +- Ensure a successful `pnpm run build` completes after the version updates are made. +- Run `pnpm -r publish --access public` from the root directory to publish all packages with new versions. 
+ +TODO: instructions for publishing Rust crates + +TODO: instructions for publishing built DNA & zome artifacts to Holochain Devhub + ## Multi-project setup diff --git a/docs/Workflow-automation.md b/docs/Workflow-automation.md index b1bef15e3..74c0f5fe9 100644 --- a/docs/Workflow-automation.md +++ b/docs/Workflow-automation.md @@ -26,13 +26,13 @@ Whenever you find yourself doing something repetitive, consider adding a Hygen t ## Creating new DNAs -1. `cd happs/` +1. `cd bundles/dna/` 2. `hc init <NEW_DNA_NAME>` scaffolds a new DNA folder named `NEW_DNA_NAME`. 3. Edit `app.json` in the newly created folder as appropriate. 4. Remove these generated files from the newly created directory: - `test/` (integration tests are all contained in the top-level `test` directory) - `.gitignore` (already taken care of via project-global ignore file) -5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd happs/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script. +5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd bundles/dna/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script. 6. Edit `conductor-config.toml` as appropriate to include instance configuration & bridging for any new DHTs to be loaded from this DNA in the local test environment. diff --git a/docs/completions.md b/docs/completions.md new file mode 100644 index 000000000..5d90ee500 --- /dev/null +++ b/docs/completions.md @@ -0,0 +1,222 @@ +# holo-rea GraphQL API Completions & Statuses + +[![hackmd-github-sync-badge](https://hackmd.io/CWcN1gbER9ioLVy8xGUj1Q/badge)](https://hackmd.io/CWcN1gbER9ioLVy8xGUj1Q) + +From the point of view of someone calling through GraphQL, this document summarizes the overall status of each function that exists in the GraphQL schema. Many functions are not yet implemented, so it is important to know upfront which are and which aren't. 
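As a quick orientation to how these functions are exercised, here is a minimal sketch of calling one of the implemented mutations through the JavaScript client. It assumes a running conductor plus the `@valueflows/vf-graphql-holochain` and `@apollo/client` packages, and mirrors the `autoConnect`/`bindSchema` flow used by the GraphQL explorer app; the exact `createUnit` payload and response shapes are assumed to follow the vf-graphql schema.

```
// Minimal sketch: run one "Implemented & Tested" mutation (createUnit).
// Assumes a local conductor is reachable; shapes follow vf-graphql.
import { ApolloClient, InMemoryCache, gql } from '@apollo/client'
import { SchemaLink } from '@apollo/client/link/schema'
import bindSchema, { autoConnect } from '@valueflows/vf-graphql-holochain'

async function main(): Promise<void> {
  // detect the running conductor and map the hREA cells automatically
  const { dnaConfig, conductorUri } = await autoConnect()
  const schema = await bindSchema({ dnaConfig, conductorUri })
  const client = new ApolloClient({
    link: new SchemaLink({ schema }),
    cache: new InMemoryCache(),
  })

  const result = await client.mutate({
    mutation: gql`
      mutation($unit: UnitCreateParams!) {
        createUnit(unit: $unit) { unit { id label symbol } }
      }
    `,
    variables: { unit: { label: 'kilogram', symbol: 'kg' } },
  })
  console.log(result.data)
}

main().catch(console.error)
```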
+ +A filtered list of related github issues for tracking these work statuses, so that you can contribute, or report or discuss issues, can be found here: https://github.com/holo-rea/holo-rea/labels/graphql-api + +## GraphQL Implementation + +### Mutations + +#### Implemented & Tested +- [x] createEconomicEvent + - [x] fixed - `newInventoriedResource` `name` property is not persisted - [issue #202](https://github.com/holo-rea/holo-rea/issues/202) +- [x] createUnit +- [x] createProcess + +#### Implemented & Not Yet Tested +- [x] createAgreement +- [x] updateAgreement +- [x] deleteAgreement +- [x] createCommitment +- [x] updateCommitment +- [x] deleteCommitment +- [x] updateEconomicEvent +- [x] deleteEconomicEvent +- [x] createFulfillment +- [x] updateFulfillment +- [x] deleteFulfillment +- [x] updateEconomicResource +- [x] createIntent +- [x] updateIntent +- [x] deleteIntent +- [x] updateProcess +- [x] deleteProcess +- [x] createProcessSpecification +- [x] updateProcessSpecification +- [x] deleteProcessSpecification +- [x] createProposal +- [x] updateProposal +- [x] deleteProposal +- [x] proposeIntent +- [x] deleteProposedIntent +- [x] proposeTo +- [x] deleteProposedTo +- [x] updateResourceSpecification +- [x] deleteResourceSpecification +- [x] createSatisfaction +- [x] updateSatisfaction +- [x] deleteSatisfaction +- [x] updateUnit +- [x] deleteUnit + +#### Partially Implemented +- [x] createResourceSpecification + - [ ] lacking `defaultUnitOfResource` - [issue #155](https://github.com/holo-rea/holo-rea/issues/155) + +#### Has Minor Bug + +#### Has Fatal Bug + +#### Not Yet Implemented +- [ ] deleteEconomicResource - [issue #67](https://github.com/holo-rea/holo-rea/issues/67) +- [ ] createProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] updateProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] deleteProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] createPerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updatePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] deletePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] createOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updateOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] deleteOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] createAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updateAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] deleteAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] createAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updateAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] deleteAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) + + + +### Queries + +#### Implemented & Tested +- [x] action +- [x] actions +- [x] unit +- [x] economicEvent + +#### Implemented & Not Yet Tested +- [x] agreement +- [x] commitment +- [x] resourceSpecification +- [x] processSpecification +- [x] process +- [x] intent +- [x] fulfillment +- [x] satisfaction +- [x] proposal + +__Has Partial Implementation__ +- [x] myAgent + - [ ] TODO: define what's lacking +- [x] agent + - [ ] TODO: define what's lacking +- [x] 
economicResources + - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85) +- [x] economicEvents + - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85) +- [x] economicResource + - [ ] `primaryAccountable` is not implemented - [issue #133](https://github.com/holo-rea/holo-rea/issues/133) + +__Has Minor Bug__ + + +__Has Fatal Bug__ +- [ ] agents (response always gives empty array, wrongly - [issue #210](https://github.com/holo-rea/holo-rea/issues/210)) + +__Not Yet Implemented__ +- [ ] proposals - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] satisfactions - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] fulfillments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] intents - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] commitments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] processes - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] productBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] productBatches - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) and [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] units - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] processSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] resourceSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] agreements - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) +- [ ] organization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] organizations - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] person - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] people - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] agentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] agentRelationships - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] agentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] agentRelationshipRoles - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) + +### Resolvers + +(https://www.apollographql.com/docs/apollo-server/data/resolvers/) +Connor todo + + + +## System of Record Comparison + +All of the implementation details should be sourced from the [Valueflows RDF Turtle file](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL) (here's a [Formatted View](http://150.146.207.114/lode/extract?owlapi=true&url=https://lab.allmende.io/valueflows/valueflows/-/raw/master/release-doc-in-process/all_vf.TTL)), which is the system of record. While you are looking around, please note that the objects themselves don't have property definitions. The properties themselves define which objects they apply to in the `rdfs:domain` field. The range of values the properties can take is defined by the `rdfs:range` field. This is because RDF views these things like arrows or maps, going from the domain to the range. 
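To make that arrows-from-domain-to-range framing concrete, here is a tiny illustrative sketch; the types are hypothetical stand-ins for the RDF classes, not definitions from the project.

```
// Hypothetical TypeScript rendering of the RDF idea above: a property
// with rdfs:domain EconomicEvent and rdfs:range om2:Measure behaves
// like a map from the domain type to the range type.
interface EconomicEvent { id: string }
interface Measure { hasNumericalValue: number; hasUnit: string }

// read as: an arrow EconomicEvent -> Measure
type ResourceQuantityProperty = (subject: EconomicEvent) => Measure

const resourceQuantity: ResourceQuantityProperty = (_subject) => ({
  hasNumericalValue: 1,
  hasUnit: 'kilogram',
})
```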
+ +The top level objects found in the spec are: + +**Key** +| symbol | meaning | +| ------------------- | --------------------- | +| :grey_exclamation: | Not used | +| - | Not found/not started | +| :hammer_and_wrench: | In progress | +| :heavy_check_mark: | Done | +| K | Knowledge Layer | +| P | Planning Layer | +| O | Observation Layer | + +**Outside Ontologies** +| RDF Object | vf-schema file | zome | comments | +| --------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------- | +| [foaf:Agent](http://xmlns.com/foaf/spec/) | :grey_exclamation: [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql) | :grey_exclamation: | | +| [org:Organization](https://www.w3.org/TR/vocab-org/) | :grey_exclamation: | :grey_exclamation: | | +| [om2:Measure](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L64) | [lib/vf_measurement](https://github.com/holo-rea/holo-rea/blob/sprout/lib/vf_measurement/src/lib.rs#L19) | | +| [om2:Unit](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | :grey_exclamation: [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L48) | :grey_exclamation: [rea_unit](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_unit) | This is a technicality. The general shape of it is correct, however the ontology represents a hierarchy of units that are not correctly reflected in the backend since it only stores a label and a symbol. The full ontology allows for more flexibility with prefixes, dimension, exponent, etc.; it has enough information to allow conversion between units. It would be hard to implement without a triple-store. | +| [geo:SpatialThing](https://www.w3.org/2003/01/geo/) | [geolocation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/geolocation.gql#L15) | - | | +| [time](https://www.w3.org/2006/time#) | :grey_exclamation: | :grey_exclamation: | vf-schema: The GraphQL spec only uses the `DateTime and Duration` scalars. | +| [cd:created](https://www.dublincore.org/specifications/dublin-core/dcmi-terms/#created) | :grey_exclamation: | :grey_exclamation: | vf-schema: GraphQL spec only uses the `DateTime` scalar. | +| [skos:note](https://www.w3.org/TR/skos-reference/#note) | :grey_exclamation: | :grey_exclamation: | vf-schema: Just a `String`. | +| [dtype:numericUnion](http://www.linkedmodel.org/schema/dtype#numericUnion) | :grey_exclamation: | :grey_exclamation: | This is only needed for the [`om2:hasNumericalValue`](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L549), so it's only internal. | + + +You may notice there is no specification of an Agent. This is because the Valueflows RDF spec uses the [FOAF Vocabulary](http://xmlns.com/foaf/spec/) and the [Organization Ontology](https://www.w3.org/TR/vocab-org/). The holo-rea project has [it's own set of concepts right now](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql). 
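For a sense of what those agent concepts look like from the API consumer's side, a rough sketch follows; the field list is a from-memory approximation of `agent.gql` and should be checked against the linked schema rather than trusted.

```
// Approximation only: not copied from agent.gql. In the GraphQL layer,
// Person and Organization are both modelled as kinds of Agent.
interface Agent {
  id: string        // globally unique agent identifier
  name: string
  image?: string    // URI
  note?: string
}

type AgentKind = 'Person' | 'Organization'
```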
+ +| layer | RDF object | vf-schemas file | zome | hrea "module" or DNA | comments | +| ------| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| K | [Scenario Definition](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L124) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L44) | - | - | | +| K | [Process Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L104) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L70) | [rea_process_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | | +| K | [Resource Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L92) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L45) | [rea_resource_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_resource_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | zome: Missing `resource_classified_as`, `default_unit_of_resource`. | +| K | [Action](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L32) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L19) | [rea_action](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_action/zome) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | vf-schema: Missing `containedEffect`, `locationEffect`. zome: Same as vf-schema. | +| K | [Agent Relationship Role](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L74) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L126) | - | - | vf-schema: Missing `roleBehavior`. | +| K | [Role Behavior](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L80) | - | - | - | vf-schema: This doesn't seem to be implemented yet. 
| +| K | [Recipe Exchange](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L118) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L106) | - | - | | +| K | [Recipe Flow](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L112) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L53) | - | - | | +| K | [Recipe Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L98) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L84) | - | - | | +| K | [Recipe Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L18) | - | - | | +| P | [Scenario](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L16) | - | - | | +| P | [Plan](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L133) | [plan](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/plan.gql#L16) | - | - | vf-schema: has extra fields `deletable` and `inScopeOf` are these for internal use? | +| P, O | [Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L196) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L155) | [rea_process](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `plannedIn` What is `unplannedEvents`? For the inverse relationships, do we want to group all `Intent`s, `Commitment`s, and `EconomicEvent`s together in the `inputs` and `outputs`? How is `track` and `trace` being handled? dna: Has extra `before` and `after` fields. `planned_within` is present, despite no implementation (because it just points to an `entryHash`.) This is often placed in with Observation layer, or on the line between Observation and Planning. | +| P | [Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L139) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L94) | [rea_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_intent) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `provider`, `reciever`, `atLocation`. Has a `satisfiedBy` inverse map to `Satisfaction`'s `satisfies`. 
| +| P | [Proposed Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L151) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L49) | [rea_proposed_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_intent) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | | +| P | [Proposal](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L145) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L16) | [rea_proposal](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposal) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | vf-schema: Missing `eligibleLocation`. Has a `publishes` inverse map to `ProposedIntent`'s `publishedIn`. zome: same. | +| P | [Proposed To](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L157) | [proposal.agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/bridging/proposal.agent.gql#L20) | [rea_proposed_to](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_to) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | | +| P | [Commitment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L163) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L24) | [rea_commitment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_commitment) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `atLocation` and `clauseOf`. Has `fullfilledBy` and `satisfies` inverse maps to `Fulfillment`'s`fulfill` and `Satisfation`'s `satisfiedBy`. zome: has `plan` instead of `planed_within`. | +| P | [Satisfaction](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L169) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L188) | [rea_satisfaction](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_satisfaction) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | zome: allows `satisfied_by` to only be either one `EconomicEvent` or `Commitment`. Is this correct? | +| P | [Agreement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [agreement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agreement.gql#L19) | [rea_agreement](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_agreement) | [agreement](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/agreement) | | +| P | [Claim](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L18) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. 
| +| O | [Economic Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L190) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L83) | [rea_economic_resource](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_resource) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `currentLocation`. Has `contains`, `track`, `trace` maps as additions. | +| O | [dfc:ProductBatch](http://www.virtual-assembly.org/DataFoodConsortium/BusinessOntology) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L139) | - | - | vf-schema: Missing links to `identifies`, but that probably doesn't matter for our use case. | +| O | [Economic Event](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L202) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L19) | [rea_economic_event](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_event) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `realizationOf`, `image`, `provider`, `receiver`, `atLocation`, `toLocation`. Has `track` and `trace` going to `ProductionFlowItem`s. zome: Missing `to_location`. | +| O | [Appreciation](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L232) | [appreciation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/appreciation.gql#L17) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. | +| P, O | [Fulfillment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L214) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L166) | [rea_fulfillment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_fulfillment) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | !! Discrepancy between "layer" and "vf-schema" files. 
FIXME | +| O | [Settlement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L226) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L61) | - | - | | +| O | [Agent Relationship](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L208) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L104) | - | - | | + +There are internal system objects used to help specify the rules of logic around the actions: + +* [Resource Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L1278) +* [Contained Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L68) +* [Location Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L62) +* [Onhand Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L56) +* [Input/Output](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L44) +* [Pairs With](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L38) + +In the gql version, these are just strings (need to learn more specifics). diff --git a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml index e9655016c..b1b5a3181 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_economic_event_defs = { path = "../../../../../lib/rea_economic_event/defs" } hc_zome_rea_economic_event_lib = { path = "../../../../../lib/rea_economic_event/lib" } diff --git a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml index f53a1164a..b0ae572d0 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_economic_resource_storage_consts = { path = "../../../../../lib/rea_economic_resource/storage_consts" } hc_zome_rea_economic_resource_defs = { path = "../../../../../lib/rea_economic_resource/defs" } diff --git a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml index c889e821e..b6b9b62fd 100644 --- a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml +++ b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_resource_specification_defs = { path = "../../../../../lib/rea_resource_specification/defs" } hc_zome_rea_resource_specification_rpc = { path = "../../../../../lib/rea_resource_specification/rpc" } diff --git a/happs/agent/dna.yaml b/happs/agent/dna.yaml 
deleted file mode 100644 index ced7b289c..000000000 --- a/happs/agent/dna.yaml +++ /dev/null @@ -1,7 +0,0 @@ -manifest_version: "1" -name: "hrea_agent" -uuid: "" -properties: null -zomes: - - name: agent_registration - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" diff --git a/happs/agreement/dna.yaml b/happs/agreement/dna.yaml deleted file mode 100644 index b698fe638..000000000 --- a/happs/agreement/dna.yaml +++ /dev/null @@ -1,22 +0,0 @@ -manifest_version: "1" -name: "hrea_agreement" -uuid: "" -properties: - remote_auth: - permissions: - # :TODO: actually these need to be rearchitected for modular indexing behaviour - - extern_id: index_realized_events - allowed_method: [agreement_index, index_realized_events] - - extern_id: index_agreement_clauses - allowed_method: [agreement_index, index_agreement_clauses] -zomes: - - # application zomes - - name: agreement - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" - - name: agreement_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" - - # utility zomes - - name: remote_auth - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/happs/observation/dna.yaml b/happs/observation/dna.yaml deleted file mode 100644 index 7bbaaaea1..000000000 --- a/happs/observation/dna.yaml +++ /dev/null @@ -1,78 +0,0 @@ -manifest_version: "1" -name: "hrea_observation" -uuid: "" -properties: - process: - index_zome: process_index - process_index: - record_storage_zome: process - economic_event: - index_zome: economic_event_index - process_index_zome: process_index - economic_resource_index_zome: economic_resource_index - economic_resource_zome: economic_resource - economic_event_index: - record_storage_zome: economic_event - economic_resource: - index_zome: economic_resource_index - economic_resource_index: - record_storage_zome: economic_resource - fulfillment: - index_zome: fulfillment_index - fulfillment_index: - record_storage_zome: fulfillment - satisfaction: - index_zome: satisfaction_index - satisfaction_index: - record_storage_zome: satisfaction - remote_auth: - permissions: - - extern_id: index_process_input_commitments - allowed_method: [process, index_input_commitments] - - extern_id: index_process_output_commitments - allowed_method: [process, index_output_commitments] - - extern_id: index_process_input_intents - allowed_method: [process, index_input_intents] - - extern_id: index_process_output_intents - allowed_method: [process, index_output_intents] - - - extern_id: create_fulfillment - allowed_method: [fulfillment, fulfillment_created] - - extern_id: update_fulfillment - allowed_method: [fulfillment, fulfillment_updated] - - extern_id: delete_fulfillment - allowed_method: [fulfillment, fulfillment_deleted] - - - extern_id: create_satisfaction - allowed_method: [satisfaction, satisfaction_created] - - extern_id: update_satisfaction - allowed_method: [satisfaction, satisfaction_updated] - - extern_id: delete_satisfaction - allowed_method: [satisfaction, satisfaction_deleted] -zomes: - # application zomes - - name: economic_event - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" - - name: economic_resource - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" - - name: process - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" - - name: fulfillment - bundled: 
"../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" - - name: satisfaction - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" - - - name: economic_event_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" - - name: economic_resource_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" - - name: process_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" - - name: fulfillment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" - - name: satisfaction_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" - - # utility zomes - - name: remote_auth - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/happs/planning/dna.yaml b/happs/planning/dna.yaml deleted file mode 100644 index 92dfe65bf..000000000 --- a/happs/planning/dna.yaml +++ /dev/null @@ -1,48 +0,0 @@ -manifest_version: "1" -name: "hrea_planning" -uuid: "" -properties: - commitment: - index_zome: commitment_index - commitment_index: - record_storage_zome: commitment - intent: - index_zome: intent_index - intent_index: - record_storage_zome: intent - fulfillment: - index_zome: fulfillment_index - commitment_index_zome: commitment_index - fulfillment_index: - record_storage_zome: fulfillment - satisfaction: - index_zome: satisfaction_index - intent_index_zome: intent_index - commitment_index_zome: commitment_index - commitment_zome: commitment - satisfaction_index: - record_storage_zome: satisfaction - remote_auth: - permissions: - - extern_id: index_intent_proposals - allowed_method: [intent_index, index_intent_proposed_in] -zomes: - - # application zomes - - name: commitment - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" - - name: intent - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" - - name: fulfillment - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" - - name: satisfaction - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" - - - name: commitment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" - - name: intent_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" - - name: fulfillment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" - - name: satisfaction_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" diff --git a/happs/proposal/dna.yaml b/happs/proposal/dna.yaml deleted file mode 100644 index 1f57c32b6..000000000 --- a/happs/proposal/dna.yaml +++ /dev/null @@ -1,21 +0,0 @@ -manifest_version: "1" -name: "hrea_proposal" -uuid: "" -properties: - proposal: - index_zome: proposal_index - proposal_index: - record_storage_zome: proposal -zomes: - - name: proposal - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" - - name: proposal_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" - - name: proposed_intent - bundled: 
"../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" - - name: proposed_intent_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" - - name: proposed_to - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" - - name: proposed_to_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" diff --git a/lib/hdk_records/Cargo.toml b/lib/hdk_records/Cargo.toml index ab56361e2..b761f1920 100644 --- a/lib/hdk_records/Cargo.toml +++ b/lib/hdk_records/Cargo.toml @@ -14,7 +14,7 @@ serde_maybe_undefined = { path = "../serde_maybe_undefined" } hdk_rpc_errors = { path = "../hdk_rpc_errors" } hdk_semantic_indexes_zome_rpc = { path = "../hdk_semantic_indexes/rpc" } hc_zome_dna_auth_resolver_lib = {git = "https://github.com/holochain-open-dev/dna-auth-resolver", rev = "b1adec5", package = "hc_zome_dna_auth_resolver_lib"} -hdk_type_serialization_macros = { path = "../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../hdk_uuid_types" } [lib] crate-type = ["lib"] diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index dea7d3013..69f9b06fc 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -10,9 +10,7 @@ * @since 2021-09-15 */ use hdk::prelude::*; -use hdk::hash_path::path::Component; -use hdk_type_serialization_macros::{ - RevisionHash, +use hdk_uuid_types::{ DnaAddressable, DnaIdentifiable, }; @@ -22,17 +20,17 @@ use crate::{ Identified, Identifiable, UniquelyIdentifiable, Updateable, UpdateableIdentifier, }, - link_helpers::get_linked_addresses, + link_helpers::{ + get_linked_addresses, + get_linked_tags, + get_linked_headers, + }, identity_helpers::calculate_identity_address, records::{ create_record, read_record_entry_by_identity, - // read_record_entry_by_header, - get_latest_header_hash, }, entries::{ - try_entry_from_element, - try_decode_entry, get_entry_by_header, update_entry, delete_entry, @@ -79,16 +77,13 @@ fn read_entry_anchor_id( identity_path_address: &EntryHash, ) -> RecordAPIResult { - let mut addrs = get_linked_addresses(identity_path_address, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; - let entry_hash = addrs.pop().ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))?; - - let path_element = get(entry_hash, GetOptions::default())?; - let entry = try_entry_from_element(path_element.as_ref())?; - let path: Path = try_decode_entry(entry.to_owned())?; - let components: &Vec = path.as_ref(); - let last_component = components.last().unwrap(); - - Ok(last_component.try_into()?) + let mut tags = get_linked_tags(identity_path_address, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + tags.pop() + .map(|t| { + let bytes = &t.into_inner()[3..]; + Ok(String::from_utf8(bytes.to_vec())?) + }) + .ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))? 
} /// Given the `EntryHash` of an anchor `Path`, query the identity of the associated entry @@ -112,13 +107,13 @@ fn read_anchor_identity( pub fn read_anchored_record_entry( entry_type_root_path: &S, id_string: I, -) -> RecordAPIResult<(RevisionHash, A, T)> +) -> RecordAPIResult<(HeaderHash, A, T)> where S: AsRef, I: AsRef, T: std::fmt::Debug, B: DnaAddressable, A: DnaIdentifiable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { @@ -138,14 +133,15 @@ pub fn read_anchored_record_entry( pub fn create_anchored_record( entry_def_id: &S, create_payload: C, -) -> RecordAPIResult<(RevisionHash, A, I)> +) -> RecordAPIResult<(HeaderHash, A, I)> where S: AsRef, B: DnaAddressable, A: DnaIdentifiable, C: Into + UniquelyIdentifiable, I: Identifiable, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom, R: Clone + Identified, { // determine unique anchor index key @@ -155,14 +151,9 @@ pub fn create_anchored_record( // write base record and identity index path let (revision_id, entry_internal_id, entry_data) = create_record::(&entry_def_id, create_payload)?; - // create manually assigned identifier - let path = identity_path_for(&entry_def_id, &entry_id); - path.ensure()?; - - // link the hash identifier to the manually assigned identifier so we can determine it when reading & updating + // link the hash identifier to a new manually assigned identifier so we can determine the anchor when reading & updating let identifier_hash = calculate_identity_address(entry_def_id, &entry_internal_id)?; - create_link(identifier_hash.clone(), path.path_entry_hash()?, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; - create_link(path.path_entry_hash()?, identifier_hash.clone(), LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + link_identities(entry_def_id, &identifier_hash, &entry_id)?; Ok((revision_id, A::new(dna_info()?.hash, entry_id), entry_data)) } @@ -174,27 +165,29 @@ pub fn create_anchored_record( /// /// @see hdk_records::record_interface::UpdateableIdentifier /// -pub fn update_anchored_record( +pub fn update_anchored_record( entry_def_id: &S, - revision_id: &RevisionHash, + revision_id: &HeaderHash, update_payload: U, -) -> RecordAPIResult<(RevisionHash, B, I, I)> +) -> RecordAPIResult<(HeaderHash, B, I, I)> where S: AsRef, A: DnaAddressable, B: DnaIdentifiable, I: std::fmt::Debug + Identifiable + Updateable, U: UpdateableIdentifier, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, R: Clone + std::fmt::Debug + Identified, SerializedBytes: TryInto, { // get referenced entry and identifiers for the given header let previous: R = get_entry_by_header(revision_id)?; + let prev_entry = previous.entry(); let identity = previous.identity()?; - let identity_hash: &EntryHash = identity.as_ref(); - let maybe_current_id = read_entry_anchor_id(identity_hash); + + let identity_hash = calculate_identity_address(entry_def_id, &identity)?; + let maybe_current_id = read_entry_anchor_id(&identity_hash); // ensure the referenced entry exists and has an anchored identifier path match maybe_current_id { @@ -214,19 +207,15 @@ pub fn update_anchored_record( Some(new_id) => { if new_id != final_id { // clear any old identity path, ensuring the link structure is as expected - let mut addrs = get_linked_addresses(identity_hash, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + let mut addrs = get_linked_headers(&identity_hash, 
LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; if addrs.len() != 1 { return Err(DataIntegrityError::IndexNotFound(identity_hash.to_owned())); } let old_link = addrs.pop().unwrap(); - let old_link_id = get_latest_header_hash(old_link)?; - let old_link_hash: &HeaderHash = old_link_id.as_ref(); - delete_link(old_link_hash.to_owned())?; + delete_link(old_link)?; // create the new identifier and link to it - let path = identity_path_for(&entry_def_id, &new_id); - path.ensure()?; - create_link(identity_hash.to_owned(), path.path_entry_hash()?, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + link_identities(entry_def_id, &identity_hash, &new_id)?; // reference final ID in record updates to new identifier path final_id = new_id.into(); @@ -250,10 +239,36 @@ pub fn update_anchored_record( /// :TODO: This is a stub- include any logic necessary to handle cleanup of associated links. /// Not clearing old anchors may cause issues upon subsequent reinsert, which is not yet tested. /// -pub fn delete_anchored_record(address: &A) -> RecordAPIResult +pub fn delete_anchored_record(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { - delete_entry::(address)?; + delete_entry::(address)?; Ok(true) } + +/// Writes a bidirectional set of anchoring entries for a record so that the string-based identifier +/// can be looked up from the content-addressable `EntryHash`-based identifier +/// +fn link_identities(entry_def_id: S, identifier_hash: &EntryHash, id_string: A) -> RecordAPIResult<()> + where S: AsRef, + A: Clone + AsRef, +{ + // create manually assigned identifier + let path = identity_path_for(&entry_def_id, &id_string); + path.ensure()?; + + let identifier_tag = create_id_tag(id_string.to_owned()); + create_link(identifier_hash.clone(), path.path_entry_hash()?, identifier_tag.to_owned())?; + create_link(path.path_entry_hash()?, identifier_hash.clone(), identifier_tag)?; + + Ok(()) +} + +/// Generate a link tag for the identity anchor of a record by encoding the ID string into the tag +/// so that it can be retrieved by querying the DHT later.
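The tag layout here is worth spelling out: `RECORD_IDENTITY_ANCHOR_LINK_TAG` is now the 3-byte prefix `id|`, so the anchor's string ID can be recovered from the link tag alone, which is exactly what the `[3..]` slice in `read_entry_anchor_id` above relies on. A minimal standalone sketch of the round trip (modelling `LinkTag` as a plain `Vec<u8>`, since the real type lives in the HDK):

```rust
// Round trip of the "ID inside the link tag" encoding implemented by
// `create_id_tag` and `read_entry_anchor_id`. `LinkTag` is modelled as
// a plain Vec<u8> so the sketch runs outside a zome.
const RECORD_IDENTITY_ANCHOR_LINK_TAG: &[u8] = b"id|"; // 3-byte prefix

fn create_id_tag(id_str: &str) -> Vec<u8> {
    // concatenate prefix + raw ID bytes, as the helper above does
    [RECORD_IDENTITY_ANCHOR_LINK_TAG, id_str.as_bytes()].concat()
}

fn read_id_from_tag(tag: &[u8]) -> Result<String, std::string::FromUtf8Error> {
    // mirror the `[3..]` slice: strip the prefix, decode the rest as UTF-8
    String::from_utf8(tag[3..].to_vec())
}

fn main() {
    let tag = create_id_tag("some-record-id");
    assert_eq!(read_id_from_tag(&tag).unwrap(), "some-record-id");
}
```

Keeping the prefix length in sync with the slice offset is critical, hence the `:WARNING: byte length is important here` note attached to the tag constant in `lib.rs` further below.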
+/// +fn create_id_tag(id_str: S) -> LinkTag + where S: AsRef, +{ + LinkTag::new([crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG, id_str.as_ref().as_bytes()].concat()) +} diff --git a/lib/hdk_records/src/entry_helpers.rs b/lib/hdk_records/src/entry_helpers.rs index a3e3e3e8d..860f88b62 100644 --- a/lib/hdk_records/src/entry_helpers.rs +++ b/lib/hdk_records/src/entry_helpers.rs @@ -16,9 +16,8 @@ use hdk::prelude::{ update as hdk_update, delete_entry as hdk_delete_entry, }; -use hdk::info::dna_info; -use crate::{RevisionHash, RecordAPIResult, DataIntegrityError}; +use crate::{HeaderHash, RecordAPIResult, DataIntegrityError}; /// Helper to handle retrieving linked element entry from an element /// @@ -59,12 +58,11 @@ pub (crate) fn get_entry_by_address(address: &EntryHash) -> RecordAPIResult(address: &I) -> RecordAPIResult +pub (crate) fn get_entry_by_header(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - I: AsRef, { // :DUPE: identical to above, only type signature differs - let maybe_result = get(address.as_ref().clone(), GetOptions { strategy: GetStrategy::Latest }); + let maybe_result = get(address.clone(), GetOptions { strategy: GetStrategy::Latest }); match maybe_result { Err(_) => return Err(DataIntegrityError::EntryNotFound), _ => (), @@ -93,7 +91,7 @@ pub (crate) fn get_entry_by_header(address: &I) -> RecordAPIResult pub fn create_entry>( entry_def_id: S, entry_struct: I, -) -> RecordAPIResult<(RevisionHash, EntryHash)> +) -> RecordAPIResult<(HeaderHash, EntryHash)> where WasmError: From, Entry: TryFrom, { @@ -103,7 +101,7 @@ pub fn create_entry>( match entry_data { Ok(entry) => { let header_hash = hdk_create(CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; - Ok((RevisionHash(dna_info()?.hash, header_hash), entry_hash)) + Ok((header_hash, entry_hash)) }, Err(e) => Err(DataIntegrityError::Wasm(WasmError::from(e))), } @@ -120,14 +118,13 @@ pub fn create_entry>( /// :TODO: determine how to implement some best-possible validation to alleviate at /// least non-malicious forks in the hashchain of a datum. /// -pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( +pub fn update_entry<'a, I: Clone, E, S: AsRef>( entry_def_id: S, - address: &A, + address: &HeaderHash, new_entry: I, -) -> RecordAPIResult<(RevisionHash, EntryHash)> +) -> RecordAPIResult<(HeaderHash, EntryHash)> where WasmError: From, Entry: TryFrom, - A: AsRef, { // get initial address let entry_address = hash_entry(new_entry.clone())?; @@ -136,9 +133,9 @@ pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( let entry_data: Result = new_entry.try_into(); match entry_data { Ok(entry) => { - let updated_header = hdk_update(address.as_ref().clone(), CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; + let updated_header = hdk_update(address.clone(), CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; - Ok((RevisionHash(dna_info()?.hash, updated_header), entry_address)) + Ok((updated_header, entry_address)) }, Err(e) => Err(DataIntegrityError::Wasm(WasmError::from(e))), } @@ -148,16 +145,15 @@ pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( /// Wrapper for `hdk::remove_entry` that ensures that the entry is of the specified type before deleting. 
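The guard described in this wrapper is easy to overlook: the entry is first fetched and decoded as `T`, and only deleted if the decode succeeds, so a caller holding a valid `HeaderHash` of the wrong type cannot delete across types. A toy model of the same check, with the DHT stood in by a `HashMap` of serialized entries and an illustrative `Note` entry type (both names are assumptions for the sketch):

```rust
// Toy model of the "typecheck before delete" guard in `delete_entry`:
// fetch the entry, attempt a typed decode, and only delete on success.
use std::collections::HashMap;

struct Note(String); // illustrative entry type

impl TryFrom<Vec<u8>> for Note {
    type Error = std::string::FromUtf8Error;
    fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
        Ok(Note(String::from_utf8(bytes)?))
    }
}

fn delete_entry_checked<T: TryFrom<Vec<u8>>>(
    store: &mut HashMap<u64, Vec<u8>>, // stand-in for the DHT
    address: u64,                      // stand-in for a HeaderHash
) -> Result<bool, String> {
    let bytes = store.get(&address).ok_or("EntryNotFound")?;
    // the typed decode is the cross-type deletion guard
    let _typed: T = T::try_from(bytes.clone()).map_err(|_| "wrong entry type")?;
    store.remove(&address);
    Ok(true)
}

fn main() {
    let mut store = HashMap::new();
    store.insert(1, b"a stored note".to_vec());
    assert_eq!(delete_entry_checked::<Note>(&mut store, 1), Ok(true));
    assert!(store.is_empty());
}
```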
/// -pub fn delete_entry( - address: &A, +pub fn delete_entry( + address: &HeaderHash, ) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { // typecheck the record before deleting, to prevent any accidental or malicious cross-type deletions let _prev_entry: T = get_entry_by_header(address)?; - hdk_delete_entry(address.as_ref().clone())?; + hdk_delete_entry(address.clone())?; Ok(true) } diff --git a/lib/hdk_records/src/identity_helpers.rs b/lib/hdk_records/src/identity_helpers.rs index 5352b5e66..d8061600d 100644 --- a/lib/hdk_records/src/identity_helpers.rs +++ b/lib/hdk_records/src/identity_helpers.rs @@ -19,31 +19,13 @@ * @since 2019-05-16 */ use hdk::prelude::*; -use hdk::hash_path::path::Component; -use hdk_type_serialization_macros::{extern_id_to_bytes, bytes_to_extern_id, DnaAddressable}; +use hdk_uuid_types::DnaAddressable; use crate::{ RecordAPIResult, DataIntegrityError, - link_helpers::get_linked_addresses, entry_helpers::get_entry_by_address, }; -/// Represent `key index` record identities using native Holochain `Path` construct -/// -/// :TODO: sharding strategy for `c2` -/// -fn identity_path_for( - entry_type_root_path: S, - base_address: &A, -) -> Path - where S: AsRef, - A: DnaAddressable, -{ - let type_root = entry_type_root_path.as_ref().as_bytes().to_vec(); - - Path::from(vec![type_root.into(), extern_id_to_bytes::(base_address).into()]) -} - /// Determine root `Path` for an entry type, can be used to anchor type-specific indexes & queries. /// pub (crate) fn entry_type_root_path( @@ -58,70 +40,69 @@ pub (crate) fn entry_type_root_path( /// Determine the underlying `EntryHash` for a given `base_address` identifier, without querying the DHT. /// -pub fn calculate_identity_address( - entry_type_root_path: S, +pub fn calculate_identity_address( + _entry_type_root_path: S, base_address: &A, ) -> RecordAPIResult where S: AsRef, A: DnaAddressable, + Entry: TryFrom, + WasmError: From, { - Ok(identity_path_for(entry_type_root_path, base_address).path_entry_hash()?) -} - -/// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), -/// query the underlying address for the progenitor Entry of the record. -/// -pub (crate) fn read_entry_identity( - identity_path_address: &EntryHash, -) -> RecordAPIResult -{ - let mut addrs = get_linked_addresses(identity_path_address, LinkTag::new(crate::identifiers::RECORD_INITIAL_ENTRY_LINK_TAG))?; - let entry_hash = addrs.pop().ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))?; - - Ok(entry_hash) + Ok(hash_entry(base_address.to_owned())?) } /// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), -/// query the `DnaHash` and `AnyDhtHash` of the record by inspecting the associated `Path` entry. +/// query the `DnaHash` and `AnyDhtHash` of the record. 
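The change in this module is subtle but central to the patch: identity addresses were previously derived from a dedicated `Path` entry, whereas `calculate_identity_address` now just hashes the `DnaAddressable` wrapper entry, making the identity a pure function of the `(DnaHash, EntryHash)` pair that any cell can compute without a DHT query. A sketch of that property under stand-in types (Holochain's entry hashing is modelled with a std hasher; `RecordAddress` stands in for a type declared with hdk_uuid_types' `addressable_identifier!` macro):

```rust
// Sketch of the property the reworked `calculate_identity_address` relies
// on: the identity address is a pure function of the (DnaHash, EntryHash)
// identifier pair, so no `Path` entry and no DHT round trip is needed.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Hash)]
struct RecordAddress([u8; 32], [u8; 32]); // (DnaHash, EntryHash) stand-ins

fn calculate_identity_address(id: &RecordAddress) -> u64 {
    let mut hasher = DefaultHasher::new();
    id.hash(&mut hasher);
    hasher.finish() // same identifier => same address, computable in any cell
}

fn main() {
    let id = RecordAddress([1; 32], [2; 32]);
    // deterministic: recomputable locally, anywhere
    assert_eq!(calculate_identity_address(&id), calculate_identity_address(&id));
}
```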
/// -/// :WARNING: if sharding is introduced, this will cause runtime failures until changed -/// -pub fn read_entry_identity_full( +pub fn read_entry_identity( identity_path_address: &EntryHash, ) -> RecordAPIResult where A: DnaAddressable, + SerializedBytes: TryInto, { - let index_path: Path = get_entry_by_address(&identity_path_address)?; - let components: &Vec = index_path.as_ref(); - let compound_key = components.last(); - - // ensure that a path component exists - if None == compound_key { return Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), None)); } + let identifier = get_entry_by_address(identity_path_address); - // ensure final addressing path component length - let key_bytes = compound_key.unwrap().as_ref(); - match bytes_to_extern_id(key_bytes) { - Err(_) => Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), Some(key_bytes.to_vec()))), + // throw meaningful error if reference is invalid + match identifier { + Err(_) => Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), None)), Ok(identity) => Ok(identity), } } //-------------------------------[ CREATE ]------------------------------------- -/// Creates a `Path` to initialise a unique index for a new entry, and returns -/// the `EntryHash` of the new `Path`. +/// Creates a pointer to initialise a universally-unique ID for a new entry, and returns +/// the `EntryHash` of the stored identifier. /// -/// This `Path` is intended to be used as an anchor to base links to/from the +/// This identifier is intended to be used as an anchor to base links to/from the /// entry onto. /// -pub fn create_entry_identity( - entry_type_root_path: S, +/// Also links the identifier to a global index for all entries of the given `entry_type`. +/// :TODO: replace this linkage with date-ordered sparse index based on record creation time +/// @see query_root_index() +/// +pub fn create_entry_identity( + entry_type: S, initial_address: &A, ) -> RecordAPIResult where S: AsRef, A: DnaAddressable, + CreateInput: TryFrom, + Entry: TryFrom, + WasmError: From, { - let path = identity_path_for(entry_type_root_path, initial_address); - path.ensure()?; - Ok(path.path_entry_hash()?) 
+ create_entry(initial_address.to_owned())?; + + let id_hash = calculate_identity_address(&entry_type, initial_address)?; + + let index_path = entry_type_root_path(&entry_type); + index_path.ensure()?; + create_link( + index_path.path_entry_hash()?, + id_hash.to_owned(), + LinkTag::new(crate::identifiers::RECORD_GLOBAL_INDEX_LINK_TAG), + )?; + + Ok(id_hash) } diff --git a/lib/hdk_records/src/lib.rs b/lib/hdk_records/src/lib.rs index 5c3382efc..7d3976b06 100644 --- a/lib/hdk_records/src/lib.rs +++ b/lib/hdk_records/src/lib.rs @@ -3,8 +3,9 @@ */ use thiserror::Error; use std::convert::Infallible; +use std::string::FromUtf8Error; use hdk::prelude::*; -pub use hdk_type_serialization_macros::{RevisionHash, DnaAddressable}; +pub use hdk_uuid_types::DnaAddressable; pub use hdk::prelude::{CellId, EntryHash, hash_entry}; pub use holo_hash::{DnaHash}; @@ -67,6 +68,8 @@ pub enum DataIntegrityError { EmptyQuery, #[error("Index at address {0} with malformed bytes {1:?}")] CorruptIndexError(EntryHash, Option>), + #[error("String index with malformed bytes {0:?}")] + BadStringIndexError(Vec), #[error("Error in remote call {0}")] RemoteRequestError(String), #[error("Bad zome RPC response format from {0}")] @@ -102,10 +105,18 @@ impl From for DataIntegrityError { } } +impl From for DataIntegrityError { + fn from(e: FromUtf8Error) -> DataIntegrityError { + DataIntegrityError::BadStringIndexError(e.into_bytes()) + } +} + // module constants / internals pub mod identifiers { // Holochain DHT storage type IDs pub const RECORD_INITIAL_ENTRY_LINK_TAG: &'static [u8] = b"initial_entry"; - pub const RECORD_IDENTITY_ANCHOR_LINK_TAG: &'static [u8] = b"identity_anchor"; + pub const RECORD_IDENTITY_ANCHOR_LINK_TAG: &'static [u8] = b"id|"; // :WARNING: byte length is important here. @see anchored_record_helpers::read_entry_anchor_id + // temporary: @see query_root_index() + pub const RECORD_GLOBAL_INDEX_LINK_TAG: &'static [u8] = b"all_entries"; } diff --git a/lib/hdk_records/src/link_helpers.rs b/lib/hdk_records/src/link_helpers.rs index d9eaf06db..bade80d48 100644 --- a/lib/hdk_records/src/link_helpers.rs +++ b/lib/hdk_records/src/link_helpers.rs @@ -43,6 +43,40 @@ pub fn get_linked_headers( pull_links_data(base_address, link_tag, get_link_target_header) } +/// Load any set of `LinkTag`s being referenced from the +/// provided `base_address` with the given `link_tag` prefix. +/// +pub fn get_linked_tags( + base_address: &EntryHash, + link_tag: LinkTag, +) -> RecordAPIResult> { + pull_links_data(base_address, link_tag, get_link_target_tag) +} + +/// Execute the provided `link_map` function against the set of links +/// between a `base_address` and `target_address` via the given `link_tag`. +/// +/// If you have a bidirectional link between two `EntryHash`es, you must +/// run this method twice (once to remove each direction of the paired link). 
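Besides storing the identifier entry, `create_entry_identity` above now registers it under a per-type root path via the `all_entries` link tag, which is what the reworked `query_root_index` (in `local_index_helpers.rs` below) traverses. A toy model of that global index, under stand-in types:

```rust
// Toy model of the "all_entries" global index that `create_entry_identity`
// now maintains: one bucket per entry type root path, holding the identity
// address of every record of that type.
use std::collections::HashMap;

#[derive(Default)]
struct GlobalIndex {
    // entry type root path -> identity addresses linked from it
    buckets: HashMap<String, Vec<u64>>,
}

impl GlobalIndex {
    fn create_entry_identity(&mut self, entry_type: &str, id_hash: u64) {
        // mirrors: index_path.ensure() + create_link(root, id, "all_entries")
        self.buckets.entry(entry_type.to_string()).or_default().push(id_hash);
    }

    fn query_root_index(&self, entry_type: &str) -> &[u64] {
        self.buckets.get(entry_type).map(Vec::as_slice).unwrap_or(&[])
    }
}

fn main() {
    let mut index = GlobalIndex::default();
    index.create_entry_identity("commitment", 0xA1);
    index.create_entry_identity("commitment", 0xB2);
    assert_eq!(index.query_root_index("commitment"), &[0xA1, 0xB2]);
}
```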
+/// +pub fn walk_links_matching_entry( + base_address: &EntryHash, + target_address: &EntryHash, + link_tag: LinkTag, + link_map: F, +) -> RecordAPIResult> + where F: Fn(&Link) -> T, +{ + let links_result = get_links(base_address.to_owned(), Some(link_tag))?; + + Ok(links_result + .iter() + .filter(|l| { l.target == *target_address }) + .map(link_map) + .collect() + ) +} + //----------------------------------------------------- // :TODO: ensure ordering is latest-first @@ -69,3 +103,7 @@ fn get_link_target_entry(l: &Link) -> EntryHash { fn get_link_target_header(l: &Link) -> HeaderHash { l.create_link_hash.clone() } + +fn get_link_target_tag(l: &Link) -> LinkTag { + l.tag.clone() +} diff --git a/lib/hdk_records/src/local_index_helpers.rs b/lib/hdk_records/src/local_index_helpers.rs index 4bb1358d9..895ce5d01 100644 --- a/lib/hdk_records/src/local_index_helpers.rs +++ b/lib/hdk_records/src/local_index_helpers.rs @@ -12,7 +12,7 @@ use hdk::prelude::*; use crate::{ - RevisionHash, DnaAddressable, + HeaderHash, DnaAddressable, RecordAPIResult, record_interface::Identified, identity_helpers::{ @@ -26,19 +26,23 @@ use crate::{ /// Given a type of entry, returns a Vec of *all* records of that entry registered /// internally with the DHT. /// -/// :TODO: sharding strategy for 2-nth order link destinations +/// :TODO: replace with date-ordered sparse index based on record creation time /// pub fn query_root_index<'a, T, R, O, I: AsRef>( base_entry_type: &I, -) -> RecordAPIResult>> +) -> RecordAPIResult>> where T: std::fmt::Debug, O: DnaAddressable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { let index_path = entry_type_root_path(base_entry_type); - let linked_records: Vec = index_path.children()?.into(); + + let linked_records: Vec = get_links( + index_path.path_entry_hash()?, + Some(LinkTag::new(crate::identifiers::RECORD_GLOBAL_INDEX_LINK_TAG)), + )?; Ok(linked_records.iter() .map(|link| { read_record_entry_by_identity(&link.target) }) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index f928ed213..b340a7a6b 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -12,7 +12,7 @@ use hdk::prelude::*; use hdk::info::dna_info; use crate::{ - RevisionHash, DnaAddressable, + DnaAddressable, RecordAPIResult, DataIntegrityError, record_interface::{Identifiable, Identified, Updateable}, entries::{ @@ -31,21 +31,25 @@ use crate::{ /// Helper to retrieve the HeaderHash for an Element /// fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { - shh.header_hashed().as_hash().to_owned() + shh.as_hash().to_owned() } //--------------------------------[ READ ]-------------------------------------- -/// Retrieve the latest available RevisionHash for a given EntryHash. +/// Retrieve the latest available HeaderHash for a given EntryHash. /// /// Useful in coordinating updates between different entry types. /// -pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { - Ok(RevisionHash(dna_info()?.hash, (match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })? { +/// NOTE: this is a very naive recursive algorithm that basically assumes full network +/// connectivity between everyone at all times, and Updates form a Linked List, rather +/// than a multi-branching tree. 
This should be updated during other 'conflict resolution' related +/// changes outlined in issue https://github.com/holo-rea/holo-rea/issues/196 +pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { + match get_details(entry_hash.clone(), GetOptions { strategy: GetStrategy::Latest })? { Some(Details::Entry(details)) => match details.entry_dht_status { metadata::EntryDhtStatus::Live => match details.updates.len() { 0 => { - // no updates yet, latest header hash is the first one + // https://docs.rs/hdk/latest/hdk/prelude/struct.EntryDetails.html#structfield.headers Ok(get_header_hash(details.headers.first().unwrap().to_owned())) }, _ => { @@ -53,19 +57,26 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult Err(DataIntegrityError::EntryNotFound), }, _ => Err(DataIntegrityError::EntryNotFound), - })?)) + } } -/// Retrive the specific version of an entry specified by the given `RevisionHash` +/// Retrieve the specific version of an entry specified by the given `HeaderHash` /// pub fn read_record_entry_by_header( - header_hash: &RevisionHash, + header_hash: &HeaderHash, ) -> RecordAPIResult<(B, T)> where T: std::fmt::Debug, B: DnaAddressable, @@ -87,21 +98,22 @@ pub fn read_record_entry_by_header( /// pub (crate) fn read_record_entry_by_identity( identity_address: &EntryHash, -) -> RecordAPIResult<(RevisionHash, B, T)> +) -> RecordAPIResult<(HeaderHash, B, T)> where T: std::fmt::Debug, B: DnaAddressable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { // read active links to current version - let entry_hash = read_entry_identity(identity_address)?; + let identifier: B = read_entry_identity(identity_address)?; // pull details of the current version, to ensure we have the most recent - let latest_header_hash = get_latest_header_hash(entry_hash)?; + let entry_hash: &EntryHash = identifier.as_ref(); + let latest_header_hash = get_latest_header_hash(entry_hash.to_owned())?; - let (entry_hash, entry_data) = read_record_entry_by_header(&latest_header_hash)?; + let (read_entry_hash, entry_data) = read_record_entry_by_header(&latest_header_hash)?; - Ok((latest_header_hash, entry_hash, entry_data)) + Ok((latest_header_hash, read_entry_hash, entry_data)) } /// Read a record's entry data by locating it via an anchor `Path` composed @@ -110,15 +122,16 @@ pub (crate) fn read_record_entry_by_identity( /// Presumes that the record is to be fetched from the current DNA and naturally errors /// if attempted on an `EntryHash` that only exists in a foreign cell.
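The recursion the NOTE above warns about reduces to a linked-list walk: starting from the first header, keep following `Update` headers until an entry has no further updates. A standalone model of that walk, with the DHT's update metadata stood in by a map from header hash to an optional successor header:

```rust
// Standalone model of the naive "latest revision" walk performed by
// `get_latest_header_hash`: follow Update headers as though they form a
// single unbranched chain, which is exactly the simplification the NOTE
// above warns about.
use std::collections::HashMap;

fn latest_header(updates: &HashMap<u64, Option<u64>>, first_header: u64) -> u64 {
    let mut current = first_header;
    // assumes full visibility of all updates and no branching; a real
    // conflict-resolution strategy must handle multi-branch update trees
    while let Some(Some(next)) = updates.get(&current) {
        current = *next;
    }
    current
}

fn main() {
    let mut chain = HashMap::new();
    chain.insert(1, Some(2)); // header 1 was updated by header 2
    chain.insert(2, Some(3)); // ...which was updated by header 3
    chain.insert(3, None);    // header 3 is the current tip
    assert_eq!(latest_header(&chain, 1), 3);
}
```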
/// -pub fn read_record_entry( +pub fn read_record_entry( entry_type_root_path: &S, address: &EntryHash, -) -> RecordAPIResult<(RevisionHash, B, T)> +) -> RecordAPIResult<(HeaderHash, B, T)> where S: AsRef, T: std::fmt::Debug, B: DnaAddressable, - SerializedBytes: TryInto, - Entry: TryFrom, + SerializedBytes: TryInto + TryInto, + Entry: TryFrom + TryFrom, + WasmError: From, R: std::fmt::Debug + Identified, { let identity_address = calculate_identity_address(entry_type_root_path, &B::new(dna_info()?.hash, address.clone()))?; @@ -133,13 +146,14 @@ pub fn read_record_entry( pub fn create_record( entry_def_id: S, create_payload: C, -) -> RecordAPIResult<(RevisionHash, B, I)> +) -> RecordAPIResult<(HeaderHash, B, I)> where S: AsRef, B: DnaAddressable, C: Into, I: Identifiable, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom, R: Identified, { // convert the type's CREATE payload into internal storage struct @@ -155,6 +169,7 @@ pub fn create_record( let identity_address = create_entry_identity(&entry_def_id, &identity)?; // link the identifier to the actual entry + // :TODO: this isn't needed for reading anymore, but might be worthwhile retaining for legibility in the DHT. Needs consideration as to DHT size bloat tradeoff. create_link(identity_address, entry_hash, LinkTag::new(crate::identifiers::RECORD_INITIAL_ENTRY_LINK_TAG))?; Ok((header_hash, identity, entry_data)) @@ -173,9 +188,9 @@ pub fn create_record( /// pub fn update_record( entry_def_id: S, - address: &RevisionHash, + address: &HeaderHash, update_payload: U, -) -> RecordAPIResult<(RevisionHash, B, I, I)> +) -> RecordAPIResult<(HeaderHash, B, I, I)> where S: AsRef, B: DnaAddressable, I: Identifiable + Updateable, @@ -206,20 +221,19 @@ pub fn update_record( /// /// Links are not affected so as to retain a link to the referencing information, which may now need to be updated. /// -pub fn delete_record(address: &A) -> RecordAPIResult +pub fn delete_record(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { // :TODO: handle deletion of the identity `Path` for the referenced entry if this is the last header being deleted - delete_entry::(address)?; + delete_entry::(address)?; Ok(true) } #[cfg(test)] mod tests { use super::*; - use hdk_type_serialization_macros::{ simple_alias, addressable_identifier }; + use hdk_uuid_types::{ simple_alias, addressable_identifier }; use crate::{generate_record_entry}; simple_alias!(EntryId => EntryHash); diff --git a/lib/hdk_records/src/record_interface.rs b/lib/hdk_records/src/record_interface.rs index 6fc1ee997..67f0b70fe 100644 --- a/lib/hdk_records/src/record_interface.rs +++ b/lib/hdk_records/src/record_interface.rs @@ -9,7 +9,7 @@ */ use hdk::prelude::*; -use hdk_type_serialization_macros::DnaAddressable; +use hdk_uuid_types::DnaAddressable; use crate::{ RecordAPIResult, diff --git a/lib/hdk_semantic_indexes/README.md b/lib/hdk_semantic_indexes/README.md index db40e68ef..db40cf998 100644 --- a/lib/hdk_semantic_indexes/README.md +++ b/lib/hdk_semantic_indexes/README.md @@ -33,7 +33,7 @@ As such, there are four crates comprising this module in its completeness: ### Defining an index -You will need to declare two zome crates- one for each side of the index. In addition to these zome crates you also need to define some identifier types implementing `hdk_type_serialization_macros::DnaAddressable` and map them to a `QueryParams` struct which forms the external API. 
+You will need to declare two zome crates- one for each side of the index. In addition to these zome crates you also need to define some identifier types implementing `hdk_uuid_types::DnaAddressable` and map them to a `QueryParams` struct which forms the external API. In the example above, this might look as follows: @@ -41,7 +41,7 @@ In the example above, this might look as follows: use hdk_semantic_indexes_zome_derive::index_zome; //-- usually, you would define these shared identifier types in another crate -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; addressable_identifier!(PostId => EntryHash); //-- @@ -67,7 +67,7 @@ struct Writer { use hdk_semantic_indexes_zome_derive::index_zome; //-- usually, you would define these shared identifier types in another crate -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; addressable_identifier!(AuthorId => AgentPubKey); //-- @@ -229,7 +229,7 @@ No other identifiers need match- in this example, the client zome need not have ### A word on `DnaAddressable` identifiers -[`hdk_type_serialization_macros`](../hdk_type_serialization_macros) provides macros for wrapping "raw" (DNA-local) identifiers with an associated `DnaHash`, which makes them universally-unique between all cells in a running Holochain conductor. +[`hdk_uuid_types`](../hdk_uuid_types) provides macros for wrapping "raw" (DNA-local) identifiers with an associated `DnaHash`, which makes them universally-unique between all cells in a running Holochain conductor. Since **all indexes provided by this library manage many:many relationships between application cells** it is possible that links between records might reference foreign records in multiple different networks. Complicating this further, if UI applications are to be able to dynamically compose different network arrangements to create "agent-centric" views which interact with multiple communities simultaneously; then **the possibility exists for such multiple references to different networks to be created independently of the original design of each application**. diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index fa48b7e6b..ede05d0c9 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -26,10 +26,11 @@ * @package hdk_semantic_indexes_client_lib * @since 2020-08-07 */ +use std::collections::HashMap; use hdk::prelude::*; +use holo_hash::DnaHash; use hdk_records::{ - RecordAPIResult, DataIntegrityError, - OtherCellResult, CrossCellError, + RecordAPIResult, OtherCellResult, DnaAddressable, rpc::{ call_local_zome_method, @@ -44,58 +45,42 @@ use hdk_semantic_indexes_zome_rpc::{ //-------------------------------[ MACRO LAYER ]------------------------------------- /// Create indexes by defining record types, relationships and associated IDs. +/// Local / remote determination is managed by DnaHash of target addresses. /// #[macro_export] macro_rules! create_index { - // local bidirectional index + // bidirectional 1:1 indexes ( - Local( - $lrecord_type:ident.$lrel:ident($ldest_record_id:expr), - $ldest_record_type:ident.$linv_rel:ident($lrecord_id:expr) - ) - ) => { - paste! 
{ - create_local_index( - [], - &stringify!([<_internal_index_ $lrecord_type:lower:snake _ $lrel:lower:snake>]), - $lrecord_id, - [], - &stringify!([<_internal_index_ $ldest_record_type:lower:snake _ $linv_rel:lower:snake>]), - $ldest_record_id, - ) - } - }; - // remote bidirectional index - ( - Remote( - $rrecord_type:ident.$rrel:ident($rdest_record_id:expr), - $rdest_record_type:ident.$rinv_rel:ident($rrecord_id:expr) - ) + $record_type:ident.$rel:ident($dest_record_id:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) ) => { paste! { - create_remote_index( - [], - &stringify!([<_internal_index_ $rrecord_type:lower:snake _ $rrel:lower:snake>]), - $rrecord_id, - &stringify!([]), - vec![$rdest_record_id.to_owned()].as_slice(), + manage_index( + [], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + [], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([]), + vec![$dest_record_id.to_owned()].as_slice(), + vec![].as_slice(), ) } }; - // special case for self-referential or local-only indexes + // self-referential, local-only indexes ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_id:expr) - ) + $record_type:ident($record_id:expr).$rel:ident($dest_record_id:expr) ) => { paste! { - create_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, |_| { None }, // specify none for destination index &"", // ignored, since no index zome name is returned - $dest_record_id, + &"", // ignored, since no index zome name is returned + vec![$dest_record_id.to_owned()].as_slice(), + vec![].as_slice(), ) } }; @@ -119,117 +104,61 @@ macro_rules! read_index { } /// Update indexes by defining added and removed identifiers. +/// Local / remote determination is managed by DnaHash of target addresses, and +/// you can freely mix identifiers from disparate DNAs in the same input. /// #[macro_export] macro_rules! update_index { - // local index, add only + // add only ( - Local( - $record_type:ident.$rel:ident($dest_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) + $record_type:ident.$rel:ident($dest_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) ) => { paste! { - update_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, [], &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([]), $dest_record_ids, - &vec![].as_slice(), + vec![].as_slice(), ) } }; - // local index, remove only + // add and remove ( - Local( - $record_type:ident.$rel:ident.not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) + $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) ) => { paste! { - update_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, [], &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), - &vec![].as_slice(), - $remove_record_ids, - ) - } - }; - // local index, add and remove - ( - Local( - $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! 
{ - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - [], - &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), - $dest_record_ids, - $remove_record_ids, - ) - } - }; - - // remote index, add only - ( - Remote( - $record_type:ident.$rel:ident($dest_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_remote_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, &stringify!([]), $dest_record_ids, - &vec![].as_slice(), - ) - } - }; - // remote index, remove only - ( - Remote( - $record_type:ident.$rel:ident.not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_remote_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - &stringify!([]), - &vec![].as_slice(), $remove_record_ids, ) } }; - // remote index, add and remove + // remove only ( - Remote( - $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) + $record_type:ident.$rel:ident.not($remove_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) ) => { paste! { - update_remote_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, + [], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), &stringify!([]), - $dest_record_ids, + vec![].as_slice(), $remove_record_ids, ) } @@ -237,17 +166,16 @@ macro_rules! update_index { // self-referential or local-only indexes, add only ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr) - ) + $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr) ) => { paste! { - update_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, |_| { None }, // specify none for destination index &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned $dest_record_ids, &vec![].as_slice(), ) @@ -255,17 +183,16 @@ macro_rules! update_index { }; // self-referential or local-only indexes, remove only ( - Self( - $record_type:ident($record_id:expr).$rel:ident.not($remove_record_ids:expr) - ) + $record_type:ident($record_id:expr).$rel:ident.not($remove_record_ids:expr) ) => { paste! { - update_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, |_| { None }, // specify none for destination index &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned &vec![].as_slice(), $remove_record_ids, ) @@ -273,17 +200,16 @@ macro_rules! update_index { }; // self-referential or local-only indexes, add & remove ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr) - ) + $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr) ) => { paste! 
{ - update_local_index( + manage_index( [], &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), $record_id, |_| { None }, // specify none for destination index &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned $dest_record_ids, $remove_record_ids, ) @@ -291,121 +217,112 @@ macro_rules! update_index { }; } - //-------------------------------[ CREATE ]------------------------------------- -/// Toplevel method for triggering a link creation flow between two records in -/// different DNA cells. The calling cell will have an 'origin query index' created for -/// fetching the referenced remote IDs; the destination cell will have a -/// 'destination query index' created for querying the referenced records in full. +/// Outer method for creating indexes. /// -/// :IMPORTANT: in handling errors from this method, one should take care to test -/// ALL `OtherCellResult`s in the returned Vector if the success of updating both -/// sides of the index is important. By default, an index failure on either side -/// may occur without the outer result failing. +/// :TODO: documentation /// -/// :TODO: consider a robust method for handling updates of data in remote DNAs & -/// foreign zomes where the transactionality guarantees of single-zome execution -/// are foregone. +/// @see create_index! /// -pub fn create_remote_index( +pub fn manage_index( origin_zome_name_from_config: F, origin_fn_name: &S, source: &A, + dest_zome_name_from_config: G, + dest_fn_name: &S, remote_permission_id: &S, dest_addresses: &[B], -) -> RecordAPIResult>> + remove_addresses: &[B], +) -> RecordAPIResult>> where S: AsRef, A: DnaAddressable, B: DnaAddressable, C: std::fmt::Debug, SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, + F: Copy + Fn(C) -> Option, + G: Copy + Fn(C) -> Option, { + // altering an index with no targets is a no-op + if dest_addresses.len() == 0 && remove_addresses.len() == 0 { + return Ok(vec![]) + } + let sources = vec![source.clone()]; + let targets = prefilter_target_dnas(dest_addresses, remove_addresses)?; + + // Manage local index creation / removal + + let empty = vec![]; - // Build local index first (for reading linked record IDs from the `source`) - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { + let local_forward_add = ( + if targets.local_dests.0.len() > 0 { targets.local_dests.0.iter() } + else { empty.iter() } + ).map(|dest| { request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, + origin_zome_name_from_config, origin_fn_name, dest, &sources, &vec![], ) - }).collect(); - - // request building of remote index in foreign cell - let resp = request_sync_remote_index( - remote_permission_id, - source, dest_addresses, &vec![], - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created) - }, - Err(e) => { - indexes_created.push(Err(e.into())) - }, - }; - - Ok(indexes_created) -} - -/// Creates a bidirectional link between a local entry and another from a foreign zome in the same DNA, -/// and returns a vector of the `HeaderHash`es of the (respectively) forward & reciprocal links created. 
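One practical consequence of the macro consolidation above is that every `create_index!` / `update_index!` invocation funnels into `manage_index`, with the internal zome method names derived from the record type and relationship via `paste!` and `stringify!`. A runnable sketch of just that naming convention (the record and relationship names are illustrative, not taken from the codebase):

```rust
// Sketch of the name derivation performed inside `create_index!` /
// `update_index!`: `paste!` + `stringify!` turn a record type and a
// relationship name into the `_internal_index_*` zome method that
// `manage_index` will call.
fn internal_index_method(record_type: &str, relationship: &str) -> String {
    // mirrors stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>])
    format!("_internal_index_{}_{}", record_type, relationship)
}

fn main() {
    assert_eq!(
        internal_index_method("economic_event", "realization_of"),
        "_internal_index_economic_event_realization_of"
    );
}
```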
-/// -/// :IMPORTANT: unlike remote indexes, it can be considered that DNA-local indexes are executing -/// "in good faith", since their inclusion in a DNA means they become part of the (hashed & shared) -/// computation space. The expectation is placed onto the DNA configurator to ensure that all -/// identifiers align and options correctly validate. As such, we treat local index failures more -/// severely than remote ones and WILL return a toplevel `DataIntegrityError` if *either* of the -/// paired index zomes fails to update; possibly causing a rollback in any other client logic -/// already attempted. -/// -/// :TODO: as above for remote indexes, so with local ones. -/// -pub fn create_local_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - dest_zome_name_from_config: G, - dest_fn_name: &S, - dest: &B, -) -> RecordAPIResult>> - where S: AsRef, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: FnOnce(C) -> Option, - G: FnOnce(C) -> Option, - A: DnaAddressable, - B: DnaAddressable, -{ - let dests = vec![(*dest).clone()]; - let sources = vec![source.clone()]; - - let or = request_sync_local_index(origin_zome_name_from_config, origin_fn_name, dest, &sources, &vec![]); - let dr = request_sync_local_index(dest_zome_name_from_config, dest_fn_name, source, &dests, &vec![]); - - let indexes_created = vec! [ - match dr { - Ok(drr) => drr.indexes_created - .first().ok_or(CrossCellError::Internal("cross-zome index creation failed".to_string()))? - .clone() - .map_err(|e| { DataIntegrityError::RemoteRequestError(e.to_string()) }), - Err(e) => Err(e.into()), - }, - match or { - Ok(orr) => orr.indexes_created - .first().ok_or(CrossCellError::Internal("cross-zome index creation failed".to_string()))? - .clone() - .map_err(|e| { DataIntegrityError::RemoteRequestError(e.to_string()) }), - Err(e) => Err(e.into()), - }, - ]; + }); - Ok(indexes_created) + let local_forward_remove = ( + if targets.local_dests.1.len() > 0 { targets.local_dests.1.iter() } + else { empty.iter() } + ).map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &vec![], &sources, + ) + }); + + let mut local_updates = vec![]; + let local_reciprocal_update = + if targets.local_dests.0.len() > 0 || targets.local_dests.1.len() > 0 { + let mut others = vec![request_sync_local_index( + dest_zome_name_from_config, dest_fn_name, + source, targets.local_dests.0.as_slice(), targets.local_dests.1.as_slice(), + )]; + local_updates.append(&mut others); + local_updates.to_owned() + } else { vec![] }; + + // Manage remote index creation / removal & append to resultset + + // :TODO: improve error handling by asserting that successful RPC + // calls fired for local targets + remote targets add up to equal + // the number of input `dest_addresses` & `remove_addresses` + + Ok(std::iter::empty() + .chain(local_forward_add) + .chain(local_forward_remove) + .chain(local_reciprocal_update) + .chain(targets.remote_dests.iter() + .flat_map(|(_dna, (add_dests, remove_dests))| { + let remote_forward_add = add_dests.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &sources, &vec![], + ) + }); + let remote_forward_remove = remove_dests.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &vec![], &sources, + ) + }); + let remote_reciprocal_update = std::iter::once(request_sync_remote_index( + remote_permission_id, + source, add_dests, remove_dests, + )); + + 
std::iter::empty() + .chain(remote_forward_add) + .chain(remote_forward_remove) + .chain(remote_reciprocal_update) + })) + .collect()) } //--------------------------------[ READ ]-------------------------------------- @@ -436,139 +353,6 @@ pub fn read_local_index<'a, O, A, S, F, C>( //-------------------------------[ UPDATE ]------------------------------------- -/// Toplevel method for triggering a link update flow between two records in -/// different DNAs. Indexes on both sides of the network boundary will be updated. -/// -/// :NOTE: All remote index deletion logic should use the update/sync API, as IDs -/// must be explicitly provided in order to guard against indexes from unrelated -/// cells being wiped by this cell. -/// -pub fn update_remote_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - remote_permission_id: &S, - dest_addresses: &[B], - remove_addresses: &[B], -) -> RecordAPIResult - where S: AsRef, - A: DnaAddressable, - B: DnaAddressable, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, -{ - // handle local 'origin' index first - let sources = vec![source.clone()]; - - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &sources, &vec![], - ) - }).collect(); - - let deleted: Vec> = remove_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &vec![], &sources, - ) - }).collect(); - - // forward request to remote cell to update destination indexes - let resp = request_sync_remote_index( - remote_permission_id, - source, dest_addresses, remove_addresses, - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - let mut indexes_removed = merge_indexing_results(&deleted, |r| { r.indexes_removed.to_owned() }); - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created); - indexes_removed.append(&mut remote_results.indexes_removed); - }, - Err(e) => { - indexes_created.push(Err(e)); - }, - }; - - Ok(RemoteEntryLinkResponse { indexes_created, indexes_removed }) -} - -/// Toplevel API for triggering an update flow between two indexes, where one is -/// in the local DNA and another is managed in a remote Cell. 
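The iterator chaining above makes the control flow dense, but the underlying decision is simple: each target address carries its `DnaHash`, so `manage_index` compares it against the local cell's `dna_info()` to choose between a direct local zome call and a batched remote RPC per foreign DNA (the batching itself is done by `prefilter_target_dnas`, defined at the bottom of this file). A standalone model of that routing split, under stand-in types:

```rust
// Standalone model of the local/remote routing split inside `manage_index`:
// each DnaAddressable target carries its DnaHash, and targets are divided
// into direct local updates plus one RPC batch per remote DNA.
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct DnaHash(u8); // stand-in for the real hash type

#[derive(Clone, Debug)]
struct RecordId(DnaHash, u64); // (DnaHash, local identifier) pair

fn route_targets(
    local_dna: &DnaHash,
    dests: &[RecordId],
) -> (Vec<RecordId>, HashMap<DnaHash, Vec<RecordId>>) {
    let mut local = vec![];
    let mut remote: HashMap<DnaHash, Vec<RecordId>> = HashMap::new();
    for dest in dests {
        if &dest.0 == local_dna {
            local.push(dest.clone()); // same cell: direct zome call
        } else {
            // foreign cell: accumulate for a batched remote RPC
            remote.entry(dest.0.clone()).or_default().push(dest.clone());
        }
    }
    (local, remote)
}

fn main() {
    let here = DnaHash(0);
    let dests = [RecordId(DnaHash(0), 1), RecordId(DnaHash(7), 2), RecordId(DnaHash(7), 3)];
    let (local, remote) = route_targets(&here, &dests);
    assert_eq!(local.len(), 1);
    assert_eq!(remote[&DnaHash(7)].len(), 2);
}
```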
-/// -pub fn update_local_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - dest_zome_name_from_config: G, - dest_fn_name: &S, - dest_addresses: &[B], - remove_addresses: &[B], -) -> RecordAPIResult - where S: AsRef, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, - G: FnOnce(C) -> Option, - A: DnaAddressable, - B: DnaAddressable, -{ - let sources = vec![source.clone()]; - - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &sources, &vec![], - ) - }).collect(); - - let deleted: Vec> = remove_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &vec![], &sources, - ) - }).collect(); - - let resp = request_sync_local_index( - dest_zome_name_from_config, dest_fn_name, - source, dest_addresses, remove_addresses, - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - let mut indexes_removed = merge_indexing_results(&deleted, |r| { r.indexes_removed.to_owned() }); - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created); - indexes_removed.append(&mut remote_results.indexes_removed); - }, - Err(e) => { - indexes_created.push(Err(e)); - }, - }; - - Ok(RemoteEntryLinkResponse { indexes_created, indexes_removed }) -} - -fn merge_indexing_results( - foreign_zome_results: &[OtherCellResult], - response_accessor: impl Fn(&RemoteEntryLinkResponse) -> Vec>, -) -> Vec> -{ - foreign_zome_results.iter() - .flat_map(|r| { - match r { - Ok(resp) => response_accessor(resp), - Err(e) => vec![Err(e.to_owned())], - } - }) - .collect() -} - /// Ask another bridged cell to build a 'destination query index' to match the /// 'origin' one that we have just created locally. /// When calling zomes within the same DNA, use `None` as `to_cell`. @@ -627,3 +411,63 @@ fn request_sync_local_index( ) )?) 
} + + +/// internal struct for pre-arranging lists of IDs for transmission to remote +/// DNA-relative API endpoints +#[derive(Debug)] +struct TargetsByDna + where B: DnaAddressable, +{ + pub local_dests: (Vec, Vec), + pub remote_dests: HashMap, Vec)>, +} + +// pre-arrange input IDs for transmission to target DNAs +fn prefilter_target_dnas<'a, B>( + dest_addresses: &'a [B], + remove_addresses: &'a [B], +) -> RecordAPIResult> + where B: DnaAddressable, +{ + let local_dna = dna_info()?.hash; + + let results = dest_addresses.iter() + .fold(TargetsByDna { + local_dests: (vec![], vec![]), + remote_dests: HashMap::new(), + }, |mut targets: TargetsByDna, val| { + let target_dna: &DnaHash = val.as_ref(); + if local_dna == target_dna.to_owned() { + targets.local_dests.0.push(val.to_owned()); + } else { + match targets.remote_dests.insert(target_dna.to_owned(), (vec![val.to_owned()], vec![])) { + Some(mut prev_val) => { + let vals = targets.remote_dests.get_mut(target_dna).unwrap(); + vals.0.append(&mut prev_val.0); + }, + None => (), + } + } + targets + }); + + Ok(remove_addresses.iter() + .fold(results, |mut targets: TargetsByDna, val| { + let target_dna: &DnaHash = val.as_ref(); + if local_dna == target_dna.to_owned() { + targets.local_dests.1.push(val.to_owned()); + } else { + match targets.remote_dests.insert(target_dna.to_owned(), (vec![], vec![val.to_owned()])) { + Some(mut prev_val) => { + let vals = targets.remote_dests.get_mut(target_dna).unwrap(); + vals.0.append(&mut prev_val.0); + vals.1.append(&mut prev_val.1); + }, + None => (), + } + } + targets + }) + ) +} diff --git a/lib/hdk_semantic_indexes/rpc/Cargo.toml b/lib/hdk_semantic_indexes/rpc/Cargo.toml index 949cac6a6..a376c7810 100644 --- a/lib/hdk_semantic_indexes/rpc/Cargo.toml +++ b/lib/hdk_semantic_indexes/rpc/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" serde = "1" holochain_serialized_bytes = "0.0.51" hdk_rpc_errors = { path = "../../hdk_rpc_errors" } -hdk_type_serialization_macros = { path = "../../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../../hdk_uuid_types" } [lib] crate-type = ["lib"] diff --git a/lib/hdk_semantic_indexes/rpc/src/lib.rs b/lib/hdk_semantic_indexes/rpc/src/lib.rs index 9aa7a54dd..332243a45 100644 --- a/lib/hdk_semantic_indexes/rpc/src/lib.rs +++ b/lib/hdk_semantic_indexes/rpc/src/lib.rs @@ -6,8 +6,8 @@ * @since 2021-10-01 */ use holochain_serialized_bytes::prelude::*; -use hdk_type_serialization_macros::{ - DnaAddressable, EntryHash, HeaderHash, RevisionHash, +use hdk_uuid_types::{ + DnaAddressable, EntryHash, HeaderHash, }; pub use hdk_rpc_errors::{OtherCellResult, CrossCellError}; @@ -17,7 +17,7 @@ pub use hdk_rpc_errors::{OtherCellResult, CrossCellError}; /// Query / modify entries by revision / `HeaderHash` #[derive(Debug, Serialize, Deserialize)] pub struct ByHeader { - pub address: RevisionHash, + pub address: HeaderHash, } /// Shared parameter struct that all related record storage endpoints must implement diff --git a/lib/hdk_semantic_indexes/zome/Cargo.toml b/lib/hdk_semantic_indexes/zome/Cargo.toml index 16d7bfdc0..36e5dac94 100644 --- a/lib/hdk_semantic_indexes/zome/Cargo.toml +++ b/lib/hdk_semantic_indexes/zome/Cargo.toml @@ -11,6 +11,7 @@ hdk = "0.0" hdk_semantic_indexes_zome_rpc = { path = "../rpc" } hdk_rpc_errors = { path = "../../hdk_rpc_errors" } hdk_records = { path = "../../hdk_records" } +hdk_relay_pagination = { path = "../../hdk_relay_pagination" } serde_maybe_undefined = { path = "../../serde_maybe_undefined" } hc_zome_dna_auth_resolver_lib = {git = 
"https://github.com/holochain-open-dev/dna-auth-resolver", rev = "b1adec5", package = "hc_zome_dna_auth_resolver_lib"} diff --git a/lib/hdk_semantic_indexes/zome/src/lib.rs b/lib/hdk_semantic_indexes/zome/src/lib.rs index 880ce3aed..128fb18bc 100644 --- a/lib/hdk_semantic_indexes/zome/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome/src/lib.rs @@ -11,13 +11,14 @@ use hdk_records::{ identities::{ calculate_identity_address, create_entry_identity, - read_entry_identity_full, + read_entry_identity, }, - links::{get_linked_addresses, get_linked_headers}, + links::{get_linked_addresses, walk_links_matching_entry}, rpc::call_local_zome_method, }; pub use hdk_records::{ RecordAPIResult, DataIntegrityError }; pub use hdk_semantic_indexes_zome_rpc::*; +pub use hdk_relay_pagination::PageInfo; //--------------- ZOME CONFIGURATION ATTRIBUTES ---------------- @@ -36,7 +37,7 @@ pub struct IndexingZomeConfig { /// Use this method to query associated IDs for a query edge, without retrieving /// the records themselves. /// -pub fn read_index<'a, O, A, S, I>( +pub fn read_index<'a, O, A, S, I, E>( base_entry_type: &I, base_address: &A, link_tag: &S, @@ -45,12 +46,15 @@ pub fn read_index<'a, O, A, S, I>( I: AsRef, A: DnaAddressable, O: DnaAddressable, + Entry: TryFrom + TryFrom, + SerializedBytes: TryInto, + WasmError: From, { let index_address = calculate_identity_address(base_entry_type, base_address)?; let refd_index_addresses = get_linked_addresses(&index_address, LinkTag::new(link_tag.as_ref()))?; let (existing_link_results, read_errors): (Vec>, Vec>) = refd_index_addresses.iter() - .map(read_entry_identity_full) + .map(read_entry_identity) .partition(Result::is_ok); // :TODO: this might have some issues as it presumes integrity of the DHT; needs investigating @@ -67,7 +71,7 @@ pub fn read_index<'a, O, A, S, I>( /// /// Use this method to query associated records for a query edge in full. 
/// -pub fn query_index<'a, T, O, C, F, A, S, I, J>( +pub fn query_index<'a, T, O, C, F, A, S, I, J, E>( base_entry_type: &I, base_address: &A, link_tag: &S, @@ -81,8 +85,10 @@ pub fn query_index<'a, T, O, C, F, A, S, I, J>( O: DnaAddressable, T: serde::de::DeserializeOwned + std::fmt::Debug, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, + Entry: TryFrom, + WasmError: From, { let index_address = calculate_identity_address(base_entry_type, base_address)?; let addrs_result = get_linked_addresses(&index_address, LinkTag::new(link_tag.as_ref()))?; @@ -108,7 +114,7 @@ fn retrieve_foreign_records<'a, T, B, C, F, S>( T: serde::de::DeserializeOwned + std::fmt::Debug, B: DnaAddressable, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, { let read_single_record = retrieve_foreign_record::(zome_name_from_config, &method_name); @@ -126,11 +132,11 @@ fn retrieve_foreign_record<'a, T, B, C, F, S>( T: serde::de::DeserializeOwned + std::fmt::Debug, B: DnaAddressable, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, { move |addr| { - let address: B = read_entry_identity_full(addr)?; + let address: B = read_entry_identity(addr)?; let entry_res: T = call_local_zome_method(zome_name_from_config.to_owned(), method_name, ByAddress { address })?; Ok(entry_res) } @@ -146,7 +152,7 @@ fn retrieve_foreign_record<'a, T, B, C, F, S>( /// The returned `RemoteEntryLinkResponse` provides an appropriate format for responding to indexing /// requests that originate from calls to `create/update/delete_remote_index` in a foreign DNA. /// -pub fn sync_index( +pub fn sync_index( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -159,6 +165,9 @@ pub fn sync_index( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, + WasmError: From, { // create any new indexes let indexes_created = create_remote_index_destination( @@ -187,7 +196,7 @@ pub fn sync_index( /// This basically consists of an identity `Path` for the remote content and bidirectional /// links between it and its `dest_addresses`. /// -fn create_remote_index_destination( +fn create_remote_index_destination( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -199,6 +208,9 @@ fn create_remote_index_destination( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, + WasmError: From, { // create a base entry pointer for the referenced origin record let _identity_hash = create_entry_identity(source_entry_type, source)?; @@ -210,7 +222,7 @@ fn create_remote_index_destination( ) } -fn create_dest_identities_and_indexes<'a, A, B, S, I>( +fn create_dest_identities_and_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -221,6 +233,9 @@ fn create_dest_identities_and_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: 'a + DnaAddressable, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, + WasmError: From, { let base_method = create_dest_indexes(source_entry_type, source, dest_entry_type, link_tag, link_tag_reciprocal); @@ -235,7 +250,7 @@ fn create_dest_identities_and_indexes<'a, A, B, S, I>( } /// Helper for index update to add multiple destination links from some source. 
-fn create_dest_indexes<'a, A, B, S, I>( +fn create_dest_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -246,6 +261,8 @@ fn create_dest_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Box::new(move |dest| { match create_index(source_entry_type, source, dest_entry_type, dest, link_tag, link_tag_reciprocal) { @@ -260,7 +277,7 @@ fn create_dest_indexes<'a, A, B, S, I>( /// Creates a bidirectional link between two entry addresses, and returns a vector /// of the `HeaderHash`es of the (respectively) forward & reciprocal links created. -fn create_index( +fn create_index( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -272,6 +289,8 @@ fn create_index( S: AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { let source_hash = calculate_identity_address(source_entry_type, source)?; let dest_hash = calculate_identity_address(dest_entry_type, dest)?; @@ -292,7 +311,7 @@ fn create_index( /// affected in the removal, and is simply left dangling in the /// DHT space as an indicator of previously linked items. /// -fn remove_remote_index_links( +fn remove_remote_index_links( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -304,6 +323,8 @@ fn remove_remote_index_links( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Ok(remove_addresses.iter() .flat_map(delete_dest_indexes( @@ -316,7 +337,7 @@ fn remove_remote_index_links( } /// Helper for index update to remove multiple destination links from some source. -fn delete_dest_indexes<'a, A, B, S, I>( +fn delete_dest_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -327,6 +348,8 @@ fn delete_dest_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Box::new(move |dest_addr| { match delete_index(source_entry_type, source, dest_entry_type, dest_addr, link_tag, link_tag_reciprocal) { @@ -342,9 +365,7 @@ fn delete_dest_indexes<'a, A, B, S, I>( /// Deletes a bidirectional link between two entry addresses. Any active links between /// the given addresses using the given tags will be deleted. /// -/// :TODO: this should probably only delete the referenced IDs, at the moment it clears anything matching tags. -/// -fn delete_index<'a, A, B, S, I>( +fn delete_index<'a, A, B, S, I, E>( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -356,24 +377,36 @@ fn delete_index<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { let tag_source = LinkTag::new(link_tag.as_ref()); let tag_dest = LinkTag::new(link_tag_reciprocal.as_ref()); let address_source = calculate_identity_address(source_entry_type, source)?; let address_dest = calculate_identity_address(dest_entry_type, dest)?; - let mut links = get_linked_headers(&address_source, tag_source)?; - links.append(& mut get_linked_headers(&address_dest, tag_dest)?); + let mut links = walk_links_matching_entry( + &address_source, + &address_dest, + tag_source, + delete_link_target_header, + )?; + links.append(& mut walk_links_matching_entry( + &address_dest, + &address_source, + tag_dest, + delete_link_target_header, + )?); - Ok(links - .iter().cloned() - .map(|l| { Ok(delete_link(l)?) 
}) - .collect() - ) + Ok(links) } //--------------------------[ UTILITIES / INTERNALS ]--------------------- +fn delete_link_target_header(l: &Link) -> RecordAPIResult { + Ok(delete_link(l.create_link_hash.to_owned())?) +} + /// Returns the first error encountered (if any). Best used with the `?` operator. fn throw_any_error(mut errors: Vec>) -> RecordAPIResult<()> { if errors.len() == 0 { diff --git a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs index ad9ce2e6a..fce2c679d 100644 --- a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs @@ -46,6 +46,7 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { // build toplevel variables for generated code let record_type = &input.ident; let record_type_str_attribute = record_type.to_string().to_case(Case::Snake); + let record_type_str_ident = format_ident!("{}", record_type_str_attribute); let record_type_index_attribute = format_ident!("{}_index", record_type_str_attribute); let record_read_api_method_name = format_ident!("get_{}", record_type_str_attribute); @@ -147,7 +148,7 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { quote! { match &params.#query_field_ident { Some(#query_field_ident) => { - entries_result = query_index::( + entries_result = query_index::( &stringify!(#related_record_type_str_attribute), #query_field_ident, &stringify!(#reciprocal_index_name), @@ -164,9 +165,6 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { use hdk::prelude::*; use hdk_semantic_indexes_zome_lib::*; - // unrelated toplevel zome boilerplate - entry_defs![PathEntry::entry_def()]; - // :TODO: obviate this with zome-specific configs #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct DnaConfigSlice { @@ -197,17 +195,23 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { #index_mutators )* - // define query results structure as a flat array which separates errors into own list + // query results structure mimicking Relay's pagination format #[derive(Debug, Serialize, Deserialize)] struct QueryResults { + pub page_info: PageInfo, #[serde(default)] - pub results: Vec, - // :TODO: pagination metadata + pub edges: Vec, #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub errors: Vec, } + #[derive(Debug, Serialize, Deserialize)] + struct Edge { + node: Response, + cursor: String, + } + // declare public query method with injected handler logic #[hdk_extern] fn #exposed_query_api_method_name(SearchInputs { params }: SearchInputs) -> ExternResult @@ -221,11 +225,32 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { let entries = entries_result?; + let formatted_edges = entries.iter() + .cloned() + .filter_map(Result::ok) + .map(|node| { + let record_cursor: Vec<u8> = node.#record_type_str_ident.id.to_owned().into(); + Edge { + node: node.#record_type_str_ident, + // :TODO: use HoloHashB64 once API stabilises + cursor: String::from_utf8(record_cursor).unwrap_or("".to_string()) + } + }); + + let mut edge_cursors = formatted_edges.clone().map(|e| { e.cursor }); + let first_cursor = edge_cursors.next().unwrap_or("0".to_string()); + Ok(QueryResults { - results: entries.iter() - .cloned() - .filter_map(Result::ok) - .collect(), + edges: formatted_edges.collect(), + page_info: PageInfo { + end_cursor: edge_cursors.last().unwrap_or(first_cursor.clone()), + start_cursor: first_cursor,
// :TODO: + has_next_page: true, + has_previous_page: true, + page_limit: None, + total_count: None, + }, errors: entries.iter() .cloned() .filter_map(Result::err) diff --git a/lib/hdk_type_serialization_macros/Cargo.toml b/lib/hdk_uuid_types/Cargo.toml similarity index 68% rename from lib/hdk_type_serialization_macros/Cargo.toml rename to lib/hdk_uuid_types/Cargo.toml index 5f4a73f49..67f9ab0df 100644 --- a/lib/hdk_type_serialization_macros/Cargo.toml +++ b/lib/hdk_uuid_types/Cargo.toml @@ -1,12 +1,12 @@ [package] -name = "hdk_type_serialization_macros" +name = "hdk_uuid_types" version = "0.1.0" authors = ["pospi "] edition = "2018" [dependencies] serde = "1" -holochain_serialized_bytes = "0.0" +hdk = "0.0" holo_hash = "0.0" [lib] diff --git a/lib/hdk_type_serialization_macros/README.md b/lib/hdk_uuid_types/README.md similarity index 98% rename from lib/hdk_type_serialization_macros/README.md rename to lib/hdk_uuid_types/README.md index 1f8f42cea..17e32f206 100644 --- a/lib/hdk_type_serialization_macros/README.md +++ b/lib/hdk_uuid_types/README.md @@ -15,7 +15,7 @@ This module provides macros for wrapping "raw" (DNA-local) `EntryHash`, `HeaderH ## Usage ```rust -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; // "Newtype struct" pattern, wraps values in different types to enforce compile-time distinctness. // To access the raw wrapped value, use `.as_ref()`. diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_uuid_types/src/lib.rs similarity index 60% rename from lib/hdk_type_serialization_macros/src/lib.rs rename to lib/hdk_uuid_types/src/lib.rs index d7092364b..da5c78c1a 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_uuid_types/src/lib.rs @@ -1,14 +1,37 @@ /** - * Type aliases used to ensure explicit awareness of applicable record types in VF structs + * The primary goal of this module is to provide structs which ensure universal uniqueness + * of record identifiers in Holochain apps. This is achieved by concatenating the `DnaHash` + * of the host network space with an identifier which is locally-unique within that membrane. * - * To convert wrapped values to an `EntryHash`, use `aliased_val.as_ref()`. - * To convert a plain `EntryHash` to its wrapped form, use `raw_address.into()`. + * Such information is sufficient to build a universally-unique Holochain URI, and allows + * apps to mix references to disparate network spaces in the same data structures. + * + * To convert wrapped values to an `EntryHash` or `DnaHash`, use `aliased_val.as_ref()` in assignment + * to the appropriate type. + * + * A secondary goal is to provide an ability to create distinct types for different identifiers, + * such that identifiers cannot be accidentally mismatched to the wrong record types. + * For example, given these two definitions- + * + * addressable_identifier!(CommitmentAddress => EntryHash); + * addressable_identifier!(IntentAddress => EntryHash); + * + * 'CommitmentAddress' and 'IntentAddress' cannot be confused even though they contain data + * of the same format, and the compiler will complain if a 'CommitmentAddress' is incorrectly + * assigned to a struct field or method parameter expecting an 'IntentAddress'. This helps to + * prevent developer error when your application has a large number of different entry types. + * + * This same functionality is also provided for simple values with the `simple_alias` macro. 
*/ use std::fmt::Debug; -pub use holochain_serialized_bytes::prelude::*; -pub use holo_hash::{DnaHash, EntryHash, HeaderHash, AnyDhtHash, HOLO_HASH_UNTYPED_LEN}; +pub use hdk::prelude::*; +pub use hdk; +pub use holo_hash::*; +/// Generate a simple newtype wrapper around some raw data, to enforce distinctness of +/// different data items with the same underlying format. +/// #[macro_export] macro_rules! simple_alias { ($id:ident => $base:ty) => { @@ -35,7 +58,7 @@ macro_rules! simple_alias { } } -/// Supertrait to bind all dependent traits that implement identifier behaviours. +/// Supertrait to bind all dependent traits that implement unique identifier behaviours. /// pub trait DnaAddressable where Self: Clone + Eq + std::hash::Hash @@ -47,13 +70,29 @@ pub trait DnaAddressable fn new(dna: DnaHash, identifier: B) -> Self; } +/// Generate a universally-unique identifier for some DNA-local identifier +/// (an `EntryHash` or `AgentPubKey`). +/// +/// This also defines an `EntryDef` of the same name so that the identifier +/// can be directly stored to the DHT, which is required for building foreign-key +/// indexes which reference remote data. +/// #[macro_export] macro_rules! addressable_identifier { ($r:ident => $base:ty) => { - // externally facing type, with DnaHash of cell for context + // externally facing type, with DnaHash of cell for universally-unique context #[derive(Serialize, Deserialize, SerializedBytes, Debug, Clone, PartialEq, Eq, Hash)] pub struct $r(pub DnaHash, pub $base); + // define as an EntryDef so identifiers can be stored directly to the DHT as indexing anchors + entry_def!($r EntryDef { + id: stringify!($r).into(), + crdt_type: CrdtType, + required_validations: RequiredValidations::default(), + visibility: EntryVisibility::Public, + required_validation_type: RequiredValidationType::default(), + }); + // constructor impl $crate::DnaAddressable<$base> for $r { fn new(dna: DnaHash, identifier: $base) -> Self { @@ -84,8 +123,6 @@ macro_rules! addressable_identifier { } } -addressable_identifier!(RevisionHash => HeaderHash); - /// Supertrait for things which can be identified by some string label in a particular DNA /// pub trait DnaIdentifiable @@ -95,6 +132,10 @@ pub trait DnaIdentifiable fn new(dna: DnaHash, identifier: B) -> Self; } +/// Generate a universally-unique identifier for some DNA-local string identifier. +/// The implementor must ensure that this string ID remains unique in the DNA via +/// whatever application logic is relevant to the use-case. +/// #[macro_export] macro_rules! dna_scoped_string { ($r:ident) => { @@ -130,6 +171,9 @@ macro_rules! dna_scoped_string { /// /// Use the `addressable_identifier!` macro to auto-implement type-specific identifiers compatible with this method of encoding. /// +/// :TODO: remove this method, it's currently used in conversion of IDs to cursors in response formatting and +/// should probably be replaced with the HoloHashB64 variants or similar functionality. 
+/// pub fn extern_id_to_bytes(id: &A) -> Vec where A: AsRef + AsRef, B: Clone, @@ -143,23 +187,6 @@ pub fn extern_id_to_bytes(id: &A) -> Vec [AnyDhtHash::from((*entry_address).clone()).get_raw_36(), dna_hash.get_raw_36()].concat() } -/// Convert raw bytes encoded into a `Path` index into its full identity pair -/// -/// @see hdk_type_serialization_macros::extern_id_to_bytes -/// -pub fn bytes_to_extern_id(key_bytes: &[u8]) -> Result - where A: DnaAddressable, -{ - if key_bytes.len() != HOLO_HASH_UNTYPED_LEN * 2 { return Err(SerializedBytesError::Deserialize("Invalid input length for bytes_to_extern_id!".to_string())) } - - // pull DnaHash from last 36 bytes; first 36 are for EntryHash/HeaderHash - // @see holo_hash::hash - Ok(A::new( - DnaHash::from_raw_36(key_bytes[HOLO_HASH_UNTYPED_LEN..].to_vec()), - EntryHash::from_raw_36(key_bytes[0..HOLO_HASH_UNTYPED_LEN].to_vec()), - )) -} - #[cfg(test)] mod tests { use super::*; diff --git a/lib/vf_attributes_hdk/Cargo.toml b/lib/vf_attributes_hdk/Cargo.toml index b88414b63..62672b7cc 100644 --- a/lib/vf_attributes_hdk/Cargo.toml +++ b/lib/vf_attributes_hdk/Cargo.toml @@ -11,7 +11,7 @@ holochain_serialized_bytes = "0.0" holochain_zome_types = { version = "0.0", default-features = false } holo_hash = "0.0" -hdk_type_serialization_macros = { path = "../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../hdk_uuid_types" } hdk_semantic_indexes_zome_rpc = { path = "../hdk_semantic_indexes/rpc" } [lib] diff --git a/lib/vf_attributes_hdk/src/lib.rs b/lib/vf_attributes_hdk/src/lib.rs index fb677934d..1a3dc1f24 100644 --- a/lib/vf_attributes_hdk/src/lib.rs +++ b/lib/vf_attributes_hdk/src/lib.rs @@ -1,10 +1,10 @@ -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; // re-exports for convenience pub use chrono::{ FixedOffset, Utc, DateTime }; pub use holo_hash::{ AgentPubKey, EntryHash, HeaderHash }; pub use holochain_zome_types::timestamp::Timestamp; -pub use hdk_type_serialization_macros::{RevisionHash, DnaAddressable}; +pub use hdk_uuid_types::{DnaAddressable}; pub use hdk_semantic_indexes_zome_rpc::{ByHeader, ByAddress}; simple_alias!(ActionId => String); diff --git a/lib/vf_measurement/src/lib.rs b/lib/vf_measurement/src/lib.rs index 8d966f5fd..10d76f158 100644 --- a/lib/vf_measurement/src/lib.rs +++ b/lib/vf_measurement/src/lib.rs @@ -9,9 +9,9 @@ use vf_attributes_hdk::UnitId; #[derive(Debug, Clone)] pub struct Unit { - id: UnitId, - name: Option, - symbol: Option, + pub id: UnitId, + pub name: Option, + pub symbol: Option, } #[derive(Clone, PartialEq, Serialize, Deserialize, SerializedBytes, Debug)] diff --git a/modules/graphql-client/README.md b/modules/graphql-client/README.md index e42aab67f..30dda73e7 100644 --- a/modules/graphql-client/README.md +++ b/modules/graphql-client/README.md @@ -23,12 +23,6 @@ In a [Svelte](https://svelte.dev/) application, simple app initialisation logic import App from './my-happ-ui' - // define connection params - const conductorUri = process.env.REACT_APP_HC_CONN_URL || 'ws://localhost:4001' - const dnaConfig = { - // :TODO: determine appropriate `CellId`s by interrogating admin websocket - } - // init and manage GraphQL client connection let client = null let loading = true @@ -44,9 +38,17 @@ In a [Svelte](https://svelte.dev/) application, simple app initialisation logic error = null } + // Omit these options for connecting via the Holochain Launcher. 
+ // During development, you can provide them as follows: initConnection({ - conductorUri, - dnaConfig, + // A websocket URI to connect to the Holochain Conductor on: + // conductorUri, + + // Mapping of hREA module IDs to Holochain CellIds. If omitted, + // the client will attempt to sniff them by inspecting the names + // of active app cells. Any cell with a known 'hrea_*_X' format + // will be matched. + // dnaConfig, }) // workaround to set the context outside of init action diff --git a/modules/graphql-client/finish-build.js b/modules/graphql-client/finish-build.js index cbaebdfd5..01a88e8b8 100644 --- a/modules/graphql-client/finish-build.js +++ b/modules/graphql-client/finish-build.js @@ -11,17 +11,12 @@ const fs = require('fs') const path = require('path') -fs.copyFileSync(path.resolve(__dirname, '../../LICENSE'), path.resolve(__dirname, './build/LICENSE')) fs.copyFileSync(path.resolve(__dirname, './README.md'), path.resolve(__dirname, './build/README.md')) -fs.copyFileSync(path.resolve(__dirname, './package.json'), path.resolve(__dirname, './build/package.json')) -const packageJson = require(path.resolve(__dirname, './build/package.json')) +const packageJson = require(path.resolve(__dirname, './package.json')) +const dependentPackageJson = require(path.resolve(__dirname, '../vf-graphql-holochain/package.json')) -delete packageJson['private'] -delete packageJson['main'] -delete packageJson['types'] delete packageJson.scripts['prepare'] -packageJson.dependencies['@valueflows/vf-graphql-holochain'] = packageJson.dependencies['@valueflows/vf-graphql-holochain'].replace('../', '../../') +packageJson.dependencies['@valueflows/vf-graphql-holochain'] = dependentPackageJson.version fs.writeFileSync(path.resolve(__dirname, './build/package.json'), JSON.stringify(packageJson, undefined, " ")) - diff --git a/modules/graphql-client/index.ts b/modules/graphql-client/index.ts index 3ca21890d..3e151405d 100644 --- a/modules/graphql-client/index.ts +++ b/modules/graphql-client/index.ts @@ -1,8 +1,6 @@ /** * GraphQL client interface for Holochain connection * - * :TODO: sniff active DNA configuration from conductor - * * @package Holo-REA GraphQL client * @since 2020-07-14 */ @@ -10,9 +8,16 @@ import { InMemoryCache, ApolloClient } from '@apollo/client' import { SchemaLink } from '@apollo/link-schema' -import bindSchema from '@valueflows/vf-graphql-holochain' +import bindSchema, { autoConnect, APIOptions, DNAIdMappings } from '@valueflows/vf-graphql-holochain' + +// Same as OpenConnectionOptions but for external client where dnaConfig may be autodetected +interface AutoConnectionOptions { + dnaConfig?: DNAIdMappings, +} + +export type ClientOptions = APIOptions & AutoConnectionOptions -async function initGraphQLClient(options) { +export async function initGraphQLClient(options: APIOptions) { const schema = await bindSchema(options/* modules, DNA id bindings */) return new ApolloClient({ @@ -21,4 +26,14 @@ }); } -export default initGraphQLClient; +async function connect(options: ClientOptions) { + // autodetect `CellId`s if no explicit `dnaConfig` is provided + if (!options.dnaConfig) { + let { dnaConfig } = await autoConnect(options.conductorUri) + options.dnaConfig = dnaConfig + } + + return await initGraphQLClient(options) +} + +export default connect; diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 224eaf856..cde9da1ea 100644 --- a/modules/graphql-client/package.json +++
b/modules/graphql-client/package.json @@ -1,9 +1,9 @@ { "name": "@vf-ui/graphql-client-holochain", - "private": true, - "version": "0.0.1", + "version": "0.0.1-alpha.4", "description": "ValueFlows GraphQLClient configurations, providing pluggable backend datasources for different distributed, federated and client/server infrastructure.", - "main": "index.ts", + "main": "build/index.js", + "types": "build/index.d.ts", "scripts": { "prepare": "npm run build", "build": "tsc -p ./tsconfig.dev.json; node ./finish-build", @@ -27,8 +27,13 @@ "url": "https://github.com/holo-rea/holo-rea/issues" }, "homepage": "https://github.com/holo-rea/holo-rea#readme", + "publishConfig": { + "directory": "build", + "main": "index.js", + "types": "index.d.ts" + }, "dependencies": { - "@valueflows/vf-graphql-holochain": "link:../vf-graphql-holochain/build", + "@valueflows/vf-graphql-holochain": "workspace:*", "@apollo/link-schema": "^2.0.0-beta.3", "tslib": "^2.0.0" }, diff --git a/modules/vf-graphql-holochain/README.md b/modules/vf-graphql-holochain/README.md index 3d09ff596..e2b123f92 100644 --- a/modules/vf-graphql-holochain/README.md +++ b/modules/vf-graphql-holochain/README.md @@ -48,7 +48,7 @@ The `enabledVFModules` option, if specified, [controls the subset of ValueFlows ### Multiple collaboration spaces -The `dnaConfig` option allows the callee to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/happs` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to. +The `dnaConfig` option allows the caller to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/bundles/dna` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to. By targeting multiple sets of DNAs, multiple "collaboration spaces" can be initialised for a single client application. Several GraphQL APIs can be interacted with via the standard ValueFlows specification. User interfaces should make explicit the scope of data and destination networks to perform query and mutation operations against. @@ -63,10 +63,10 @@ In some cases, tooling may require low-level access to the GraphQL resolver call ```js import { makeExecutableSchema } from '@graphql-tools/schema' -import { generateResolvers } from '@valueflows/vf-graphql-holochain' +import { generateResolvers, VfModule } from '@valueflows/vf-graphql-holochain' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') -const enabledVFModules = ['measurement', 'knowledge', 'observation'] +const enabledVFModules = [VfModule.Measurement, VfModule.Knowledge, VfModule.Observation] const resolvers = generateResolvers({ enabledVFModules }) @@ -76,7 +76,7 @@ const schema = makeExecutableSchema({ }) ``` -Note that the IDs of ValueFlows modules in `enabledVFModules` above do not map exactly 1:1 with the hREA DNA identifiers in `dnaConfig`. For example, the "knowledge" VF module determines the presence of the `ResourceSpecification` and `ProcessSpecification` resolvers, which actually map to an hREA *specification* DNA. +Note that the IDs of ValueFlows modules in `enabledVFModules` above do not map exactly 1:1 with the hREA DNA identifiers in `dnaConfig`. For example, `VfModule.Knowledge` determines the presence of the `ResourceSpecification` and `ProcessSpecification` resolvers, which actually map to an hREA *specification* DNA.
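To make the module-to-DNA mapping concrete, a minimal sketch follows (the cell ID value is a placeholder; `bindSchema` is this package's default export and the option names are those of `APIOptions`, so treat this as illustrative rather than canonical):

```ts
import bindSchema, { VfModule, DNAIdMappings, CellId } from '@valueflows/vf-graphql-holochain'

// Placeholder cell ID for illustration only; real values come from the
// conductor's appInfo, or can be auto-detected by omitting `dnaConfig`.
declare const specificationCellId: CellId

// Enabling the "knowledge" VF module binds its ResourceSpecification /
// ProcessSpecification resolvers to the hREA *specification* DNA, so that
// is the key which must be present in `dnaConfig`.
const dnaConfig = { specification: specificationCellId } as DNAIdMappings

export async function makeSchema() {
  return bindSchema({
    enabledVFModules: [VfModule.Knowledge],
    dnaConfig,
    conductorUri: 'ws://localhost:4001',
  })
}
```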
## Repository structure @@ -95,7 +95,7 @@ Other files implement the query bindings between the linked hREA app DNAs and Gr - You will need to be given access to the [VF NPM org](https://www.npmjs.com/org/valueflows) in order to update the module on the registry. You can request access in https://gitter.im/valueflows/welcome - Bump the version in `package.json` & commit to the repository -- Run `npm run build` from this directory or `npm run build:graphql-adapter` from the root of the hREA repository +- Run `pnpm run build` from this directory or `pnpm run build:graphql-adapter` from the root of the hREA repository - Change to `./build` under this directory, where the new generated files are - Run `npm publish --access public` from the `./build` directory - Tag the current release in git and push the tag to `origin` diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 7cffd816c..ad006bfd5 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -26,17 +26,33 @@ import { DNAIdMappings } from './types' type RecordId = [HoloHash, HoloHash] +type ActualInstalledCell = { // :TODO: remove this when fixed in tryorama + cell_id: CellId; + role_id: string; +} + //---------------------------------------------------------------------------------------------------------------------- // Connection persistence and multi-conductor / multi-agent handling //---------------------------------------------------------------------------------------------------------------------- -const HOLOCHAIN_LAUNCHER_CONTEXT_ID = 'unspecifiedForHolochainLauncher' -// the only environment at this point that will -// work without a specified websocket url is the Holochain -// Launcher -let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL || HOLOCHAIN_LAUNCHER_CONTEXT_ID +// :NOTE: when calling AppWebsocket.connect for the Launcher Context +// it just expects an empty string for the socketURI. Other environments require it. +let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL as string || '' +let HOLOCHAIN_APP_ID = process.env.REACT_APP_HC_APP_ID as string || '' + const CONNECTION_CACHE: { [i: string]: Promise } = {} +export async function autoConnect(conductorUri?: string, appID?: string, traceAppSignals?: AppSignalCb) { + if (!conductorUri) { + conductorUri = DEFAULT_CONNECTION_URI + } + + const conn = await openConnection(conductorUri, traceAppSignals) + const dnaConfig = await sniffHolochainAppCells(conn, appID) + + return { conn, dnaConfig, conductorUri } +} + /** * Inits a connection for the given websocket URI. If no `socketURI` is provided, * a connection is attempted via the `REACT_APP_HC_CONN_URL` environment variable. @@ -47,25 +63,15 @@ const CONNECTION_CACHE: { [i: string]: Promise } = {} * been previously performed for the same `socketURI`. */ export const openConnection = (socketURI: string, traceAppSignals?: AppSignalCb) => { - if (!socketURI) { - socketURI = DEFAULT_CONNECTION_URI - } - console.log(`Init Holochain connection: ${socketURI}`) - // when calling AppWebsocket.connect for the Launcher Context - // it just expects an empty string for the socketURI - const uriToPassAppWebsocket = socketURI === HOLOCHAIN_LAUNCHER_CONTEXT_ID ? 
'' : socketURI - CONNECTION_CACHE[socketURI] = AppWebsocket.connect(uriToPassAppWebsocket, undefined, traceAppSignals) + CONNECTION_CACHE[socketURI] = AppWebsocket.connect(socketURI, undefined, traceAppSignals) .then((client) => { console.log(`Holochain connection to ${socketURI} OK`) return client }) - return { - connectionPromise: CONNECTION_CACHE[socketURI], - socketURI - } + return CONNECTION_CACHE[socketURI] } const getConnection = (socketURI: string) => { @@ -76,6 +82,29 @@ const getConnection = (socketURI: string) => { return CONNECTION_CACHE[socketURI] } +/** + * Introspect an active Holochain connection's app cells to determine cell IDs + * for mapping to the schema resolvers. + */ +export async function sniffHolochainAppCells(conn: AppWebsocket, appID?: string) { + const appInfo = await conn.appInfo({ installed_app_id: appID || HOLOCHAIN_APP_ID }) + if (!appInfo) { + throw new Error(`appInfo call failed for Holochain app '${appID || HOLOCHAIN_APP_ID}' - ensure the name is correct and that the app installation has succeeded`) + } + + let dnaMappings: DNAIdMappings = (appInfo['cell_data'] as unknown[] as ActualInstalledCell[]).reduce((mappings, { cell_id, role_id }) => { + const hrea_cell_match = role_id.match(/hrea_(\w+)_\d+/) + if (!hrea_cell_match) { return mappings } + + mappings[hrea_cell_match[1] as keyof DNAIdMappings] = cell_id + return mappings + }, {} as DNAIdMappings) + + console.info('Connecting to detected Holochain cells:', dnaMappings) + + return dnaMappings +} + //---------------------------------------------------------------------------------------------------------------------- // Holochain / GraphQL type translation layer @@ -89,11 +118,12 @@ const HOLOHASH_PREFIX_ENTRY = [0x84, 0x21, 0x24] // uhCEk const HOLOHASH_PREFIX_HEADER = [0x84, 0x29, 0x24] // uhCkk const HOLOHASH_PREFIX_AGENT = [0x84, 0x20, 0x24] // uhCAk +const serializedHashMatchRegex = /^[A-Za-z0-9_+\-/]{53}={0,2}$/ const idMatchRegex = /^[A-Za-z0-9_+\-/]{53}={0,2}:[A-Za-z0-9_+\-/]{53}={0,2}$/ const stringIdRegex = /^\w+?:[A-Za-z0-9_+\-/]{53}={0,2}$/ // @see https://github.com/holochain-open-dev/core-types/blob/main/src/utils.ts -function deserializeHash(hash: string): Uint8Array { +export function deserializeHash(hash: string): Uint8Array { return Base64.toUint8Array(hash.slice(1)) } @@ -105,7 +135,7 @@ function deserializeId(field: string): RecordId { ] } -function deserializeStringId(field: string): Array { +function deserializeStringId(field: string): [Buffer,string] { const matches = field.split(':') return [ Buffer.from(deserializeHash(matches[1])), @@ -114,7 +144,7 @@ function deserializeStringId(field: string): Array { } // @see https://github.com/holochain-open-dev/core-types/blob/main/src/utils.ts -function serializeHash(hash: Uint8Array): string { +export function serializeHash(hash: Uint8Array): string { return `u${Base64.fromUint8Array(hash, true)}` } @@ -122,11 +152,19 @@ function seralizeId(id: RecordId): string { return `${serializeHash(id[1])}:${serializeHash(id[0])}` } -function seralizeStringId(id: Array): string { - return `${id[1]}:${serializeHash(id[0] as Buffer)}` +function seralizeStringId(id: [Buffer,string]): string { + return `${id[1]}:${serializeHash(id[0])}` +} + +// Construct appropriate IDs for records in associated DNAs by substituting +// the CellId portion of the ID with that of an appropriate destination record +export function remapCellId(originalId, newCellId) { + const [origId, _origCell] = originalId.split(':') + return `${origId}:${newCellId.split(':')[1]}` } 
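// Illustrative usage sketch (not part of this patch): serialized IDs take the
// form 'recordHashB64:dnaHashB64', so remapCellId keeps the record-hash portion
// and substitutes the DNA-hash portion from the referenced record's ID. The
// fulfillment resolver further below uses it exactly this way:
//   const associatedId = remapCellId(record.id, record.fulfilledBy)
// i.e. a planning-cell scoped ID is rebased onto the observation cell before
// querying economic events.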
const LONG_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ss.SSSZ' +const SHORT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ' const isoDateRegex = /^\d{4}-\d\d-\d\d(T\d\d:\d\d:\d\d(\.\d\d\d)?)?([+-]\d\d:\d\d)?$/ /** @@ -137,6 +175,12 @@ const isoDateRegex = /^\d{4}-\d\d-\d\d(T\d\d:\d\d:\d\d(\.\d\d\d)?)?([+-]\d\d:\d\ const decodeFields = (result: any): void => { deepForEach(result, (value, prop, subject) => { + // HeaderHash + if ((value instanceof Buffer || value instanceof Uint8Array) && value.length === HOLOCHAIN_IDENTIFIER_LEN && checkLeadingBytes(value, HOLOHASH_PREFIX_HEADER)) { + subject[prop] = serializeHash(value as unknown as Uint8Array) + } + + // RecordId | StringId (Agent, for now) if (Array.isArray(value) && value.length == 2 && (value[0] instanceof Buffer || value[0] instanceof Uint8Array) && value[0].length === HOLOCHAIN_IDENTIFIER_LEN && @@ -152,13 +196,16 @@ const decodeFields = (result: any): void => { // :TODO: This one probably isn't safe for regular ID field mixing. // Custom serde de/serializer would make bind this handling to the appropriate fields without duck-typing issues. } else { - subject[prop] = seralizeStringId(value) + subject[prop] = seralizeStringId(value as [Buffer, string]) } } // recursively check for Date strings and convert to JS date objects upon receiving if (value && value.match && value.match(isoDateRegex)) { subject[prop] = parse(value, LONG_DATETIME_FORMAT) + if (subject[prop] === null) { + subject[prop] = parse(value, SHORT_DATETIME_FORMAT) + } } }) @@ -185,6 +232,9 @@ const encodeFields = (args: any): any => { } // deserialise any identifiers back to their binary format + else if (args.match && args.match(serializedHashMatchRegex)) { + return deserializeHash(args) + } else if (args.match && args.match(idMatchRegex)) { return deserializeId(args) } @@ -219,7 +269,7 @@ export type BoundZomeFn = (args: any) => any; /** * Higher-order function to generate async functions for calling zome RPC methods */ -const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_name: string): BoundZomeFn => async (args) => { +const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_name: string, skipEncodeDecode?: boolean): BoundZomeFn => async (args) => { const { callZome } = await getConnection(socketURI) const res = await callZome({ cap_secret: null, // :TODO: @@ -227,9 +277,9 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_ zome_name, fn_name, provenance: cell_id[1], - payload: encodeFields(args), + payload: skipEncodeDecode ? 
args : encodeFields(args), }) - decodeFields(res) + if (!skipEncodeDecode) decodeFields(res) return res } @@ -242,5 +292,13 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_ * * @return bound async zome function which can be called directly */ -export const mapZomeFn = (mappings: DNAIdMappings, socketURI: string, instance: string, zome: string, fn: string) => - zomeFunction(socketURI, (mappings && mappings[instance]), zome, fn) +export const mapZomeFn = (mappings: DNAIdMappings, socketURI: string, instance: string, zome: string, fn: string, skipEncodeDecode?: boolean) => + zomeFunction(socketURI, (mappings && mappings[instance]), zome, fn, skipEncodeDecode) + + +export const extractEdges = (withEdges: { edges: { node: T }[] }): T[] => { + if (!withEdges.edges || !withEdges.edges.length) { + return [] + } + return withEdges.edges.map(({ node }) => node) +} diff --git a/modules/vf-graphql-holochain/finish-build.js b/modules/vf-graphql-holochain/finish-build.js index b7b01b030..082617293 100644 --- a/modules/vf-graphql-holochain/finish-build.js +++ b/modules/vf-graphql-holochain/finish-build.js @@ -11,15 +11,8 @@ const fs = require('fs') const path = require('path') -fs.copyFileSync(path.resolve(__dirname, '../../LICENSE'), path.resolve(__dirname, './build/LICENSE')) fs.copyFileSync(path.resolve(__dirname, './README.md'), path.resolve(__dirname, './build/README.md')) -fs.copyFileSync(path.resolve(__dirname, './package.json'), path.resolve(__dirname, './build/package.json')) -const packageJson = require(path.resolve(__dirname, './build/package.json')) - -delete packageJson['private'] -delete packageJson['main'] -delete packageJson['types'] +const packageJson = require(path.resolve(__dirname, './package.json')) delete packageJson.scripts['prepare'] - fs.writeFileSync(path.resolve(__dirname, './build/package.json'), JSON.stringify(packageJson, undefined, " ")) diff --git a/modules/vf-graphql-holochain/index.ts b/modules/vf-graphql-holochain/index.ts index 4343b42bb..cdaa1d7db 100644 --- a/modules/vf-graphql-holochain/index.ts +++ b/modules/vf-graphql-holochain/index.ts @@ -10,19 +10,23 @@ import { makeExecutableSchema } from '@graphql-tools/schema' -import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAMappings, CellId } from './types' +import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAIdMappings, CellId, VfModule } from './types' import generateResolvers from './resolvers' -import { mapZomeFn, openConnection } from './connection' +import { mapZomeFn, autoConnect, openConnection, sniffHolochainAppCells, remapCellId } from './connection' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') export { // direct access to resolver callbacks generator for apps that need to bind to other GraphQL schemas generateResolvers, + // connection handling methods + autoConnect, openConnection, sniffHolochainAppCells, // direct access to Holochain zome method bindings for authoring own custom resolvers bound to non-REA DNAs - openConnection, mapZomeFn, // types that wrapper libraries may need to manage conductor DNA connection logic - DNAMappings, CellId, + DNAIdMappings, CellId, APIOptions, VfModule, + + // :TODO: remove this. After #266 clients should not need to think about differing IDs between Cells. 
+ remapCellId, } /** diff --git a/modules/vf-graphql-holochain/mutations/index.ts b/modules/vf-graphql-holochain/mutations/index.ts index 55f287fff..b2b250b28 100644 --- a/modules/vf-graphql-holochain/mutations/index.ts +++ b/modules/vf-graphql-holochain/mutations/index.ts @@ -5,7 +5,7 @@ * @since: 2019-05-22 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import ResourceSpecification from './resourceSpecification' import ProcessSpecification from './processSpecification' @@ -30,15 +30,13 @@ import Agreement from './agreement' // generic deletion calling format used by all mutations export type deleteHandler = (root: any, args: { revisionId: string }) => Promise -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const VFmodules = enabledVFModules || [] - const hasAgent = -1 !== VFmodules.indexOf("agent") - const hasMeasurement = -1 !== VFmodules.indexOf("measurement") - const hasKnowledge = -1 !== VFmodules.indexOf("knowledge") - const hasObservation = -1 !== VFmodules.indexOf("observation") - const hasPlanning = -1 !== VFmodules.indexOf("planning") - const hasProposal = -1 !== VFmodules.indexOf("proposal") - const hasAgreement = -1 !== VFmodules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) return Object.assign( (hasMeasurement ? 
{ ...Unit(dnaConfig, conductorUri) } : {}), diff --git a/modules/vf-graphql-holochain/mutations/proposal.ts b/modules/vf-graphql-holochain/mutations/proposal.ts index 0edbdf040..959409185 100644 --- a/modules/vf-graphql-holochain/mutations/proposal.ts +++ b/modules/vf-graphql-holochain/mutations/proposal.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposals * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/mutations/proposedIntent.ts b/modules/vf-graphql-holochain/mutations/proposedIntent.ts index 3612e3141..3479be950 100644 --- a/modules/vf-graphql-holochain/mutations/proposedIntent.ts +++ b/modules/vf-graphql-holochain/mutations/proposedIntent.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposed intents * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/mutations/proposedTo.ts b/modules/vf-graphql-holochain/mutations/proposedTo.ts index 08ee8134c..5d49ac031 100644 --- a/modules/vf-graphql-holochain/mutations/proposedTo.ts +++ b/modules/vf-graphql-holochain/mutations/proposedTo.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposed-to records * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index d366a9b75..605a577e7 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -1,7 +1,6 @@ { "name": "@valueflows/vf-graphql-holochain", - "version": "0.0.1-alpha.3", - "private": true, + "version": "0.0.1-alpha.4", "main": "build/index.js", "types": "build/index.d.ts", "description": "GraphQL schema bindings for the Holochain implementation of ValueFlows", @@ -32,12 +31,17 @@ "url": "https://github.com/holo-rea/holo-rea/issues" }, "homepage": "https://github.com/holo-rea/holo-rea#readme", + "publishConfig": { + "directory": "build", + "main": "index.js", + "types": "index.d.ts" + }, "dependencies": { "buffer": "^6.0.3", "@graphql-tools/schema": "^8.3.1", "@graphql-tools/utils": "^8.6.1", "@holochain/client": "0.3.2", - "@valueflows/vf-graphql": "0.9.0-alpha.2", + "@valueflows/vf-graphql": "0.9.0-alpha.3", "dataloader": "^1.4.0", "deep-for-each": "^3.0.0", "fecha": "^4.1.0", diff --git a/modules/vf-graphql-holochain/queries/agent.ts b/modules/vf-graphql-holochain/queries/agent.ts index d0f5fe95f..883265d74 100644 --- a/modules/vf-graphql-holochain/queries/agent.ts +++ b/modules/vf-graphql-holochain/queries/agent.ts @@ -8,22 +8,18 @@ */ import { DNAIdMappings, injectTypename } from '../types' -import { mapZomeFn } from '../connection' +import { mapZomeFn, serializeHash, deserializeHash } from '../connection' import { Agent } from '@valueflows/vf-graphql' -// :TODO: remove this, backend should use HoloHashB64 eventually -const { Base64 } = require('js-base64') -function serializeHash (hash) { - return `u${Base64.fromUint8Array(hash, true)}` -} export default (dnaConfig: DNAIdMappings, conductorUri: string) => { const readMyAgent = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_my_agent_pubkey') - const readAllAgents = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_registered_agents') - const agentExists = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'is_registered') + const readAllAgents = mapZomeFn(dnaConfig, conductorUri, 'agent',
'agent_registration', 'get_registered') + // special 'true' at the end is for skipEncodeDecode, because of the way this zome handles serialization and inputs, + // which differs from the other zomes + const agentExists = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'is_registered', true) // read mapped DNA hash in order to construct VF-native IDs from DNA-local HC IDs const mappedDNA = dnaConfig['agent'] ? serializeHash(dnaConfig['agent'][0]) : null @@ -39,6 +35,8 @@ export default (dnaConfig: DNAIdMappings, conductorUri: string) => { } }), + // :TODO: this and the associated functionality in 'get_registered' need to be revisited + // or potentially integrated from other projects affording similar functionality. agents: async (root, args): Promise => { return (await readAllAgents(null)).map(agentAddress => ({ // :TODO: wire to Personas hApp @@ -49,7 +47,8 @@ export default (dnaConfig: DNAIdMappings, conductorUri: string) => { }, agent: injectTypename('Person', async (root, { id }): Promise => { - const isAgent = await agentExists({ pubKey: id }) + const rawAgentPubKey = deserializeHash(id.split(':')[0]) + const isAgent = await agentExists({ pubKey: rawAgentPubKey }) if (!isAgent) { throw new Error('No agent exists with that ID') diff --git a/modules/vf-graphql-holochain/queries/economicEvent.ts b/modules/vf-graphql-holochain/queries/economicEvent.ts index 908d4200c..6584ad05c 100644 --- a/modules/vf-graphql-holochain/queries/economicEvent.ts +++ b/modules/vf-graphql-holochain/queries/economicEvent.ts @@ -5,7 +5,7 @@ * @since: 2019-05-27 */ -import { DNAIdMappings, injectTypename, addTypename } from '../types' +import { DNAIdMappings, injectTypename } from '../types' import { mapZomeFn } from '../connection' import { diff --git a/modules/vf-graphql-holochain/queries/index.ts b/modules/vf-graphql-holochain/queries/index.ts index 65fd71d32..18fc172d1 100644 --- a/modules/vf-graphql-holochain/queries/index.ts +++ b/modules/vf-graphql-holochain/queries/index.ts @@ -5,7 +5,7 @@ * @since: 2019-05-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import Action from './action' import Unit from './unit' @@ -28,15 +28,14 @@ import Proposal from './proposal' import Agreement from './agreement' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const VFmodules = enabledVFModules || [] - const hasAgent = -1 !== VFmodules.indexOf("agent") - const hasMeasurement = -1 !== VFmodules.indexOf("measurement") - const hasKnowledge = -1 !== VFmodules.indexOf("knowledge") - const hasObservation = -1 !== VFmodules.indexOf("observation") - const hasPlanning = -1 !== VFmodules.indexOf("planning") - const hasProposal = -1 !== VFmodules.indexOf("proposal") - const hasAgreement = -1 !== VFmodules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement)
return Object.assign({ ...Action(dnaConfig, conductorUri), diff --git a/modules/vf-graphql-holochain/resolvers/agent.ts b/modules/vf-graphql-holochain/resolvers/agent.ts index 1270744be..403d3b04d 100644 --- a/modules/vf-graphql-holochain/resolvers/agent.ts +++ b/modules/vf-graphql-holochain/resolvers/agent.ts @@ -5,9 +5,9 @@ * @since: 2020-05-28 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { return { __resolveType: (obj, ctx, info) => obj.__typename } diff --git a/modules/vf-graphql-holochain/resolvers/agreement.ts b/modules/vf-graphql-holochain/resolvers/agreement.ts index 3389cb06e..6fb54c04d 100644 --- a/modules/vf-graphql-holochain/resolvers/agreement.ts +++ b/modules/vf-graphql-holochain/resolvers/agreement.ts @@ -5,7 +5,7 @@ * @since: 2020-06-19 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -14,9 +14,9 @@ import { EconomicEvent, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) const queryCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments') const queryEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events') @@ -24,12 +24,20 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( (hasPlanning ? { commitments: async (record: Agreement): Promise => { - return (await queryCommitments({ params: { clauseOf: record.id } })).map(({ commitment }) => commitment) + const commitments = await queryCommitments({ params: { clauseOf: record.id } }) + if (!commitments.edges || !commitments.edges.length) { + return [] + } + return commitments.edges.map(({ node }) => node) }, } : {}), (hasObservation ? 
{ economicEvents: async (record: Agreement): Promise => { - return (await queryEvents({ params: { realizationOf: record.id } })).map(({ economicEvent }) => economicEvent) + const economicEvents = await queryEvents({ params: { realizationOf: record.id } }) + if (!economicEvents.edges || !economicEvents.edges.length) { + return [] + } + return economicEvents.edges.map(({ node }) => node) }, } : {}), ) diff --git a/modules/vf-graphql-holochain/resolvers/commitment.ts b/modules/vf-graphql-holochain/resolvers/commitment.ts index 0018daf28..b20b22b44 100644 --- a/modules/vf-graphql-holochain/resolvers/commitment.ts +++ b/modules/vf-graphql-holochain/resolvers/commitment.ts @@ -5,8 +5,8 @@ * @since: 2019-08-28 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' -import { mapZomeFn } from '../connection' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' +import { extractEdges, mapZomeFn } from '../connection' import { Agent, @@ -22,11 +22,11 @@ import { import agentQueries from '../queries/agent' import agreementQueries from '../queries/agreement' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasAgreement = -1 !== enabledVFModules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) const readFulfillments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'fulfillment_index', 'query_fulfillments') const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'planning', 'satisfaction_index', 'query_satisfactions') @@ -39,11 +39,13 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( { fulfilledBy: async (record: Commitment): Promise => { - return (await readFulfillments({ params: { fulfills: record.id } })).map(({ fulfillment }) => fulfillment) + const results = await readFulfillments({ params: { fulfills: record.id } }) + return extractEdges(results) }, satisfies: async (record: Commitment): Promise => { - return (await readSatisfactions({ params: { satisfiedBy: record.id } })).map(({ satisfaction }) => satisfaction) + const results = await readSatisfactions({ params: { satisfiedBy: record.id } }) + return extractEdges(results) }, }, (hasAgent ? { @@ -57,11 +59,13 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI } : {}), (hasObservation ? { inputOf: async (record: Commitment): Promise => { - return (await readProcesses({ params: { committedInputs: record.id } })).pop()['process'] + const results = await readProcesses({ params: { committedInputs: record.id } }) + return results.edges.pop()['node'] }, outputOf: async (record: Commitment): Promise => { - return (await readProcesses({ params: { committedOutputs: record.id } })).pop()['process'] + const results = await readProcesses({ params: { committedOutputs: record.id } }) + return results.edges.pop()['node'] }, } : {}), (hasKnowledge ? 
{ diff --git a/modules/vf-graphql-holochain/resolvers/economicEvent.ts b/modules/vf-graphql-holochain/resolvers/economicEvent.ts index 02d898091..cb2cc4818 100644 --- a/modules/vf-graphql-holochain/resolvers/economicEvent.ts +++ b/modules/vf-graphql-holochain/resolvers/economicEvent.ts @@ -5,8 +5,8 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' -import { mapZomeFn } from '../connection' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' +import { extractEdges, mapZomeFn } from '../connection' import { Agent, @@ -24,11 +24,11 @@ import agentQueries from '../queries/agent' import agreementQueries from '../queries/agreement' import resourceQueries from '../queries/economicResource' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") - const hasAgreement = -1 !== enabledVFModules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) const readFulfillments = mapZomeFn(dnaConfig, conductorUri, 'observation', 'fulfillment_index', 'query_fulfillments') const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'observation', 'satisfaction_index', 'query_satisfactions') @@ -42,11 +42,13 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( { inputOf: async (record: EconomicEvent): Promise => { - return (await readProcesses({ params: { inputs: record.id } })).pop()['process'] + const results = await readProcesses({ params: { inputs: record.id } }) + return results.edges.pop()['node'] }, outputOf: async (record: EconomicEvent): Promise => { - return (await readProcesses({ params: { outputs: record.id } })).pop()['process'] + const results = await readProcesses({ params: { outputs: record.id } }) + return results.edges.pop()['node'] }, resourceInventoriedAs: async (record: EconomicEvent): Promise => { @@ -65,11 +67,13 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI } : {}), (hasPlanning ? { fulfills: async (record: EconomicEvent): Promise => { - return (await readFulfillments({ params: { fulfilledBy: record.id } })).map(({ fulfillment }) => fulfillment) + const results = await readFulfillments({ params: { fulfilledBy: record.id } }) + return extractEdges(results) }, satisfies: async (record: EconomicEvent): Promise => { - return (await readSatisfactions({ params: { satisfiedBy: record.id } })).map(({ satisfaction }) => satisfaction) + const results = await readSatisfactions({ params: { satisfiedBy: record.id } }) + return extractEdges(results) }, } : {}), (hasKnowledge ? 
{ diff --git a/modules/vf-graphql-holochain/resolvers/economicResource.ts b/modules/vf-graphql-holochain/resolvers/economicResource.ts index 8448f289f..23bfa807c 100644 --- a/modules/vf-graphql-holochain/resolvers/economicResource.ts +++ b/modules/vf-graphql-holochain/resolvers/economicResource.ts @@ -5,7 +5,7 @@ * @since: 2019-10-31 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -17,9 +17,9 @@ import { Maybe, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) const readResources = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_resource_index', 'query_economic_resources') const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit') @@ -29,12 +29,20 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( { - containedIn: async (record: EconomicResource): Promise => { - return (await readResources({ params: { contains: record.id } })).pop()['economicResource'] + containedIn: async (record: EconomicResource): Promise> => { + const resources = await readResources({ params: { contains: record.id } }) + if (!resources.edges || !resources.edges.length) { + return null + } + return resources.edges.pop()['node'] }, contains: async (record: EconomicResource): Promise => { - return (await readResources({ params: { containedIn: record.id } })).map(({ economicResource }) => economicResource) + const resources = await readResources({ params: { containedIn: record.id } }) + if (!resources.edges || !resources.edges.length) { + return [] + } + return resources.edges.map(({ node }) => node) }, }, (hasKnowledge ? 
diff --git a/modules/vf-graphql-holochain/resolvers/fulfillment.ts b/modules/vf-graphql-holochain/resolvers/fulfillment.ts
index d93d16c98..a23b9b6fd 100644
--- a/modules/vf-graphql-holochain/resolvers/fulfillment.ts
+++ b/modules/vf-graphql-holochain/resolvers/fulfillment.ts
@@ -5,8 +5,8 @@
  * @since: 2019-08-27
  */
-import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES } from '../types'
-import { mapZomeFn } from '../connection'
+import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types'
+import { mapZomeFn, remapCellId } from '../connection'
 import {
   Fulfillment,
@@ -14,8 +14,8 @@ import {
   Commitment,
 } from '@valueflows/vf-graphql'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasObservation = -1 !== enabledVFModules.indexOf("observation")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation)
   const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events')
   const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments')
@@ -23,12 +23,15 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
   return Object.assign(
     {
       fulfills: injectTypename('Commitment', async (record: Fulfillment): Promise<Commitment> => {
-        return (await readCommitments({ params: { fulfilledBy: record.id } })).pop()['commitment']
+        const results = await readCommitments({ params: { fulfilledBy: record.id } })
+        return results.edges.pop()['node']
       }),
     },
     (hasObservation ? {
       fulfilledBy: injectTypename('EconomicEvent', async (record: Fulfillment): Promise<EconomicEvent> => {
-        return (await readEvents({ params: { fulfills: record.id } })).pop()['economicEvent']
+        const associatedId = remapCellId(record.id, record.fulfilledBy)
+        const results = await readEvents({ params: { fulfills: associatedId } })
+        return results.edges.pop()['node']
       }),
     } : {}),
   )
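`remapCellId` re-addresses a local record id into the cell of a foreign record before it is used as a query parameter, which is what lets `fulfilledBy` follow a reference across DNA boundaries. The sketch below is only an assumption about its behaviour inferred from the call sites in this diff, taking ids to be 'cellId:entryHash' composites; the actual implementation is in `modules/vf-graphql-holochain/connection.ts`.

// Assumed id format: '<cellIdPart>:<entryHashPart>' (hypothetical). Given a
// local record id and the id of a related record living in another cell,
// rebuild the local id against the foreign record's cell so that cell's
// indexes can be queried with it.
function remapCellId (localId: string, remoteId: string): string {
  const entryHash = localId.split(':').slice(1).join(':')
  const [remoteCell] = remoteId.split(':')
  return `${remoteCell}:${entryHash}`
}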
diff --git a/modules/vf-graphql-holochain/resolvers/index.ts b/modules/vf-graphql-holochain/resolvers/index.ts
index 2073dff50..6f8cb70e5 100644
--- a/modules/vf-graphql-holochain/resolvers/index.ts
+++ b/modules/vf-graphql-holochain/resolvers/index.ts
@@ -5,7 +5,7 @@
  * @since: 2019-05-20
  */
-import { DNAIdMappings, ResolverOptions, URI, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, ResolverOptions, URI, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { DateTimeResolver as DateTime } from 'graphql-scalars'
 import { openConnection } from '../connection'
@@ -53,13 +53,13 @@ export default async (options: ResolverOptions) => {
     traceAppSignals = undefined,
   } = options
-  const hasAgent = -1 !== enabledVFModules.indexOf("agent")
-  const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement")
-  const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge")
-  const hasObservation = -1 !== enabledVFModules.indexOf("observation")
-  const hasPlanning = -1 !== enabledVFModules.indexOf("planning")
-  const hasProposal = -1 !== enabledVFModules.indexOf("proposal")
-  const hasAgreement = -1 !== enabledVFModules.indexOf("agreement")
+  const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent)
+  const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement)
+  const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge)
+  const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation)
+  const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning)
+  const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal)
+  const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement)
   // prefetch connection for this API schema
   await openConnection(conductorUri, traceAppSignals)
diff --git a/modules/vf-graphql-holochain/resolvers/intent.ts b/modules/vf-graphql-holochain/resolvers/intent.ts
index 6c649ff22..d29c1d688 100644
--- a/modules/vf-graphql-holochain/resolvers/intent.ts
+++ b/modules/vf-graphql-holochain/resolvers/intent.ts
@@ -5,8 +5,8 @@
  * @since: 2019-08-31
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
-import { mapZomeFn } from '../connection'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
+import { extractEdges, mapZomeFn } from '../connection'
 import {
   Maybe,
@@ -23,11 +23,11 @@ import agentQueries from '../queries/agent'
 const extractProposedIntent = (data): ProposedIntent => data.proposedIntent
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasAgent = -1 !== enabledVFModules.indexOf("agent")
-  const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge")
-  const hasObservation = -1 !== enabledVFModules.indexOf("observation")
-  const hasProposal = -1 !== enabledVFModules.indexOf("proposal")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent)
+  const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge)
+  const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation)
+  const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal)
   const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'planning', 'satisfaction_index', 'query_satisfactions')
   const readProcesses = mapZomeFn(dnaConfig, conductorUri, 'observation', 'process_index', 'query_processes')
@@ -39,7 +39,8 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
   return Object.assign(
     {
       satisfiedBy: async (record: Intent): Promise<Satisfaction[]> => {
-        return (await readSatisfactions({ params: { satisfies: record.id } })).map(({ satisfaction }) => satisfaction)
+        const results = await readSatisfactions({ params: { satisfies: record.id } })
+        return extractEdges(results)
       },
     },
     (hasAgent ? {
@@ -53,11 +54,13 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
     } : {}),
     (hasObservation ? {
       inputOf: async (record: Intent): Promise<Process> => {
-        return (await readProcesses({ params: { intendedInputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { intendedInputs: record.id } })
+        return results.edges.pop()['node']
       },
       outputOf: async (record: Intent): Promise<Process> => {
-        return (await readProcesses({ params: { intendedOutputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { intendedOutputs: record.id } })
+        return results.edges.pop()['node']
       },
     } : {}),
     (hasProposal ? {
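With `enabledVFModules` now typed as `VfModule[]`, a host application opts into schema areas explicitly, and every resolver file assembles its field map from the same `has*` flags via `Object.assign`. A usage sketch follows; the import path is illustrative, and `VfModule` itself is defined in `modules/vf-graphql-holochain/types.ts` further down in this diff.

import { VfModule } from '@valueflows/vf-graphql-holochain'

// Enable only the observation-side modules; planning/proposal resolvers
// are simply never merged into the resolver map.
const enabledVFModules: VfModule[] = [
  VfModule.Knowledge,
  VfModule.Measurement,
  VfModule.Observation,
]

// Equivalent to the `-1 !== enabledVFModules.indexOf(...)` checks above.
const hasObservation = enabledVFModules.includes(VfModule.Observation)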
diff --git a/modules/vf-graphql-holochain/resolvers/measure.ts b/modules/vf-graphql-holochain/resolvers/measure.ts
index db8553cc7..b3ae00c79 100644
--- a/modules/vf-graphql-holochain/resolvers/measure.ts
+++ b/modules/vf-graphql-holochain/resolvers/measure.ts
@@ -5,19 +5,23 @@
  * @since: 2019-12-24
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { mapZomeFn } from '../connection'
 import {
+  Maybe,
   Measure,
   Unit,
 } from '@valueflows/vf-graphql'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
   const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit')
   return {
-    hasUnit: async (record: Measure): Promise<Unit> => {
+    hasUnit: async (record: Measure): Promise<Maybe<Unit>> => {
+      if (!record.hasUnit) {
+        return null
+      }
       return (await readUnit({ id: record.hasUnit })).unit
     },
   }
diff --git a/modules/vf-graphql-holochain/resolvers/process.ts b/modules/vf-graphql-holochain/resolvers/process.ts
index 18856b00c..2e3f6367d 100644
--- a/modules/vf-graphql-holochain/resolvers/process.ts
+++ b/modules/vf-graphql-holochain/resolvers/process.ts
@@ -5,8 +5,8 @@
  * @since: 2019-09-12
  */
-import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES } from '../types'
-import { mapZomeFn } from '../connection'
+import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types'
+import { mapZomeFn, extractEdges } from '../connection'
 import {
   Process,
@@ -16,9 +16,9 @@ import {
   ProcessSpecification
 } from '@valueflows/vf-graphql'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge")
-  const hasPlanning = -1 !== enabledVFModules.indexOf("planning")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge)
+  const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning)
   const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events')
   const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments')
@@ -28,28 +28,34 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
   return Object.assign(
     {
       inputs: injectTypename('EconomicEvent', async (record: Process): Promise<EconomicEvent[]> => {
-        return (await readEvents({ params: { inputOf: record.id } })).map(({ economicEvent }) => economicEvent)
+        const results = await readEvents({ params: { inputOf: record.id } })
+        return extractEdges(results)
       }),
       outputs: injectTypename('EconomicEvent', async (record: Process): Promise<EconomicEvent[]> => {
-        return (await readEvents({ params: { outputOf: record.id } })).map(({ economicEvent }) => economicEvent)
+        const results = await readEvents({ params: { outputOf: record.id } })
+        return extractEdges(results)
       }),
     },
     (hasPlanning ? {
       committedInputs: injectTypename('Commitment', async (record: Process): Promise<Commitment[]> => {
-        return (await readCommitments({ params: { inputOf: record.id } })).map(({ commitment }) => commitment)
+        const results = await readCommitments({ params: { inputOf: record.id } })
+        return extractEdges(results)
      }),
      committedOutputs: injectTypename('Commitment', async (record: Process): Promise<Commitment[]> => {
-        return (await readCommitments({ params: { outputOf: record.id } })).map(({ commitment }) => commitment)
+        const results = await readCommitments({ params: { outputOf: record.id } })
+        return extractEdges(results)
      }),
      intendedInputs: async (record: Process): Promise<Intent[]> => {
-        return (await readIntents({ params: { inputOf: record.id } })).map(({ intent }) => intent)
+        const results = await readIntents({ params: { inputOf: record.id } })
+        return extractEdges(results)
      },
      intendedOutputs: async (record: Process): Promise<Intent[]> => {
-        return (await readIntents({ params: { outputOf: record.id } })).map(({ intent }) => intent)
+        const results = await readIntents({ params: { outputOf: record.id } })
+        return extractEdges(results)
      },
    } : {}),
    (hasKnowledge ? {
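The new `hasUnit` body is the one place this PR guards a nullable foreign key before the zome call rather than after it. Generalised as a sketch (names are hypothetical, not from the package), the pattern is:

type Maybe<T> = T | null

// Skip the zome round-trip entirely when the optional reference is unset;
// `readFn` stands in for any mapped zome reader such as `readUnit` above.
async function readOptional<T> (
  readFn: (args: { id: string }) => Promise<T>,
  id: string | undefined,
): Promise<Maybe<T>> {
  if (!id) {
    return null
  }
  return readFn({ id })
}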
diff --git a/modules/vf-graphql-holochain/resolvers/proposal.ts b/modules/vf-graphql-holochain/resolvers/proposal.ts
index ce44b4c4f..4c75b1c18 100644
--- a/modules/vf-graphql-holochain/resolvers/proposal.ts
+++ b/modules/vf-graphql-holochain/resolvers/proposal.ts
@@ -5,7 +5,7 @@
  * @since: 2019-08-27
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { mapZomeFn } from '../connection'
 import {
@@ -17,7 +17,7 @@
 const extractProposedTo = (data): ProposedTo => data.proposedTo
 const extractProposedIntent = (data): ProposedIntent => data.proposedIntent
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
   const readProposedTo = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposed_to', 'get_proposed_to')
   const readProposedIntent = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposed_intent', 'get_proposed_intent')
diff --git a/modules/vf-graphql-holochain/resolvers/proposedIntent.ts b/modules/vf-graphql-holochain/resolvers/proposedIntent.ts
index 7ac764d00..898d222d5 100644
--- a/modules/vf-graphql-holochain/resolvers/proposedIntent.ts
+++ b/modules/vf-graphql-holochain/resolvers/proposedIntent.ts
@@ -5,7 +5,7 @@
  * @since: 2019-08-27
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { mapZomeFn } from '../connection'
 import {
@@ -14,8 +14,8 @@ import {
   ProposedIntent,
 } from '@valueflows/vf-graphql'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasPlanning = -1 !== enabledVFModules.indexOf("planning")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning)
   const readProposal = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposal', 'get_proposal')
   const readIntent = mapZomeFn(dnaConfig, conductorUri, 'planning', 'intent', 'get_intent')
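`injectTypename`, used by the process and fulfillment resolvers above, wraps a resolver so union- and interface-typed fields carry an explicit GraphQL `__typename`. Below is a sketch consistent with the signature visible in the types.ts hunk further down; it is illustrative, not the package's exact code.

type Resolver = (record: any) => Promise<any>

function injectTypename (name: string, fn: Resolver): Resolver {
  return async (record: any): Promise<any> => {
    const data = await fn(record)
    // tag the resolved record so the GraphQL executor can discriminate unions
    return Object.assign({ __typename: name }, data)
  }
}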
diff --git a/modules/vf-graphql-holochain/resolvers/proposedTo.ts b/modules/vf-graphql-holochain/resolvers/proposedTo.ts
index b27b16dcd..8b5e7366a 100644
--- a/modules/vf-graphql-holochain/resolvers/proposedTo.ts
+++ b/modules/vf-graphql-holochain/resolvers/proposedTo.ts
@@ -5,7 +5,7 @@
  * @since: 2019-08-27
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { mapZomeFn } from '../connection'
 import {
@@ -16,8 +16,8 @@
 import agentQueries from '../queries/agent'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasAgent = -1 !== enabledVFModules.indexOf("agent")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent)
   const readProposal = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposal', 'get_proposal')
   const readAgent = agentQueries(dnaConfig, conductorUri)['agent']
diff --git a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts
index a01cae5ed..2811b3834 100644
--- a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts
+++ b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts
@@ -5,27 +5,27 @@
  * @since: 2019-08-27
  */
-import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types'
+import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
 import { mapZomeFn } from '../connection'
 import {
   Maybe,
-  EconomicResource,
+  EconomicResourceConnection,
   ResourceSpecification,
   Unit,
 } from '@valueflows/vf-graphql'
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement")
-  const hasObservation = -1 !== enabledVFModules.indexOf("observation")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement)
+  const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation)
   const queryResources = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_resource_index', 'query_economic_resources')
   const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit')
   return Object.assign(
     (hasObservation ? {
-      conformingResources: async (record: ResourceSpecification): Promise<EconomicResource[]> => {
-        return (await queryResources({ params: { conformsTo: record.id } })).results.map(({ economicResource }) => economicResource )
+      conformingResources: async (record: ResourceSpecification): Promise<EconomicResourceConnection> => {
+        return await queryResources({ params: { conformsTo: record.id } })
       },
     } : {}),
     (hasMeasurement ? {
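`conformingResources` now returns the whole `EconomicResourceConnection` untouched, so pagination metadata reaches the client instead of being discarded by a `.map`. Consuming such a connection might look like the sketch below; the `pageInfo` shape is the generic Relay convention, assumed here rather than taken from vf-graphql.

interface PageInfo {
  hasNextPage: boolean
  endCursor?: string
}
interface EconomicResourceConnection {
  edges: { node: { id: string }, cursor: string }[]
  pageInfo: PageInfo
}

// Derive the arguments for fetching the next page, if one exists.
function nextPageArgs (conn: EconomicResourceConnection): { after?: string } {
  return conn.pageInfo.hasNextPage ? { after: conn.pageInfo.endCursor } : {}
}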
diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
index 777bea2df..5e5a32b66 100644
--- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts
+++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
@@ -5,8 +5,8 @@
  * @since: 2019-08-31
  */
-import { DNAIdMappings, addTypename, DEFAULT_VF_MODULES } from '../types'
-import { mapZomeFn } from '../connection'
+import { DNAIdMappings, addTypename, DEFAULT_VF_MODULES, VfModule } from '../types'
+import { mapZomeFn, remapCellId } from '../connection'
 import {
   Satisfaction,
@@ -14,16 +14,16 @@ import {
   Intent,
 } from '@valueflows/vf-graphql'
-async function extractRecordsOrFail (query, subfieldId: string): Promise<any> {
+async function extractRecordsOrFail (query): Promise<any> {
   const val = await query
-  if (!val || !val.length || !val[0][subfieldId]) {
+  if (!val || !val.edges || !val.edges.length || !val.edges[0].node) {
     throw new Error('Reference not found')
   }
-  return val[0][subfieldId]
+  return val.edges[0].node
 }
-export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
-  const hasObservation = -1 !== enabledVFModules.indexOf("observation")
+export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
+  const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation)
   const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events')
   const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments')
@@ -31,14 +31,15 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
   return {
     satisfiedBy: async (record: Satisfaction): Promise<EventOrCommitment> => {
+      const associatedId = remapCellId(record.id, record.satisfiedBy)
       // :NOTE: this presumes a satisfaction will never be erroneously linked to 2 records
       return (
         await Promise.all([
-          extractRecordsOrFail(readCommitments({ params: { satisfies: record.id } }), 'commitment')
+          extractRecordsOrFail(readCommitments({ params: { satisfies: associatedId } }))
            .then(addTypename('Commitment'))
            .catch((e) => e),
        ].concat(hasObservation ? [
-          extractRecordsOrFail(readEvents({ params: { satisfies: record.id } }), 'economicEvent')
+          extractRecordsOrFail(readEvents({ params: { satisfies: associatedId } }))
            .then(addTypename('EconomicEvent'))
            .catch((e) => e),
        ] : []))
@@ -48,7 +49,8 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI
     },
     satisfies: async (record: Satisfaction): Promise<Intent> => {
-      return (await readIntents({ params: { satisfiedBy: record.id } })).pop()['intent']
+      const results = await readIntents({ params: { satisfiedBy: record.id } })
+      return results.edges.pop()['node']
     },
   }
 }
diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts
index 61e78ab12..7a40e0fe8 100644
--- a/modules/vf-graphql-holochain/types.ts
+++ b/modules/vf-graphql-holochain/types.ts
@@ -1,10 +1,6 @@
 /**
  * base types for GraphQL query layer
  *
- * @see https://github.com/valueflows/vf-graphql/blob/master/schemas/structs.gql
- *
- * @package: HoloREA prototype
- * @since: 2019-01-03
  * @package: HoloREA
  * @since: 2019-05-20
  */
@@ -16,23 +12,22 @@ import { Kind } from 'graphql/language'
 // Configuration object to allow specifying custom conductor DNA IDs to bind to.
 // Default is to use a DNA with the same ID as the mapping ID (ie.
agent = "agent") -export interface DNAMappings { - agent: CellId, - observation: CellId, - planning: CellId, - proposal: CellId, - specification: CellId, +export interface DNAIdMappings { + agent?: CellId, + agreement?: CellId, + observation?: CellId, + planning?: CellId, + proposal?: CellId, + specification?: CellId, } export { CellId } -export type DNAIdMappings = DNAMappings - // Options for resolver generator export interface ResolverOptions { // Array of ValueFlows module names to include in the schema // @see https://lab.allmende.io/valueflows/vf-schemas/vf-graphql#generating-schemas - enabledVFModules?: string[], + enabledVFModules?: VfModule[], // Mapping of DNA identifiers to runtime `CellId`s to bind to. dnaConfig: DNAIdMappings, @@ -82,13 +77,36 @@ export function injectTypename (name: string, fn: Resolver): Resolver { } } +// enum containing all the possible VF modules, including +// the ones that haven't been implemented within holo-rea yet +// -> https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/tree/sprout/lib/schemas +export enum VfModule { + Agent = 'agent', + Agreement = 'agreement', + Appreciation = 'appreciation', + Claim = 'claim', + Geolocation = 'geolocation', + History = 'history', + Knowledge = 'knowledge', + Measurement = 'measurement', + Observation = 'observation', + Plan = 'plan', + Planning = 'planning', + Proposal = 'proposal', + Recipe = 'recipe', + Scenario = 'scenario', +} + // default 'full suite' VF module set supported by Holo-REA export const DEFAULT_VF_MODULES = [ - 'knowledge', 'measurement', - 'agent', - 'observation', 'planning', - 'proposal', 'agreement', + VfModule.Agent, + VfModule.Agreement, + VfModule.Knowledge, + VfModule.Measurement, + VfModule.Observation, + VfModule.Planning, + VfModule.Proposal, ] // scalar types diff --git a/package.json b/package.json index 8d84b6a62..e3538900b 100644 --- a/package.json +++ b/package.json @@ -8,22 +8,24 @@ "preinstall": "npx only-allow pnpm && scripts/preinstall.sh", "postinstall": "scripts/postinstall.sh", "shell": "nix-shell", - "start": "npm run build; npm-run-all --parallel dev:graphql:adapter dev:graphql:client dev:graphql:explorer dht", - "build": "npm-run-all --parallel build:graphql build:crates", - "build:crates": "CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown && RUN_WASM_OPT=0 scripts/package-dnas.sh", + "start": "pnpm run build; npm-run-all --parallel dev:graphql:adapter dev:graphql:client dev:graphql:explorer dht", + "build": "npm-run-all --parallel build:graphql build:holochain:dev", + "build:crates": "CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown", + "build:holochain:dev": "npm run build:crates && RUN_WASM_OPT=0 scripts/package-dnas.sh", + "build:holochain:release": "npm run build:crates && RUN_WASM_OPT=1 BUNDLE_ZOMES=1 scripts/package-dnas.sh", "build:graphql": "npm-run-all build:graphql:adapter build:graphql:client", - "build:graphql:adapter": "cd modules/vf-graphql-holochain && npm run build", - "build:graphql:client": "cd modules/graphql-client && npm run build", - "build:apps": "npm run build:apps:explorer", - "build:apps:explorer": "npm run build:graphql:adapter && cd apps/holorea-graphql-explorer && npm run build", - "build:webhapp": "npm run build:apps:explorer && scripts/package-webhapp.sh", + "build:graphql:adapter": "cd modules/vf-graphql-holochain && pnpm run build", + "build:graphql:client": "cd modules/graphql-client && pnpm run build", + "build:apps": "pnpm run build:apps:explorer", + 
"build:apps:explorer": "pnpm run build:graphql:adapter && cd apps/holorea-graphql-explorer && pnpm run build", + "build:webhapp": "pnpm run build:apps:explorer && scripts/package-webhapp.sh", "build:example:custom-resource-attributes": "cd example/custom-resource-attributes && ./build-deps.sh && hc package", "build:example:knowledge-system-extensions": "cd example/knowledge-system-extensions && ./build-deps.sh && hc package", - "dht": "npm run dht:conductor", + "dht": "pnpm run dht:conductor", "dht:conductor": "scripts/run-dev-conductor.sh", "dev": "npm-run-all --parallel watch dev:graphql:adapter dev:graphql:client dev:graphql:explorer", - "dev:graphql:adapter": "cd modules/vf-graphql-holochain && npm run dev", - "dev:graphql:client": "cd modules/graphql-client && npm run dev", + "dev:graphql:adapter": "cd modules/vf-graphql-holochain && pnpm run dev", + "dev:graphql:client": "cd modules/graphql-client && pnpm run dev", "dev:graphql:explorer": "cd apps/holorea-graphql-explorer && npm start", "watch": "npm-watch", "test": "npm-run-all test:unit test:integration", @@ -31,7 +33,7 @@ "test:integration": "cd test && npm test", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", - "clean:build": "nix-shell --run hn-flush && rm happs/**/*.dna" + "clean:build": "scripts/clean-build.sh" }, "watch": { "test:unit": { @@ -43,7 +45,7 @@ "test:integration": { "patterns": [ "test", - "happs", + "bundles/dna", "modules/vf-graphql-holochain/build", "modules/graphql-client/build" ], @@ -51,7 +53,7 @@ }, "dht": { "patterns": [ - "bundles" + "bundles/app" ], "extensions": ".happ" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 53f814b6b..01dfb0ca4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -33,7 +33,7 @@ importers: typescript: 4.5.5 dependencies: '@apollo/client': 3.5.8_graphql@16.3.0+react@16.13.1 - '@valueflows/vf-graphql-holochain': link:../../modules/vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': link:../../modules/vf-graphql-holochain graphiql: 1.5.16_355f8d31a5628c68d7d9f31bdf4bf39a graphiql-explorer: 0.6.3_660a82361cf03a823b6e74d2a302c521 graphql: 16.3.0 @@ -55,33 +55,14 @@ importers: specifiers: '@apollo/client': ^3.5.7 '@apollo/link-schema': ^2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain/build - tslib: ^2.0.0 - tslint: 5.16.0 - tslint-config-standard: 8.0.1 - typescript: 4.5.5 - dependencies: - '@apollo/link-schema': 2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain/build - tslib: 2.3.1 - devDependencies: - '@apollo/client': 3.5.8 - tslint: 5.16.0_typescript@4.5.5 - tslint-config-standard: 8.0.1_tslint@5.16.0+typescript@4.5.5 - typescript: 4.5.5 - - modules/graphql-client/build: - specifiers: - '@apollo/client': ^3.5.7 - '@apollo/link-schema': ^2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../../vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': workspace:* tslib: ^2.0.0 tslint: 5.16.0 tslint-config-standard: 8.0.1 typescript: 4.5.5 dependencies: '@apollo/link-schema': 2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../../vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain tslib: 2.3.1 devDependencies: '@apollo/client': 3.5.8 @@ -94,7 +75,7 @@ importers: '@graphql-tools/schema': ^8.3.1 '@graphql-tools/utils': ^8.6.1 '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2 + '@valueflows/vf-graphql': 0.9.0-alpha.3 buffer: ^6.0.3 dataloader: ^1.4.0 deep-for-each: ^3.0.0 
@@ -111,44 +92,7 @@ importers: '@graphql-tools/schema': 8.3.1_graphql@16.3.0 '@graphql-tools/utils': 8.6.1_graphql@16.3.0 '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 - buffer: 6.0.3 - dataloader: 1.4.0 - deep-for-each: 3.0.0 - fecha: 4.2.1 - graphql-scalars: 1.14.1_graphql@16.3.0 - is-object: 1.0.2 - js-base64: 3.7.2 - devDependencies: - graphql: 16.3.0 - tslib: 1.10.0 - tslint: 5.16.0_typescript@4.5.5 - tslint-config-standard: 8.0.1_tslint@5.16.0+typescript@4.5.5 - typescript: 4.5.5 - - modules/vf-graphql-holochain/build: - specifiers: - '@graphql-tools/schema': ^8.3.1 - '@graphql-tools/utils': ^8.6.1 - '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2 - buffer: ^6.0.3 - dataloader: ^1.4.0 - deep-for-each: ^3.0.0 - fecha: ^4.1.0 - graphql: ^16.2.0 - graphql-scalars: ^1.14.1 - is-object: ^1.0.2 - js-base64: ^3.6.0 - tslib: 1.10.0 - tslint: 5.16.0 - tslint-config-standard: 8.0.1 - typescript: 4.5.5 - dependencies: - '@graphql-tools/schema': 8.3.1_graphql@16.3.0 - '@graphql-tools/utils': 8.6.1_graphql@16.3.0 - '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 + '@valueflows/vf-graphql': 0.9.0-alpha.3_graphql@16.3.0 buffer: 6.0.3 dataloader: 1.4.0 deep-for-each: 3.0.0 @@ -167,7 +111,7 @@ importers: specifiers: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 - '@valueflows/vf-graphql': 0.9.0-alpha.2 + '@valueflows/vf-graphql': 0.9.0-alpha.3 '@valueflows/vf-graphql-holochain': workspace:* deep-for-each: ^3.0.0 easygraphql-tester: 6.0.1 @@ -178,12 +122,13 @@ importers: eslint-plugin-promise: ^4.1.1 eslint-plugin-standard: ^4.0.0 faucet: ^0.0.1 - graphql: ^16.2.0 + graphql: 15.8.0 is-function: ^1.0.1 js-base64: ^3.6.0 json3: ^3.3.2 randombytes: ^2.1.0 source-map-support: ^0.5.16 + tap-dot: 2.0.0 tape: ^4.9.2 dependencies: js-base64: 3.7.2 @@ -191,10 +136,10 @@ importers: devDependencies: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 - '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain/build + '@valueflows/vf-graphql': 0.9.0-alpha.3_graphql@15.8.0 + '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain deep-for-each: 3.0.0 - easygraphql-tester: 6.0.1_graphql@16.3.0 + easygraphql-tester: 6.0.1_graphql@15.8.0 eslint: 5.16.0 eslint-config-standard: 12.0.0_68c275d3ee18e545b4013e907c923945 eslint-plugin-import: 2.25.4_eslint@5.16.0 @@ -202,10 +147,11 @@ importers: eslint-plugin-promise: 4.3.1 eslint-plugin-standard: 4.1.0_eslint@5.16.0 faucet: 0.0.1 - graphql: 16.3.0 + graphql: 15.8.0 is-function: 1.0.2 json3: 3.3.3 source-map-support: 0.5.21 + tap-dot: 2.0.0 tape: 4.15.0 packages: @@ -1822,14 +1768,24 @@ packages: tslib: 2.3.1 dev: false - /@graphql-tools/merge/6.2.17_graphql@16.3.0: + /@graphql-tools/merge/6.2.17_graphql@15.8.0: resolution: {integrity: sha512-G5YrOew39fZf16VIrc49q3c8dBqQDD0ax5LYPiNja00xsXDi0T9zsEWVt06ApjtSdSF6HDddlu5S12QjeN8Tow==} peerDependencies: graphql: ^14.0.0 || ^15.0.0 dependencies: - '@graphql-tools/schema': 8.3.1_graphql@16.3.0 - '@graphql-tools/utils': 8.0.2_graphql@16.3.0 - graphql: 16.3.0 + '@graphql-tools/schema': 8.3.1_graphql@15.8.0 + '@graphql-tools/utils': 8.0.2_graphql@15.8.0 + graphql: 15.8.0 + tslib: 2.3.1 + dev: true + + /@graphql-tools/merge/8.2.1_graphql@15.8.0: + resolution: {integrity: sha512-Q240kcUszhXiAYudjuJgNuLgy9CryDP3wp83NOZQezfA6h3ByYKU7xI6DiKrdjyVaGpYN3ppUmdj0uf5GaXzMA==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || 
^16.0.0 + dependencies: + '@graphql-tools/utils': 8.6.1_graphql@15.8.0 + graphql: 15.8.0 tslib: 2.3.1 dev: true @@ -1841,6 +1797,19 @@ packages: '@graphql-tools/utils': 8.6.1_graphql@16.3.0 graphql: 16.3.0 tslib: 2.3.1 + dev: false + + /@graphql-tools/schema/8.3.1_graphql@15.8.0: + resolution: {integrity: sha512-3R0AJFe715p4GwF067G5i0KCr/XIdvSfDLvTLEiTDQ8V/hwbOHEKHKWlEBHGRQwkG5lwFQlW1aOn7VnlPERnWQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-tools/merge': 8.2.1_graphql@15.8.0 + '@graphql-tools/utils': 8.6.1_graphql@15.8.0 + graphql: 15.8.0 + tslib: 2.3.1 + value-or-promise: 1.0.11 + dev: true /@graphql-tools/schema/8.3.1_graphql@16.3.0: resolution: {integrity: sha512-3R0AJFe715p4GwF067G5i0KCr/XIdvSfDLvTLEiTDQ8V/hwbOHEKHKWlEBHGRQwkG5lwFQlW1aOn7VnlPERnWQ==} @@ -1852,6 +1821,7 @@ packages: graphql: 16.3.0 tslib: 2.3.1 value-or-promise: 1.0.11 + dev: false /@graphql-tools/url-loader/7.7.1_82e2361edcf6fd636da034b389a21d41: resolution: {integrity: sha512-K/5amdeHtKYI976HVd/AXdSNvLL7vx5QVjMlwN0OHeYyxSgC+UOH+KkS7cshYgL13SekGu0Mxbg9ABfgQ34ECA==} @@ -1885,12 +1855,21 @@ packages: - utf-8-validate dev: false - /@graphql-tools/utils/8.0.2_graphql@16.3.0: + /@graphql-tools/utils/8.0.2_graphql@15.8.0: resolution: {integrity: sha512-gzkavMOgbhnwkHJYg32Adv6f+LxjbQmmbdD5Hty0+CWxvaiuJq+nU6tzb/7VSU4cwhbNLx/lGu2jbCPEW1McZQ==} peerDependencies: graphql: ^14.0.0 || ^15.0.0 dependencies: - graphql: 16.3.0 + graphql: 15.8.0 + tslib: 2.3.1 + dev: true + + /@graphql-tools/utils/8.6.1_graphql@15.8.0: + resolution: {integrity: sha512-uxcfHCocp4ENoIiovPxUWZEHOnbXqj3ekWc0rm7fUhW93a1xheARNHcNKhwMTR+UKXVJbTFQdGI1Rl5XdyvDBg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + graphql: 15.8.0 tslib: 2.3.1 dev: true @@ -1901,6 +1880,7 @@ packages: dependencies: graphql: 16.3.0 tslib: 2.3.1 + dev: false /@graphql-tools/wrap/8.3.3_graphql@16.3.0: resolution: {integrity: sha512-TpXN1S4Cv+oMA1Zsg9Nu4N9yrFxLuJkX+CTtSRrrdfETGHIxqfyDkm5slPDCckxP+RILA00g8ny2jzsYyNvX1w==} @@ -2894,13 +2874,23 @@ packages: eslint-visitor-keys: 3.2.0 dev: true - /@valueflows/vf-graphql/0.9.0-alpha.2_graphql@16.3.0: - resolution: {integrity: sha512-vwO+6srw8/uiQ/VjSNOJVWoDJnlUpLyrq/EWw7Q6OjR2mKmIsHdE8RDeOvJKs1fGuDiTfxM4Nw07jb2pjymbvg==} + /@valueflows/vf-graphql/0.9.0-alpha.3_graphql@15.8.0: + resolution: {integrity: sha512-yIfLba+KNA6A9e3jJcKACKRv05cmHB/76v5yykSn3gNUIpx9NhYA8ggDVev+PZL5B2smer4zUxwWHKc3LN16wQ==} + peerDependencies: + graphql: '>=14' + dependencies: + '@graphql-tools/merge': 8.2.1_graphql@15.8.0 + graphql: 15.8.0 + dev: true + + /@valueflows/vf-graphql/0.9.0-alpha.3_graphql@16.3.0: + resolution: {integrity: sha512-yIfLba+KNA6A9e3jJcKACKRv05cmHB/76v5yykSn3gNUIpx9NhYA8ggDVev+PZL5B2smer4zUxwWHKc3LN16wQ==} peerDependencies: graphql: '>=14' dependencies: '@graphql-tools/merge': 8.2.1_graphql@16.3.0 graphql: 16.3.0 + dev: false /@webassemblyjs/ast/1.11.1: resolution: {integrity: sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==} @@ -3228,6 +3218,11 @@ packages: hasBin: true dev: true + /ansi-regex/2.1.1: + resolution: {integrity: sha1-w7M6te42DYbg5ijwRorn7yfWVN8=} + engines: {node: '>=0.10.0'} + dev: true + /ansi-regex/3.0.0: resolution: {integrity: sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=} engines: {node: '>=4'} @@ -3248,6 +3243,11 @@ packages: engines: {node: '>=12'} dev: true + /ansi-styles/2.2.1: + resolution: {integrity: sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=} + engines: {node: '>=0.10.0'} + dev: true + /ansi-styles/3.2.1: 
resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} @@ -3281,26 +3281,26 @@ packages: picomatch: 2.3.1 dev: true - /apollo-link/1.2.14_graphql@16.3.0: + /apollo-link/1.2.14_graphql@15.8.0: resolution: {integrity: sha512-p67CMEFP7kOG1JZ0ZkYZwRDa369w5PIjtMjvrQd/HnIV8FRsHRqLqK+oAZQnFa1DDdZtOtHTi+aMIW6EatC2jg==} peerDependencies: graphql: ^0.11.3 || ^0.12.3 || ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - apollo-utilities: 1.3.4_graphql@16.3.0 - graphql: 16.3.0 + apollo-utilities: 1.3.4_graphql@15.8.0 + graphql: 15.8.0 ts-invariant: 0.4.4 tslib: 1.14.1 zen-observable-ts: 0.8.21 dev: true - /apollo-utilities/1.3.4_graphql@16.3.0: + /apollo-utilities/1.3.4_graphql@15.8.0: resolution: {integrity: sha512-pk2hiWrCXMAy2fRPwEyhvka+mqwzeP60Jr1tRYi5xru+3ko94HI9o6lK0CT33/w4RDlxWchmdhDCrvdr+pHCig==} peerDependencies: graphql: ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: '@wry/equality': 0.1.11 fast-json-stable-stringify: 2.1.0 - graphql: 16.3.0 + graphql: 15.8.0 ts-invariant: 0.4.4 tslib: 1.14.1 dev: true @@ -4012,6 +4012,17 @@ packages: engines: {node: '>=4'} dev: true + /chalk/1.1.3: + resolution: {integrity: sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-styles: 2.2.1 + escape-string-regexp: 1.0.5 + has-ansi: 2.0.0 + strip-ansi: 3.0.1 + supports-color: 2.0.0 + dev: true + /chalk/2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -5171,33 +5182,33 @@ packages: resolution: {integrity: sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=} dev: true - /easygraphql-mock/0.1.17_graphql@16.3.0: + /easygraphql-mock/0.1.17_graphql@15.8.0: resolution: {integrity: sha512-J+OLxUfV0dw5LjMlargd9iAjtur7ifgrC0djZQgDnxXLj0g5K0JQX48cY9S95Tw4MXZP24y79w5MJtJxPcDwgQ==} dependencies: chance: 1.1.8 - easygraphql-parser: 0.0.15_graphql@16.3.0 + easygraphql-parser: 0.0.15_graphql@15.8.0 transitivePeerDependencies: - graphql dev: true - /easygraphql-parser/0.0.15_graphql@16.3.0: + /easygraphql-parser/0.0.15_graphql@15.8.0: resolution: {integrity: sha512-0fEXFnFlIjgyo1rxBmEsOa1wYZwIEm5Qk3qLR1bY4d7iMsPNIPKy4M6eohyQHYXww0v3RYWrHNoks0QnktJ9bw==} peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - '@graphql-tools/merge': 6.2.17_graphql@16.3.0 - graphql: 16.3.0 + '@graphql-tools/merge': 6.2.17_graphql@15.8.0 + graphql: 15.8.0 dev: true - /easygraphql-tester/6.0.1_graphql@16.3.0: + /easygraphql-tester/6.0.1_graphql@15.8.0: resolution: {integrity: sha512-Xn7wi5g8cep1QCy5wq5e7ZpfEPVUD0SnR+lwCogWXLmNnGhaw6jW9Gzmt00NSa4qyB5LHo1h6dcF+u8zYFwGrA==} peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - easygraphql-mock: 0.1.17_graphql@16.3.0 - easygraphql-parser: 0.0.15_graphql@16.3.0 - graphql: 16.3.0 - graphql-tools: 4.0.8_graphql@16.3.0 + easygraphql-mock: 0.1.17_graphql@15.8.0 + easygraphql-parser: 0.0.15_graphql@15.8.0 + graphql: 15.8.0 + graphql-tools: 4.0.8_graphql@15.8.0 lodash.isobject: 3.0.2 dev: true @@ -6790,16 +6801,16 @@ packages: tslib: 2.3.1 dev: false - /graphql-tools/4.0.8_graphql@16.3.0: + /graphql-tools/4.0.8_graphql@15.8.0: resolution: {integrity: sha512-MW+ioleBrwhRjalKjYaLQbr+920pHBgy9vM/n47sswtns8+96sRn5M/G+J1eu7IMeKWiN/9p6tmwCHU7552VJg==} deprecated: This package has been deprecated and now it only exports makeExecutableSchema.\nAnd it will no longer receive updates.\nWe recommend you to migrate to scoped packages such as @graphql-tools/schema, 
@graphql-tools/utils and etc.\nCheck out https://www.graphql-tools.com to learn what package you should use instead peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - apollo-link: 1.2.14_graphql@16.3.0 - apollo-utilities: 1.3.4_graphql@16.3.0 + apollo-link: 1.2.14_graphql@15.8.0 + apollo-utilities: 1.3.4_graphql@15.8.0 deprecated-decorator: 0.1.6 - graphql: 16.3.0 + graphql: 15.8.0 iterall: 1.3.0 uuid: 3.4.0 dev: true @@ -6813,6 +6824,11 @@ packages: graphql: 16.3.0 dev: false + /graphql/15.8.0: + resolution: {integrity: sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw==} + engines: {node: '>= 10.x'} + dev: true + /graphql/16.3.0: resolution: {integrity: sha512-xm+ANmA16BzCT5pLjuXySbQVFwH3oJctUVdy81w1sV0vBU0KgDdBGtxQOUd5zqOBk/JayAFeG8Dlmeq74rjm/A==} engines: {node: ^12.22.0 || ^14.16.0 || >=16.0.0} @@ -6832,6 +6848,13 @@ packages: resolution: {integrity: sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g==} dev: true + /has-ansi/2.0.0: + resolution: {integrity: sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-regex: 2.1.1 + dev: true + /has-bigints/1.0.1: resolution: {integrity: sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==} dev: true @@ -10462,6 +10485,10 @@ packages: strip-json-comments: 2.0.1 dev: true + /re-emitter/1.1.4: + resolution: {integrity: sha512-C0SIXdXDSus2yqqvV7qifnb4NoWP7mEBXJq3axci301mXHCZb8Djwm4hrEZo4UeXRaEnfjH98uQ8EBppk2oNWA==} + dev: true + /react-app-polyfill/3.0.0: resolution: {integrity: sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w==} engines: {node: '>=14'} @@ -11445,6 +11472,12 @@ packages: extend-shallow: 3.0.2 dev: true + /split/1.0.1: + resolution: {integrity: sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==} + dependencies: + through: 2.3.8 + dev: true + /sprintf-js/1.0.3: resolution: {integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=} dev: true @@ -11607,6 +11640,13 @@ packages: is-regexp: 1.0.0 dev: true + /strip-ansi/3.0.1: + resolution: {integrity: sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-regex: 2.1.1 + dev: true + /strip-ansi/4.0.0: resolution: {integrity: sha1-qEeQIusaw2iocTibY1JixQXuNo8=} engines: {node: '>=4'} @@ -11710,6 +11750,11 @@ packages: - utf-8-validate dev: false + /supports-color/2.0.0: + resolution: {integrity: sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=} + engines: {node: '>=0.8.0'} + dev: true + /supports-color/5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} @@ -11855,6 +11900,25 @@ packages: - ts-node dev: true + /tap-dot/2.0.0: + resolution: {integrity: sha512-7N1yPcRDgdfHCUbG6lZ0hXo53NyXhKIjJNhqKBixl9HVEG4QasG16Nlvr8wRnqr2ZRYVWmbmxwF3NOBbTLtQLQ==} + hasBin: true + dependencies: + chalk: 1.1.3 + tap-out: 1.4.2 + through2: 2.0.5 + dev: true + + /tap-out/1.4.2: + resolution: {integrity: sha1-yQfsG/lAURHQiCY+kvVgi4jLs3o=} + hasBin: true + dependencies: + re-emitter: 1.1.4 + readable-stream: 2.3.7 + split: 1.0.1 + trim: 0.0.1 + dev: true + /tap-parser/0.4.3: resolution: {integrity: sha1-pOrhkMENdsehEZIf84u+TVjwnuo=} dependencies: @@ -12188,6 +12252,10 @@ packages: punycode: 2.1.1 dev: true + /trim/0.0.1: + resolution: {integrity: sha1-WFhUf2spB1fulczMZm+1AITEYN0=} + dev: true + /triple-beam/1.3.0: resolution: {integrity: 
sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==} dev: true diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index a98ebe729..e647d095e 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,5 +1,6 @@ packages: - 'apps/**' - 'example/**' - - 'modules/**' + - 'modules/graphql-client' + - 'modules/vf-graphql-holochain' - 'test/**' diff --git a/scripts/clean-build.sh b/scripts/clean-build.sh index 5a0d289cb..e6a2dd133 100755 --- a/scripts/clean-build.sh +++ b/scripts/clean-build.sh @@ -7,8 +7,9 @@ # ## -rm -Rf happs/**/dist -rm -Rf happs/**/zomes/**/code/target +nix-shell --run hn-node-flush +nix-shell --run hn-rust-flush -# :IMPORTANT: after updating Holochain this can be needed to avoid unmet dependency errors -cargo update +rm -Rf bundles/dna/ +rm -Rf bundles/app/ +rm bundles/web-app/*.webhapp diff --git a/scripts/clean-modules.sh b/scripts/clean-modules.sh index a4891fc3f..4931d2708 100755 --- a/scripts/clean-modules.sh +++ b/scripts/clean-modules.sh @@ -8,6 +8,7 @@ ## rm -Rf node_modules +rm -Rf modules/**/build for DIR in $(find -type d -iname node_modules); do echo " Remove $DIR" diff --git a/scripts/fixTryoramaTimeout.js b/scripts/fixTryoramaTimeout.js new file mode 100755 index 000000000..7b9c579b6 --- /dev/null +++ b/scripts/fixTryoramaTimeout.js @@ -0,0 +1,21 @@ +/// Temporary script to fix Tryorama websocket timeouts +/// for tests involving DNAs in excess of 16MB.of + +const fs = require('fs') +const path = require('path') + +const filePath = path.resolve(__dirname, '../node_modules/.pnpm/@holochain+client@0.3.2/node_modules/@holochain/client/lib/websocket/common.js') + +if (!fs.existsSync(filePath)) { + console.error('Unable to find Tryorama websocket file for patching. Was it updated? Is this script still needed?') + process.exit(1) +} + +const contents = fs.readFileSync(filePath) + '' + +fs.writeFileSync(filePath, contents.replace( + /exports\.DEFAULT_TIMEOUT\s*=\s*\d+/, + 'exports.DEFAULT_TIMEOUT = 50000' +)) + +console.log('Tryorama websocket timeout patched successfully!') diff --git a/scripts/package-dnas.sh b/scripts/package-dnas.sh index 05ef4064b..db7768533 100755 --- a/scripts/package-dnas.sh +++ b/scripts/package-dnas.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # -# Runs the Holochain DNA bundler utility against all configurations in the `happs` dir +# Runs the Holochain DNA bundler utility against all configurations in the `dna_bundles` dir # # @package: Holo-REA # @since: 2021-02-09 @@ -9,17 +9,41 @@ UTIL="${HOLOCHAIN_DNA_UTIL_PATH:-hc}" +# determine repository root for substitution +ROOT_PATH=$(dirname "$0") +ROOT_PATH=$(cd "$ROOT_PATH" && pwd) +ROOT_PATH=$(dirname "$ROOT_PATH") +ROOT_PATH=$(printf '%s\n' "$ROOT_PATH" | sed -e 's/[\/&]/\\&/g') # make safe for sed + # optimise all WASMs first -if [ $RUN_WASM_OPT -ne "0" ]; then +if [[ $RUN_WASM_OPT -ne "0" ]]; then for WASM in target/wasm32-unknown-unknown/release/*.wasm; do echo -e "\e[1mOptimising $WASM\e[0m..." 
wasm-opt -Oz "$WASM" --output "$WASM" done fi +# remove any stale DNA & app bundle files; refresh from templates +rm -Rf bundles/dna +cp -a bundles/dna_templates bundles/dna +rm -Rf bundles/app +cp -a bundles/app_templates bundles/app + +# sed -i.bak works on both mac and linux +# https://stackoverflow.com/a/22084103/2132755 + # compile DNAs by concatenating WASMs with properties -for DIR in happs/*; do +for DIR in bundles/dna/*; do if [[ -d "$DIR" ]]; then + # @see https://github.com/holochain/holochain/issues/966 + # toggle `path`/`bundled` depending on build mode + if [[ $BUNDLE_ZOMES -eq "1" ]]; then + sed -i.bak "s/path:/bundled:/g" "$DIR/dna.yaml" + fi + # substitute absolute paths for compatibility with `path` or `bundled` + sed -i.bak "s//${ROOT_PATH}/g" "$DIR/dna.yaml" + rm "$DIR/dna.yaml.bak" + echo -e "\e[1mCompiling DNA in $DIR\e[0m" if "$UTIL" dna pack "$DIR" 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" @@ -30,8 +54,19 @@ for DIR in happs/*; do done # compile hApp bundles by concatenating DNAs and specifying any config -for DIR in bundles/*; do +for DIR in bundles/app/*; do if [[ -d "$DIR" ]]; then + # @see https://github.com/holochain/holochain/issues/966 + # toggle `url`/`bundled` and inject paths depending on defn of release download URL + if [[ -n "$RELEASE_DOWNLOAD_URL" ]]; then + RELEASE_DOWNLOAD_URL=$(printf '%s\n' "$RELEASE_DOWNLOAD_URL" | sed -e 's/[\/&]/\\&/g') # make safe for sed + sed -i.bak "s/\\/\\w*/${RELEASE_DOWNLOAD_URL}/g" "$DIR/happ.yaml" + sed -i.bak "s/bundled:/url:/g" "$DIR/happ.yaml" + else + sed -i.bak "s//${ROOT_PATH}\/bundles\/dna/g" "$DIR/happ.yaml" + fi + rm "$DIR/happ.yaml.bak" + echo -e "\e[1mBundling hApp in $DIR\e[0m" if "$UTIL" app pack "$DIR" 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" diff --git a/scripts/package-webhapp.sh b/scripts/package-webhapp.sh index 172b6a040..84582c074 100755 --- a/scripts/package-webhapp.sh +++ b/scripts/package-webhapp.sh @@ -10,7 +10,7 @@ UTIL="${HOLOCHAIN_DNA_UTIL_PATH:-hc}" echo -e "\e[1mPacking webhapp\e[0m" -if "$UTIL" web-app pack webhapp 2>/dev/null; then +if "$UTIL" web-app pack bundles/web-app 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" else echo -e "\e[1;31m [FAIL]\e[0m" diff --git a/scripts/postinstall.sh b/scripts/postinstall.sh index 09e78f0b0..a597147c5 100755 --- a/scripts/postinstall.sh +++ b/scripts/postinstall.sh @@ -50,3 +50,5 @@ else exit 1 } fi + +node scripts/fixTryoramaTimeout.js diff --git a/scripts/run-dev-conductor.sh b/scripts/run-dev-conductor.sh index 87b0cb0bc..9e7939775 100755 --- a/scripts/run-dev-conductor.sh +++ b/scripts/run-dev-conductor.sh @@ -16,5 +16,5 @@ APP="${HOLOCHAIN_APP_PORT:-4000}" "$UTIL" s clean "$UTIL" s create -n 1 -d hrea_tester network quic -"$UTIL" s call install-app-bundle ./bundles/obs_and_agent/hrea_obs_agent.happ +"$UTIL" s call install-app-bundle ./bundles/app/full_suite/hrea_suite.happ "$UTIL" s run --all -p $APP diff --git a/test/.skip_tests b/test/.skip_tests new file mode 100644 index 000000000..e9476d3f5 --- /dev/null +++ b/test/.skip_tests @@ -0,0 +1 @@ +skip_*/*.js \ No newline at end of file diff --git a/test/agreement/test_agreement_links.js b/test/agreement/test_agreement_links.js index 8d7cf04ea..f17e22e15 100644 --- a/test/agreement/test_agreement_links.js +++ b/test/agreement/test_agreement_links.js @@ -3,30 +3,25 @@ const { buildConfig, buildRunner, buildPlayer, - bridge, + mockIdentifier, + mockAgentId, + sortById, } = require('../init') const runner = buildRunner() -const config = 
buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), - agreement: getDNA('agreement'), -}, [ - bridge('vf_agreement', 'planning', 'agreement'), - bridge('vf_agreement', 'observation', 'agreement'), -]) +const config = buildConfig() const testEventProps = { action: 'raise', resourceClassifiedAs: ['some-resource-type'], - resourceQuantity: { hasNumericalValue: 1, hasUnit: 'dangling-unit-todo-tidy-up' }, - provider: 'agentid-1-todo', - receiver: 'agentid-2-todo', + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier() }, + provider: mockAgentId(), + receiver: mockAgentId(), } runner.registerScenario('Agreement links & queries', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const alice = await buildPlayer(s, config, ['observation', 'planning', 'agreement']) let resp = await alice.graphQL(` mutation($rs: AgreementCreateParams!) { @@ -154,12 +149,19 @@ runner.registerScenario('Agreement links & queries', async (s, t) => { } } `) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedCIds = [{ id: cId }, { id: c2Id }].sort(sortById) + resp.data.agreement.commitments.sort(sortById) + const sortedEIds = [{ id: eId }, { id: e2Id }].sort(sortById) + resp.data.agreement.economicEvents.sort(sortById) + t.equal(resp.data.agreement.commitments.length, 2, '2nd commitment ref added') - t.equal(resp.data.agreement.commitments[0].id, c2Id, 'commitment ref 1 OK') - t.equal(resp.data.agreement.commitments[1].id, cId, 'commitment ref 2 OK') + t.equal(resp.data.agreement.commitments[0].id, sortedCIds[0].id, 'commitment ref 1 OK') + t.equal(resp.data.agreement.commitments[1].id, sortedCIds[1].id, 'commitment ref 2 OK') t.equal(resp.data.agreement.economicEvents.length, 2, '2nd event ref added') - t.equal(resp.data.agreement.economicEvents[0].id, e2Id, 'event ref 1 OK') - t.equal(resp.data.agreement.economicEvents[1].id, eId, 'event ref 2 OK') + t.equal(resp.data.agreement.economicEvents[0].id, sortedEIds[0].id, 'event ref 1 OK') + t.equal(resp.data.agreement.economicEvents[1].id, sortedEIds[1].id, 'event ref 2 OK') }) runner.run() diff --git a/test/core-architecture/test_record_links_cross_dna.js b/test/core-architecture/test_record_links_cross_dna.js index cb867c7cf..8caf3567f 100644 --- a/test/core-architecture/test_record_links_cross_dna.js +++ b/test/core-architecture/test_record_links_cross_dna.js @@ -1,145 +1,153 @@ const { - getDNA, buildConfig, + buildPlayer, buildRunner, + mockAgentId, + mockIdentifier, } = require('../init') const runner = buildRunner() -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), -}, { - vf_observation: ['planning', 'observation'], -}) +const config = buildConfig() const testEventProps = { action: 'consume', resourceClassifiedAs: ['some-resource-type'], - resourceQuantity: { hasNumericalValue: 1, hasUnit: 'dangling-unit-todo-tidy-up' }, - provider: 'agentid-1-todo', - receiver: 'agentid-2-todo', + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier(false) }, + provider: mockAgentId(false), + receiver: mockAgentId(false), due: '2019-11-19T04:29:55.056Z', } runner.registerScenario('updating remote link fields syncs fields and associated indexes', async (s, t) => { - const { alice } = await s.players({ alice: config }, true) + const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) // SCENARIO: write initial records const process = { name: 'context 
record for testing relationships', } - const pResp = await alice.call('observation', 'process', 'create_process', { process }) - t.ok(pResp.Ok.process && pResp.Ok.process.id, 'target record created successfully') + const pResp = await observation.call('process', 'create_process', { process }) + t.ok(pResp.process && pResp.process.id, 'target record created successfully') await s.consistency() - const processId = pResp.Ok.process.id + const processId = pResp.process.id const process2 = { name: 'second context record for testing relationships', } - const pResp2 = await alice.call('observation', 'process', 'create_process', { process: process2 }) - t.ok(pResp2.Ok.process && pResp2.Ok.process.id, 'secondary record created successfully') + const pResp2 = await observation.call('process', 'create_process', { process: process2 }) + t.ok(pResp2.process && pResp2.process.id, 'secondary record created successfully') await s.consistency() - const differentProcessId = pResp2.Ok.process.id + const differentProcessId = pResp2.process.id const iCommitment = { note: 'test input commitment', inputOf: processId, ...testEventProps, } - const icResp = await alice.call('planning', 'commitment', 'create_commitment', { commitment: iCommitment }) - t.ok(icResp.Ok.commitment && icResp.Ok.commitment.id, 'input record created successfully') - t.equal(icResp.Ok.commitment.inputOf, processId, 'field reference OK in write') + const icResp = await planning.call('commitment', 'create_commitment', { commitment: iCommitment }) + t.ok(icResp.commitment && icResp.commitment.id, 'input record created successfully') + t.deepEqual(icResp.commitment.inputOf, processId, 'field reference OK in write') await s.consistency() - const iCommitmentId = icResp.Ok.commitment.id + const iCommitmentId = icResp.commitment.id + const iCommitmentRevisionId = icResp.commitment.revisionId // ASSERT: test forward link field - let readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, processId, 'field reference OK on read') + let readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, processId, 'field reference OK on read') // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - t.equal(readResponse.Ok.process - && readResponse.Ok.process.committedInputs - && readResponse.Ok.process.committedInputs[0], iCommitmentId, 'reciprocal field reference OK on read') + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.deepEqual(readResponse.process + && readResponse.process.committedInputs + && readResponse.process.committedInputs[0], iCommitmentId, 'reciprocal field reference OK on read') // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'field query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].commitment && readResponse.Ok[0].commitment.id, iCommitmentId, 'query index OK') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + 
t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iCommitmentId, 'query index OK') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].process && readResponse.Ok[0].process.id, processId, 'reciprocal query index OK') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // SCENARIO: update link field const updateCommitment = { id: iCommitmentId, + revisionId: iCommitmentRevisionId, inputOf: differentProcessId, } - const ieResp2 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: updateCommitment }) - t.equal(ieResp2.Ok.commitment && ieResp2.Ok.commitment.inputOf, differentProcessId, 'record link field updated successfully') + const ieResp2 = await planning.call('commitment', 'update_commitment', { commitment: updateCommitment }) + t.deepEqual(ieResp2.commitment && ieResp2.commitment.inputOf, differentProcessId, 'record link field updated successfully') await s.consistency() // ASSERT: test commitment fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.ok(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, 'field reference OK on read') - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, differentProcessId, 'field updated successfully') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.ok(readResponse.commitment && readResponse.commitment.inputOf, 'field reference OK on read') + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, differentProcessId, 'field updated successfully') // ASSERT: test new commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.Ok[0] - && readResponse.Ok[0].commitment - && readResponse.Ok[0].commitment.id, iCommitmentId, 'new field query index applied') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) + t.deepEqual(readResponse.edges && readResponse.edges[0] + && readResponse.edges[0].node + && readResponse.edges[0].node.id, iCommitmentId, 'new field query index applied') // ASSERT: test stale commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'stale field query index removed') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'stale field query index removed') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { 
committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index count ok') - t.equal(readResponse.Ok && readResponse.Ok[0] - && readResponse.Ok[0].process - && readResponse.Ok[0].process.id, differentProcessId, 'new reciprocal query index applied') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index count ok') + t.deepEqual(readResponse.edges && readResponse.edges[0] + && readResponse.edges[0].node + && readResponse.edges[0].node.id, differentProcessId, 'new reciprocal query index applied') // SCENARIO: update link field (no-op) - const ieResp3 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: updateCommitment }) - t.equal(ieResp3.Ok.commitment && ieResp3.Ok.commitment.inputOf, differentProcessId, 'update with same fields is no-op') + const ieResp3 = await planning.call('commitment', 'update_commitment', { commitment: updateCommitment }) + t.deepEqual(ieResp3.commitment && ieResp3.commitment.inputOf, differentProcessId, 'update with same fields is no-op') await s.consistency() + const ieResp3RevisionId = ieResp3.commitment.revisionId // ASSERT: test event fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, differentProcessId, 'field update no-op OK') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, differentProcessId, 'field update no-op OK') // SCENARIO: remove link field const wipeEventInput = { id: iCommitmentId, + revisionId: ieResp3RevisionId, action: 'lower', inputOf: null, } - const ieResp4 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: wipeEventInput }) - t.equal(ieResp4.Ok.commitment && ieResp4.Ok.commitment.inputOf, undefined, 'update with null value erases field') + let ieResp4 + try { + ieResp4 = await planning.call('commitment', 'update_commitment', { commitment: wipeEventInput }) + } catch (e) { + // to create a failure, pretend that we still have what was there + ieResp4 = { commitment: { inputOf: differentProcessId }} + console.error(e) + } + t.equal(ieResp4.commitment && ieResp4.commitment.inputOf, undefined, 'update with null value erases field') await s.consistency() // ASSERT: test event fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, undefined, 'field erased successfully') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.equal(readResponse.commitment && readResponse.commitment.inputOf, undefined, 'field erased successfully') // ASSERT: test event input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'field query index updated') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index updated') // ASSERT: test process input query edge - readResponse = await 
alice.call('observation', 'process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'reciprocal field query index updated') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal field query index updated') @@ -147,72 +155,4 @@ runner.registerScenario('updating remote link fields syncs fields and associated // :TODO: updates for fields when other values are present in the index array }) -runner.registerScenario('removing records with linked remote indexes clears them in associated records', async (s, t) => { - const { alice } = await s.players({ alice: config }, true) - - // SCENARIO: write initial records - const process = { - name: 'context record for testing relationships', - } - const pResp = await alice.call('observation', 'process', 'create_process', { process }) - t.ok(pResp.Ok.process && pResp.Ok.process.id, 'record created successfully') - await s.consistency() - const processId = pResp.Ok.process.id - - const iIntent = { - note: 'test input intent', - inputOf: processId, - ...testEventProps, - } - const iiResp = await alice.call('planning', 'intent', 'create_intent', { intent: iIntent }) - t.ok(iiResp.Ok.intent && iiResp.Ok.intent.id, 'input record created successfully') - t.equal(iiResp.Ok.intent.inputOf, processId, 'field reference OK in write') - await s.consistency() - const iIntentId = iiResp.Ok.intent.id - - // ASSERT: test forward link field - let readResponse = await alice.call('planning', 'intent', 'get_intent', { address: iIntentId }) - t.equal(readResponse.Ok.intent && readResponse.Ok.intent.inputOf, processId, 'field reference OK on read') - - // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - t.equal(readResponse.Ok.process - && readResponse.Ok.process.intendedInputs - && readResponse.Ok.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') - - // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'field query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].intent && readResponse.Ok[0].intent.id, iIntentId, 'query index OK') - - // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].process && readResponse.Ok[0].process.id, processId, 'reciprocal query index OK') - - - - // SCENARIO: wipe associated record - const delResp = await alice.call('planning', 'intent', 'delete_intent', { address: iIntentId }) - t.ok(delResp.Ok, 'input record deleted') - await s.consistency() - - // ASSERT: test forward link field - readResponse = await alice.call('planning', 'intent', 'get_intent', { address: iIntentId }) - t.equal(readResponse.Err && readResponse.Err.Internal, 'No entry at this address', 'record deletion OK') - - // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - 
t.equal(readResponse.Ok.process - && readResponse.Ok.process.intendedInputs.length, 0, 'reciprocal field reference removed') - - // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'field query index removed') - - // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'reciprocal query index removed') -}) - runner.run() diff --git a/test/core-architecture/test_record_links_cross_zome.js b/test/core-architecture/test_record_links_cross_zome.js index d3e19ac61..1ad0cd12f 100644 --- a/test/core-architecture/test_record_links_cross_zome.js +++ b/test/core-architecture/test_record_links_cross_zome.js @@ -58,13 +58,13 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse.results && readResponse.results.length, 1, 'field query index present') - t.deepEqual(readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'query index OK') + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'query index OK') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.results && readResponse.results.length, 1, 'reciprocal query index present') - t.deepEqual(readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'reciprocal query index OK') + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // :TODO: need to find a new record with a local zome link to test, since EconomicEvent is not updateable /* @@ -84,13 +84,13 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.results.length, 1, 'field query index present') - t.equal(readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'field query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 1, 'field query index present') + t.equal(readResponse.edges[0] && readResponse.edges[0].economicEvent && readResponse.edges[0].economicEvent.id, iEventId, 'field query index updated') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.Ok && readResponse.results.length, 1, 'process query index present') - t.equal(readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, differentProcessId, 'process query 
index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 1, 'process query index present') + t.equal(readResponse.edges[0] && readResponse.edges[0].process && readResponse.edges[0].process.id, differentProcessId, 'process query index updated') @@ -120,11 +120,11 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.results.length, 0, 'field query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 0, 'field query index updated') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.Ok && readResponse.results.length, 0, 'process query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 0, 'process query index updated') */ @@ -183,13 +183,13 @@ runner2.registerScenario('removing records with linked local indexes clears them // ASSERT: test commitment input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse && readResponse.results.length, 1, 'field query index present') - t.deepEqual(readResponse && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'query index OK') + t.equal(readResponse && readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'query index OK') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse && readResponse.results.length, 1, 'reciprocal query index present') - t.deepEqual(readResponse && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'reciprocal query index OK') + t.equal(readResponse && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // SCENARIO: wipe associated record const delResp = await observation.call('economic_event', 'delete_economic_event', { address: iEventRev }) @@ -209,11 +209,11 @@ runner2.registerScenario('removing records with linked local indexes clears them // ASSERT: test commitment input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse && readResponse.results.length, 0, 'field query index removed') + t.equal(readResponse && readResponse.edges.length, 0, 'field query index removed') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse && readResponse.results.length, 0, 'reciprocal query index removed') + t.equal(readResponse && readResponse.edges.length, 0, 'reciprocal query index removed') }) runner.run() diff --git a/test/core-architecture/test_record_links_remove_cross_dna.js 
b/test/core-architecture/test_record_links_remove_cross_dna.js new file mode 100644 index 000000000..274862ac6 --- /dev/null +++ b/test/core-architecture/test_record_links_remove_cross_dna.js @@ -0,0 +1,92 @@ +const { + buildConfig, + buildPlayer, + buildRunner, + mockAgentId, + mockIdentifier, +} = require('../init') + +const runner = buildRunner() + +const config = buildConfig() + +const testEventProps = { + action: 'consume', + resourceClassifiedAs: ['some-resource-type'], + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier(false) }, + provider: mockAgentId(false), + receiver: mockAgentId(false), + due: '2019-11-19T04:29:55.056Z', +} + +runner.registerScenario('removing records with linked remote indexes clears them in associated records', async (s, t) => { + const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) + + // SCENARIO: write initial records + const process = { + name: 'context record for testing relationships', + } + const pResp = await observation.call('process', 'create_process', { process }) + t.ok(pResp.process && pResp.process.id, 'record created successfully') + await s.consistency() + const processId = pResp.process.id + + const iIntent = { + note: 'test input intent', + inputOf: processId, + ...testEventProps, + } + const iiResp = await planning.call('intent', 'create_intent', { intent: iIntent }) + t.ok(iiResp.intent && iiResp.intent.id, 'input record created successfully') + t.deepEqual(iiResp.intent.inputOf, processId, 'field reference OK in write') + await s.consistency() + const iIntentId = iiResp.intent.id + const iIntentRevisionId = iiResp.intent.revisionId + + // ASSERT: test forward link field + let readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + t.deepEqual(readResponse.intent && readResponse.intent.inputOf, processId, 'field reference OK on read') + + // ASSERT: test reciprocal link field + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.deepEqual(readResponse.process + && readResponse.process.intendedInputs + && readResponse.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') + + // ASSERT: test commitment input query edge + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'query index OK') + + // ASSERT: test process input query edge + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') + + + + // SCENARIO: wipe associated record + await planning.call('intent', 'delete_intent', { address: iIntentRevisionId }) + await s.consistency() + + // ASSERT: test forward link field + try { + readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + } catch (err) { + t.ok(err.data.data.includes('No entry at this address'), 'record deletion OK') + } + + // ASSERT: test reciprocal link field + readResponse = await observation.call('process', 'get_process', { address: processId }) + 
t.equal(readResponse.process.intendedInputs, undefined, 'reciprocal field reference removed') + + // ASSERT: test commitment input query edge + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index removed') + + // ASSERT: test process input query edge + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal query index removed') +}) + +runner.run() diff --git a/test/economic-event/event_resource_list_api.js b/test/economic-event/event_resource_list_api.js index c95d0db8a..4da96f77b 100644 --- a/test/economic-event/event_resource_list_api.js +++ b/test/economic-event/event_resource_list_api.js @@ -118,22 +118,30 @@ runner.registerScenario('Event/Resource list APIs', async (s, t) => { resp = await alice.graphQL(`{ economicEvents { - id + edges { + node { + id + } + } } economicResources { - id + edges { + node { + id + } + } } }`) - t.equal(resp.data.economicEvents.length, 5, 'all events correctly retrievable') + t.equal(resp.data.economicEvents.edges.length, 5, 'all events correctly retrievable') t.deepEqual( - resp.data.economicEvents.sort(sortById), + resp.data.economicEvents.edges.map(e => e.node).sort(sortById), [{ id: event1Id }, { id: event2Id }, { id: event3Id }, { id: event4Id }, { id: event5Id }].sort(sortById), 'event IDs OK' ) - t.equal(resp.data.economicResources.length, 2, 'all resources correctly retrievable') + t.equal(resp.data.economicResources.edges.length, 2, 'all resources correctly retrievable') t.deepEqual( - resp.data.economicResources.sort(sortById), + resp.data.economicResources.edges.map(e => e.node).sort(sortById), [{ id: resource1Id }, { id: resource2Id }].sort(sortById), 'resource IDs OK' ) diff --git a/test/economic-resource/conforming_resources.js b/test/economic-resource/conforming_resources.js index 525ca7018..2fa6d62cd 100644 --- a/test/economic-resource/conforming_resources.js +++ b/test/economic-resource/conforming_resources.js @@ -81,14 +81,18 @@ runner.registerScenario('can locate EconomicResources conforming to a ResourceSp resp = await graphQL(`{ rs: resourceSpecification(id: "${rsId}") { conformingResources { - id + edges { + node { + id + } + } } } }`) - t.equal(resp.data.rs.conformingResources.length, 2, 'all resources indexed via ResourceSpecification link') - t.equal(resp.data.rs.conformingResources[0].id, resource1Id, 'resource 2 ref OK') - t.equal(resp.data.rs.conformingResources[1].id, resource2Id, 'resource 1 ref OK') + t.equal(resp.data.rs.conformingResources.edges.length, 2, 'all resources indexed via ResourceSpecification link') + t.equal(resp.data.rs.conformingResources.edges[0].node.id, resource1Id, 'resource 2 ref OK') + t.equal(resp.data.rs.conformingResources.edges[1].node.id, resource2Id, 'resource 1 ref OK') }) runner.run() diff --git a/test/economic-resource/resource_links.js b/test/economic-resource/resource_links.js index 61ed4cbc7..262008ceb 100644 --- a/test/economic-resource/resource_links.js +++ b/test/economic-resource/resource_links.js @@ -2,6 +2,7 @@ const { buildConfig, buildRunner, buildPlayer, + seralizeId, // :NOTE: needed due to mixing of direct API and GraphQL in same test mockAgentId, mockIdentifier, mockAddress, @@ -33,7 +34,7 @@ runner.registerScenario('EconomicResource composition / containment functionalit note: 'container resource', conformsTo: 
resourceSpecificationId, } - const cResp1 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent, new_inventoried_resource: inputResource }) + const cResp1 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent, newInventoriedResource: inputResource }) await s.consistency() const event1 = cResp1.economicEvent const resource1 = cResp1.economicResource @@ -52,7 +53,7 @@ runner.registerScenario('EconomicResource composition / containment functionalit conformsTo: resourceSpecificationId, note: 'internal resource', } - const cResp2 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent2, new_inventoried_resource: inputResource2 }) + const cResp2 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent2, newInventoriedResource: inputResource2 }) await s.consistency() t.ok(cResp2.economicResource, 'internal resource created successfully') const resource2 = cResp2.economicResource @@ -80,15 +81,16 @@ runner.registerScenario('EconomicResource composition / containment functionalit conformsTo: resourceSpecificationId, note: 'another internal resource', } - const cResp3 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent3, new_inventoried_resource: inputResource3 }) + const cResp3 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent3, newInventoriedResource: inputResource3 }) await s.consistency() t.ok(cResp3.economicResource, 'additional internal resource created successfully') const resource3 = cResp3.economicResource const resourceId3 = resource3.id readResp = await alice.call('economic_resource', 'get_economic_resource', { address: resourceId1 }) + readResource = readResp.economicResource - t.ok(readResource.id, 'container resource re-retrieval OK') + t.deepEqual(readResource.id, resourceId1, 'container resource re-retrieval OK') console.log(readResource) t.equal(readResource.contains && readResource.contains.length, 2, 'container resource reference appended') t.deepEqual(readResource.contains && readResource.contains[0], resourceId2, 'container resource reference B OK') @@ -116,12 +118,12 @@ runner.registerScenario('EconomicResource composition / containment functionalit await s.consistency() readResp = await graphQL(` { - container: economicResource(id: "${resourceId1}") { + container: economicResource(id: "${seralizeId(resourceId1)}") { contains { id } } - contained: economicResource(id: "${resourceId2}") { + contained: economicResource(id: "${seralizeId(resourceId2)}") { containedIn { id } @@ -129,11 +131,11 @@ runner.registerScenario('EconomicResource composition / containment functionalit }`) t.equal(readResp.data.container.contains.length, 1, 'contains ref present in GraphQL API') - t.equal(readResp.data.container.contains[0].id, resourceId2, 'contains ref OK in GraphQL API') - t.equal(readResp.data.contained.containedIn.id, resourceId1, 'containedIn ref OK in GraphQL API') + t.equal(readResp.data.container.contains[0].id, seralizeId(resourceId2), 'contains ref OK in GraphQL API') + t.equal(readResp.data.contained.containedIn.id, seralizeId(resourceId1), 'containedIn ref OK in GraphQL API') // SCENARIO: delete resource, check links are removed - // :TODO: needs some thought + // :TODO: needs some thought; resources should only be deleted via last linked EconomicEvent's deletion // const dResp = await alice.call('economic_resource', 'delete_resource', { address: resourceId3 }) // await s.consistency() // 
t.ok(dResp.economicResource, 'resource deleted successfully') diff --git a/test/economic-resource/resource_logic.js b/test/economic-resource/resource_logic.js index 798e1757b..36d6bdb8f 100644 --- a/test/economic-resource/resource_logic.js +++ b/test/economic-resource/resource_logic.js @@ -77,7 +77,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', note: 'test resource observed in inventory', conformsTo: resourceSpecificationId, } - const cResp1 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent, new_inventoried_resource: inputResource }) + const cResp1 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent, newInventoriedResource: inputResource }) await s.consistency() const inputEventDest = { @@ -91,7 +91,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', const inputResourceDest = { note: 'destination resource for move target', } - const dResp = await observation.call('economic_event', 'create_economic_event', { event: inputEventDest, new_inventoried_resource: inputResourceDest }) + const dResp = await observation.call('economic_event', 'create_economic_event', { event: inputEventDest, newInventoriedResource: inputResourceDest }) await s.consistency() t.ok(dResp.economicEvent, 'destination inventory created successfully') const destResourceId = dResp.economicResource.id @@ -225,7 +225,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', note: 'receiver test resource', conformsTo: resourceSpecificationId, } - const cResp2 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent2, new_inventoried_resource: inputResource2 }) + const cResp2 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent2, newInventoriedResource: inputResource2 }) await s.consistency() const event2 = cResp2.economicEvent const resource2 = cResp2.economicResource diff --git a/test/flows/flow_records_graphql.js b/test/flows/flow_records_graphql.js index 9581052cd..8efd8fed3 100644 --- a/test/flows/flow_records_graphql.js +++ b/test/flows/flow_records_graphql.js @@ -3,24 +3,22 @@ const { buildConfig, buildRunner, buildPlayer, + mockAgentId, + mockIdentifier, + sortById, + remapCellId, } = require('../init') const runner = buildRunner() - -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), -}, { - vf_observation: ['planning', 'observation'], -}) +const config = buildConfig() runner.registerScenario('flow records and relationships', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { cells: [observation, planning], graphQL } = await buildPlayer(s, config, ['observation', 'planning']) - const tempProviderAgentId = 'some-agent-provider' - const tempReceiverAgentId = 'some-agent-receiver' + const tempProviderAgentId = mockAgentId() + const tempReceiverAgentId = mockAgentId() - const pResp = await alice.graphQL(` + const pResp = await graphQL(` mutation($process: ProcessCreateParams!) 
{ createProcess(process: $process) { process { @@ -38,7 +36,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { t.ok(pResp.data.createProcess.process.id, "process created OK") const processId = pResp.data.createProcess.process.id - const cResp = await alice.graphQL(` + const cResp = await graphQL(` mutation( $eventI: EconomicEventCreateParams!, $commitmentI: CommitmentCreateParams!, @@ -91,7 +89,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "due": "2019-11-19T04:29:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "some input will be provided" }, @@ -101,7 +99,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "hasPointInTime": "2019-11-19T04:27:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "some input was used up" }, @@ -117,7 +115,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "due": "2019-11-19T04:29:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "I'll make the thing happen" }, @@ -127,7 +125,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "hasPointInTime": "2019-11-19T04:27:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "hooray, the thing happened!" 
}, @@ -148,7 +146,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { const outputCommitmentId = cResp.data.outputCommitment.commitment.id const outputEventId = cResp.data.outputEvent.economicEvent.id - let resp = await alice.graphQL(` + let resp = await graphQL(` { process(id: "${processId}") { inputs { @@ -223,7 +221,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { t.equal(resp.data.outputCommitment.outputOf.id, processId, 'output commitment process ref OK') t.equal(resp.data.outputIntent.outputOf.id, processId, 'output intent process ref OK') - const mResp = await alice.graphQL(` + const mResp = await graphQL(` mutation( $inputFulfillment: FulfillmentCreateParams!, $inputEventSatisfaction: SatisfactionCreateParams!, @@ -269,9 +267,10 @@ runner.registerScenario('flow records and relationships', async (s, t) => { const iesId = mResp.data.ies.satisfaction.id const icsId = mResp.data.ics.satisfaction.id - resp = await alice.graphQL(` + resp = await graphQL(` { inputEvent: economicEvent(id:"${inputEventId}") { + id fulfills { id } @@ -329,17 +328,25 @@ runner.registerScenario('flow records and relationships', async (s, t) => { } `) + // :TODO: revisit pending a decision on https://github.com/h-REA/hREA/issues/266 + const ifIdObs = remapCellId(ifId, resp.data.inputEvent.id) + const iesIdObs = remapCellId(iesId, resp.data.inputEvent.id) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedSIds = [{ id: iesId }, { id: icsId }].sort(sortById) + resp.data.inputIntent.satisfiedBy.sort(sortById) + t.equal(resp.data.inputEvent.fulfills.length, 1, 'input event fulfillment ref added') - t.equal(resp.data.inputEvent.fulfills[0].id, ifId, 'input event fulfillment ref OK') + t.equal(resp.data.inputEvent.fulfills[0].id, ifIdObs, 'input event fulfillment ref OK') t.equal(resp.data.inputEvent.satisfies.length, 1, 'input event satisfaction ref added') - t.equal(resp.data.inputEvent.satisfies[0].id, iesId, 'input event satisfaction ref OK') + t.equal(resp.data.inputEvent.satisfies[0].id, iesIdObs, 'input event satisfaction ref OK') t.equal(resp.data.inputCommitment.fulfilledBy.length, 1, 'input commitment fulfillment ref added') t.equal(resp.data.inputCommitment.fulfilledBy[0].id, ifId, 'input commitment fulfillment ref OK') t.equal(resp.data.inputCommitment.satisfies.length, 1, 'input commitment satisfaction ref added') t.equal(resp.data.inputCommitment.satisfies[0].id, icsId, 'input commitment satisfaction ref OK') t.equal(resp.data.inputIntent.satisfiedBy.length, 2, 'input intent satisfaction refs added') - t.equal(resp.data.inputIntent.satisfiedBy[0].id, iesId, 'input intent>event satisfaction ref OK') - t.equal(resp.data.inputIntent.satisfiedBy[1].id, icsId, 'input intent>commitment satisfaction ref OK') + t.equal(resp.data.inputIntent.satisfiedBy[0].id, sortedSIds[0].id, 'input intent>event satisfaction ref OK') + t.equal(resp.data.inputIntent.satisfiedBy[1].id, sortedSIds[1].id, 'input intent>commitment satisfaction ref OK') t.equal(resp.data.if.fulfills.id, inputCommitmentId, 'input fulfillment commitment ref OK') t.equal(resp.data.if.fulfilledBy.id, inputEventId, 'input fulfillment event ref OK') diff --git a/test/fulfillment/fulfillment_records_e2e.js b/test/fulfillment/fulfillment_records_e2e.js index dac985295..5372b4256 100644 --- a/test/fulfillment/fulfillment_records_e2e.js +++ b/test/fulfillment/fulfillment_records_e2e.js @@ -4,6 +4,8 @@ const { buildPlayer, mockIdentifier, 
mockAgentId, + sortByIdBuffer, + sortIdBuffers, } = require('../init') const runner = buildRunner() @@ -51,38 +53,40 @@ runner.registerScenario('links can be written and read between DNAs', async (s, t.ok(fulfillmentResp.fulfillment && fulfillmentResp.fulfillment.id, 'fulfillment created successfully') await s.consistency() const fulfillmentId = fulfillmentResp.fulfillment.id + const fulfillmentIdObs = [eventId[0], fulfillmentId[1]] // :NOTE: ID in dest network will be same EntryHash, different DnaHash // ASSERT: check fulfillment in originating network let readResponse = await planning.call('fulfillment', 'get_fulfillment', { address: fulfillmentId }) - t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved') - t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved') + t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved in planning DNA') + t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved in planning DNA') // ASSERT: check event readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) + console.log('readResponse', readResponse) t.ok(readResponse.economicEvent.fulfills, 'EconomicEvent.fulfills value present') - t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved') - t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentId, 'EconomicEvent.fulfills reference OK') + t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved in observation DNA') + t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentIdObs, 'EconomicEvent.fulfills reference OK in observation DNA') // ASSERT: check commitment readResponse = await planning.call('commitment', 'get_commitment', { address: commitmentId }) t.ok(readResponse.commitment.fulfilledBy, 'Commitment.fulfilledBy reciprocal value present') - t.equal(readResponse.commitment.fulfilledBy.length, 1, 'Commitment.fulfilledBy reciprocal reference saved') - t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reciprocal fulfillment reference OK') + t.equal(readResponse.commitment.fulfilledBy.length, 1, 'Commitment.fulfilledBy reciprocal reference saved in planning DNA') + t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reciprocal fulfillment reference OK in planning DNA') - // ASSERT: check fulfillment in target network - readResponse = await observation.call('fulfillment', 'get_fulfillment', { address: fulfillmentId }) - t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved') - t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved') + // ASSERT: check fulfillment in destination network + readResponse = await observation.call('fulfillment', 'get_fulfillment', { address: fulfillmentIdObs }) + t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved in observation DNA') + t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved in observation DNA') // ASSERT: check forward query indexes readResponse = await planning.call('fulfillment_index', 'query_fulfillments', { params: { fulfills: commitmentId } }) - t.equal(readResponse.length, 1, 'read fulfillments by commitment OK') - 
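As the `:NOTE:` above indicates, record IDs in these tests are `[DnaHash, EntryHash]` pairs, so the same record is addressed from the destination network by swapping in that network's DnaHash while the EntryHash stays the same. A hypothetical sketch of the remapping done inline above; the `remapCellId` helper imported elsewhere in this changeset appears to serve the same purpose:

// keep the EntryHash (element 1) of `id` and adopt the DnaHash (element 0)
// of a reference ID known to belong to the destination cell
const remapToCell = (id, referenceId) => [referenceId[0], id[1]]
// so remapToCell(fulfillmentId, eventId) yields the same pair as
// [eventId[0], fulfillmentId[1]] above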
t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly') + t.equal(readResponse.edges.length, 1, 'read fulfillments by commitment OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly in planning DNA') // ASSERT: check reverse query indexes readResponse = await observation.call('fulfillment_index', 'query_fulfillments', { params: { fulfilledBy: eventId } }) - t.equal(readResponse.length, 1, 'read fulfillments by event OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'Fulfillment.fulfilledBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read fulfillments by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id[1], fulfillmentId[1], 'Fulfillment.fulfilledBy indexed correctly in observation DNA') @@ -96,40 +100,56 @@ runner.registerScenario('links can be written and read between DNAs', async (s, t.ok(fulfillmentResp2.fulfillment && fulfillmentResp2.fulfillment.id, 'additional fulfillment created successfully') await s.consistency() const fulfillmentId2 = fulfillmentResp2.fulfillment.id + const fulfillmentId2Obs = [eventId[0], fulfillmentId2[1]] // ASSERT: check forward query indices readResponse = await planning.call('fulfillment_index', 'query_fulfillments', { params: { fulfills: commitmentId } }) - t.equal(readResponse.length, 2, 'appending fulfillments for read OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'fulfillment 1 indexed correctly') - t.deepEqual(readResponse.Ok[1].fulfillment.id, fulfillmentId2, 'fulfillment 2 indexed correctly') + t.equal(readResponse.edges.length, 2, 'appending fulfillments for read OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'fulfillment 1 indexed correctly') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, fulfillmentId2, 'fulfillment 2 indexed correctly') // ASSERT: ensure append is working on the event read side readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedFIds = [{ id: fulfillmentId }, { id: fulfillmentId2 }].sort(sortByIdBuffer) + const sortedFIdsObs = [{ id: fulfillmentIdObs }, { id: fulfillmentId2Obs }].sort(sortByIdBuffer) + readResponse.economicEvent.fulfills.sort(sortIdBuffers) + t.equal(readResponse.economicEvent.fulfills.length, 2, 'EconomicEvent.fulfills appending OK') - t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentId, 'EconomicEvent.fulfills reference 1 OK') - t.deepEqual(readResponse.economicEvent.fulfills[1], fulfillmentId2, 'EconomicEvent.fulfills reference 2 OK') + t.deepEqual(readResponse.economicEvent.fulfills[0], sortedFIdsObs[0].id, 'EconomicEvent.fulfills reference 1 OK in observation DNA') + t.deepEqual(readResponse.economicEvent.fulfills[1], sortedFIdsObs[1].id, 'EconomicEvent.fulfills reference 2 OK in observation DNA') + // :TODO: test fulfillment reference in planning DNA // ASSERT: ensure query indices on the event read side - readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { fulfills: fulfillmentId } }) -
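Because link retrieval order from the DHT is not yet deterministic (hence the `:TODO:` notes about client-side sorting above), the updated assertions sort both the expected IDs and the returned references before comparing, and compare on the EntryHash alone wherever one record is viewed from two different DNAs. A sketch of that comparison under the same `[DnaHash, EntryHash]` assumption, mirroring the `sortByIdBuffer` helper added to test/init.js further below:

// stable ordering across cells: compare the EntryHash only, ignore the DnaHash
const byEntryHash = (a, b) => {
  if (a.id[1] === b.id[1]) return 0
  return a.id[1] < b.id[1] ? -1 : 1
}
// e.g. [{ id: fulfillmentIdObs }, { id: fulfillmentId2Obs }].sort(byEntryHash)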
t.equal(readResponse.length, 1, 'appending fulfillments for event query OK') - t.deepEqual(readResponse.Ok[0].economicEvent.id, eventId, 'event query indexed correctly') + readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { fulfills: fulfillmentIdObs } }) + t.equal(readResponse.edges.length, 1, 'appending fulfillments for event query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, eventId, 'event query indexed correctly') // ASSERT: ensure append is working on the commitment read side readResponse = await planning.call('commitment', 'get_commitment', { address: commitmentId }) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.commitment.fulfilledBy.sort(sortIdBuffers) + t.equal(readResponse.commitment.fulfilledBy.length, 2, 'Commitment.fulfilledBy appending OK') - t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reference 1 OK') - t.deepEqual(readResponse.commitment.fulfilledBy[1], fulfillmentId2, 'Commitment.fulfilledBy reference 2 OK') + t.deepEqual(readResponse.commitment.fulfilledBy[0], sortedFIds[0].id, 'Commitment.fulfilledBy reference 1 OK') + t.deepEqual(readResponse.commitment.fulfilledBy[1], sortedFIds[1].id, 'Commitment.fulfilledBy reference 2 OK') // ASSERT: ensure query indices on the commitment read side readResponse = await planning.call('commitment_index', 'query_commitments', { params: { fulfilledBy: fulfillmentId } }) - t.equal(readResponse.length, 1, 'appending fulfillments for commitment query OK') - t.deepEqual(readResponse.Ok[0].commitment.id, commitmentId, 'commitment query indexed correctly') + t.equal(readResponse.edges.length, 1, 'appending fulfillments for commitment query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, commitmentId, 'commitment query indexed correctly') // ASSERT: check reciprocal query indexes readResponse = await observation.call('fulfillment_index', 'query_fulfillments', { params: { fulfilledBy: eventId } }) - t.equal(readResponse.length, 2, 'read fulfillments by event OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'fulfillment 1 indexed correctly') - t.deepEqual(readResponse.Ok[1].fulfillment.id, fulfillmentId2, 'fulfillment 2 indexed correctly') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.edges.sort(({ node }, { node: node2 }) => sortByIdBuffer(node, node2)) + + t.equal(readResponse.edges.length, 2, 'read fulfillments by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, sortedFIdsObs[0].id, 'fulfillment 1 indexed correctly in observation DNA') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, sortedFIdsObs[1].id, 'fulfillment 2 indexed correctly in observation DNA') }) runner.run() diff --git a/test/init.js b/test/init.js index 4908fbfb4..18d46b22f 100644 --- a/test/init.js +++ b/test/init.js @@ -13,13 +13,14 @@ const { randomBytes } = require('crypto') const { Base64 } = require('js-base64') const readline = require('readline') -const { Orchestrator, Config, combine, tapeExecutor, localOnly } = require('@holochain/tryorama') +const { Cell, Orchestrator, Config, combine, tapeExecutor, localOnly } 
= require('@holochain/tryorama') const { GraphQLError } = require('graphql') const GQLTester = require('easygraphql-tester') const resolverLoggerMiddleware = require('./graphql-logger-middleware') const schema = require('@valueflows/vf-graphql/ALL_VF_SDL') const { generateResolvers } = require('@valueflows/vf-graphql-holochain') +const { remapCellId } = require('@valueflows/vf-graphql-holochain') process.on('unhandledRejection', error => { console.error('unhandled rejection:', error) @@ -31,12 +32,12 @@ process.on('unhandledRejection', error => { // DNA loader, to be used with `buildTestScenario` when constructing DNAs for testing const getDNA = ((dnas) => (name) => (dnas[name]))({ - 'agent': path.resolve(__dirname, '../happs/agent/hrea_agent.dna'), - 'agreement': path.resolve(__dirname, '../happs/agreement/hrea_agreement.dna'), - 'observation': path.resolve(__dirname, '../happs/observation/hrea_observation.dna'), - 'planning': path.resolve(__dirname, '../happs/planning/hrea_planning.dna'), - 'proposal': path.resolve(__dirname, '../happs/proposal/hrea_proposal.dna'), - 'specification': path.resolve(__dirname, '../happs/specification/hrea_specification.dna'), + 'agent': path.resolve(__dirname, '../bundles/dna/agent/hrea_agent.dna'), + 'agreement': path.resolve(__dirname, '../bundles/dna/agreement/hrea_agreement.dna'), + 'observation': path.resolve(__dirname, '../bundles/dna/observation/hrea_observation.dna'), + 'planning': path.resolve(__dirname, '../bundles/dna/planning/hrea_planning.dna'), + 'proposal': path.resolve(__dirname, '../bundles/dna/proposal/hrea_proposal.dna'), + 'specification': path.resolve(__dirname, '../bundles/dna/specification/hrea_specification.dna'), }) /** @@ -52,12 +53,7 @@ const buildRunner = () => new Orchestrator({ /** * Create per-agent interfaces to the DNA */ -const buildGraphQL = async (player, apiOptions, appCellIds) => { - const appCells = await player.adminWs().listCellIds() - const appCellMapping = appCells.reduce((r, cell, idx) => { - r[appCellIds[idx]] = cell - return r - }, {}) +const buildGraphQL = async (player, apiOptions, appCellMapping) => { const tester = new GQLTester(schema, resolverLoggerMiddleware()(await generateResolvers({ ...apiOptions, @@ -88,23 +84,55 @@ const buildGraphQL = async (player, apiOptions, appCellIds) => { */ const buildPlayer = async (scenario, config, agentDNAs, graphQLAPIOptions) => { const [player] = await scenario.players([config]) - const [[firstHapp]] = await player.installAgentsHapps([[agentDNAs.map(getDNA)]]) - - // :SHONK: workaround nondeterministic return order for app cells, luckily nicknames are prefixed with numeric ID - // but :WARNING: this may also break if >10 DNAs running in the same player! - firstHapp.cells.sort((a, b) => { - if (a.cellNick === b.cellNick) return 0 - return a.cellNick > b.cellNick ? 
1 : -1 + const agentPubKey = await player.adminWs().generateAgentPubKey() + const dnaSources = agentDNAs.map(getDNA) + const dnas = await Promise.all(dnaSources.map(async (dnaSource, index) => { + const dnaHash = await player.registerDna({ path: dnaSource }) + return { + hash: dnaHash, + role_id: agentDNAs[index] + } + })) + const installAppReq = { + installed_app_id: 'installed-app-id', + agent_key: agentPubKey, + dnas: dnas + } + await player.adminWs().installApp(installAppReq) + // must be enabled to be callable + const enabledAppResponse = await player.adminWs().enableApp({ + installed_app_id: installAppReq.installed_app_id + }) + if (enabledAppResponse.errors.length > 0) { + throw new Error(`Error - Failed to enable app: ${enabledAppResponse.errors}`) + } + const installedAppResponse = enabledAppResponse.app + // construct Cell instances which are the most useful class to the client + const cellsKeyedByRole = {} + const cellIdsKeyedByRole = {} + const rawCells = Object.entries(installedAppResponse.cell_data) + rawCells.forEach(([_, { cell_id, role_id }]) => { + cellsKeyedByRole[role_id] = new Cell({ + cellId: cell_id, + cellRole: role_id, + player: player + }) + cellIdsKeyedByRole[role_id] = cell_id + }) + // important: we should be returning Cells that + occur in the same order as they were passed in via agentDNAs + because the caller of this function assumes they can destructure the + cells property of the response and call the right DNA/Cell + const cells = agentDNAs.map((dnaName) => { + return cellsKeyedByRole[dnaName] }) - - const appCellIds = firstHapp.cells.map(c => c.cellRole.match(/hrea_(\w+)\.dna/)[1]) shimConsistency(scenario) return { // :TODO: is it possible to derive GraphQL DNA binding config from underlying Tryorama `config`? - graphQL: await buildGraphQL(player, graphQLAPIOptions, appCellIds), - cells: firstHapp.cells, + graphQL: await buildGraphQL(player, graphQLAPIOptions, cellIdsKeyedByRole), + cells: cells, player, } } @@ -167,8 +195,8 @@ module.exports = { buildPlayer, buildGraphQL, buildRunner, - bridge: Config.bridge, buildConfig: Config.gen, + seralizeId, // :TODO: :SHONK: temporary code for mocking, eventually tests will need to populate mock data with referential integrity to pass mockAgentId: (asStr = true) => { @@ -191,12 +219,21 @@ return asStr ? `${id}:${serializeHash(dna)}` : [dna, id] }, + remapCellId, // :TODO: temporary code until date indexing order is implemented sortById: (a, b) => { if (a.id === b.id) return 0 return a.id < b.id ? -1 : 1 }, + sortByIdBuffer: (a, b) => { // :NOTE: this sorts on EntryHash, ignores DnaHash + if (a.id[1] === b.id[1]) return 0 + return a.id[1] < b.id[1] ? -1 : 1 + }, + sortIdBuffers: (a, b) => { // :NOTE: this sorts on EntryHash, ignores DnaHash + if (a[1] === b[1]) return 0 + return a[1] < b[1] ?
-1 : 1 + }, waitForInput, } diff --git a/test/package.json b/test/package.json index 07f3ef523..109aff131 100644 --- a/test/package.json +++ b/test/package.json @@ -1,16 +1,17 @@ { "name": "@valueflows/holo-rea-tests", "version": "0.0.1", + "private": true, "description": "Integration tests for HoloREA DHTs", "main": "index.js", "scripts": { "playground": "holochain-playground", - "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape test_*.js **/*.js | faucet" + "test": "WASM_LOG=debug RUST_LOG=\"debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error,\" RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape --ignore .skip_tests **/*.js | tap-dot" }, "devDependencies": { "@holochain/tryorama": "0.4.10", "@holochain-playground/cli": "0.0.8", - "@valueflows/vf-graphql": "0.9.0-alpha.2", + "@valueflows/vf-graphql": "0.9.0-alpha.3", "@valueflows/vf-graphql-holochain": "workspace:*", "deep-for-each": "^3.0.0", "easygraphql-tester": "6.0.1", @@ -21,11 +22,12 @@ "eslint-plugin-promise": "^4.1.1", "eslint-plugin-standard": "^4.0.0", "faucet": "^0.0.1", - "graphql": "^16.2.0", + "graphql": "15.8.0", "is-function": "^1.0.1", "json3": "^3.3.2", "source-map-support": "^0.5.16", - "tape": "^4.9.2" + "tape": "^4.9.2", + "tap-dot": "2.0.0" }, "repository": { "type": "git", diff --git a/test/process/process_records_e2e.js b/test/process/process_records_e2e.js index b7672c276..56e48aa60 100644 --- a/test/process/process_records_e2e.js +++ b/test/process/process_records_e2e.js @@ -64,24 +64,23 @@ runner.registerScenario('process local query indexes and relationships', async ( // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - console.log(readResponse) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'event input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'event input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'event input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'event input query index created') // ASSERT: test event output query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'event output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, oEventId, 'event output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'event output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oEventId, 'event output query index created') // ASSERT: check process event input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.inputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && 
readResponse.results[0].process.id, processId, 'process.inputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.inputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node.id, processId, 'process.inputs query index created') // ASSERT: check process event output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { outputs: oEventId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.outputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.outputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.outputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.outputs query index created') }) const runner2 = buildRunner() @@ -157,23 +156,23 @@ runner2.registerScenario('process remote query indexes and relationships', async // ASSERT: test commitment input query edge readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'commitment input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].commitment && readResponse.results[0].commitment.id, iCommitmentId, 'commitment input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'commitment input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iCommitmentId, 'commitment input query index created') // ASSERT: test commitment output query edge readResponse = await planning.call('commitment_index', 'query_commitments', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'commitment output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].commitment && readResponse.results[0].commitment.id, oCommitmentId, 'commitment output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'commitment output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oCommitmentId, 'commitment output query index created') // ASSERT: check process commitment input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.committedInputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.committedInputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.committedInputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 
'process.committedInputs query index created') // ASSERT: check process commitment output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { committedOutputs: oCommitmentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.committedOutputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.committedOutputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.committedOutputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.committedOutputs query index created') // ASSERT: check input intent index links readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) @@ -187,23 +186,23 @@ runner2.registerScenario('process remote query indexes and relationships', async // ASSERT: test intent input query edge readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'intent input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].intent && readResponse.results[0].intent.id, iIntentId, 'intent input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'intent input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'intent input query index created') // ASSERT: test intent output query edge readResponse = await planning.call('intent_index', 'query_intents', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'intent output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].intent && readResponse.results[0].intent.id, oIntentId, 'intent output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'intent output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oIntentId, 'intent output query index created') // ASSERT: check process intent input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.intendedInputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.intendedInputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.intendedInputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.intendedInputs query index created') // ASSERT: check process intent output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { intendedOutputs: oIntentId } }) - t.deepEqual(readResponse && 
readResponse.results && readResponse.results.length, 1, 'process.intendedOutputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.intendedOutputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.intendedOutputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.intendedOutputs query index created') // TODO: modify }) diff --git a/test/proposal/test_proposal_crud.js b/test/proposal/test_proposal_crud.js index d4c1b3a93..c2471b71f 100644 --- a/test/proposal/test_proposal_crud.js +++ b/test/proposal/test_proposal_crud.js @@ -1,5 +1,4 @@ const { - getDNA, buildConfig, buildRunner, buildPlayer, @@ -7,9 +6,7 @@ const { const runner = buildRunner() -const config = buildConfig({ - proposal: getDNA('proposal'), -}, {}) +const config = buildConfig() const exampleEntry = { name: 'String', @@ -28,13 +25,14 @@ const updatedExampleEntry = { } runner.registerScenario('Proposal record API', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { graphQL } = await buildPlayer(s, config, ['proposal']) - let createResp = await alice.graphQL(` + let createResp = await graphQL(` mutation($rs: ProposalCreateParams!) { res: createProposal(proposal: $rs) { proposal { id + revisionId } } } @@ -44,11 +42,13 @@ runner.registerScenario('Proposal record API', async (s, t) => { await s.consistency() t.ok(createResp.data.res.proposal.id, 'record created') const psId = createResp.data.res.proposal.id + const psRev = createResp.data.res.proposal.revisionId - let getResp = await alice.graphQL(` + let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id + revisionId name hasBeginning hasEnd @@ -60,26 +60,30 @@ runner.registerScenario('Proposal record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(getResp.data.res, { 'id': psId, ...exampleEntry }, 'record read OK') - const updateResp = await alice.graphQL(` + t.deepEqual(getResp.data.res, { 'id': psId, 'revisionId': psRev, ...exampleEntry }, 'record read OK') + const updateResp = await graphQL(` mutation($rs: ProposalUpdateParams!) { res: updateProposal(proposal: $rs) { proposal { id + revisionId } } } `, { - rs: { id: psId, ...updatedExampleEntry }, + rs: { revisionId: psRev, ...updatedExampleEntry }, }) await s.consistency() - t.equal(updateResp.data.res.proposal.id, psId, 'record updated') + t.equal(updateResp.data.res.proposal.id, psId, 'record ID consistent') + t.notEqual(updateResp.data.res.proposal.revisionId, psRev, 'record updated') + const psRev2 = updateResp.data.res.proposal.revisionId // now we fetch the Entry again to check that the update was successful - const updatedGetResp = await alice.graphQL(` + const updatedGetResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id + revisionId created name hasBeginning @@ -91,20 +95,20 @@ runner.registerScenario('Proposal record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(updatedGetResp.data.res, { id: psId, created: exampleEntry.created, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: psId, revisionId: psRev2, created: exampleEntry.created, ...updatedExampleEntry }, 'record updated OK') - const deleteResult = await alice.graphQL(` - mutation($id: ID!) 
{ - res: deleteProposal(id: $id) + const deleteResult = await graphQL(` + mutation($revisionId: ID!) { + res: deleteProposal(revisionId: $revisionId) } `, { - id: psId, + revisionId: psRev2, }) await s.consistency() t.equal(deleteResult.data.res, true) - const queryForDeleted = await alice.graphQL(` + const queryForDeleted = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id diff --git a/test/proposal/test_proposedintent_crud.js b/test/proposal/test_proposedintent_crud.js index 7dea53a66..37a2f026b 100644 --- a/test/proposal/test_proposedintent_crud.js +++ b/test/proposal/test_proposedintent_crud.js @@ -1,26 +1,20 @@ const { - getDNA, buildConfig, buildRunner, buildPlayer, + sortById, } = require('../init') const runner = buildRunner() -const config = buildConfig({ - proposal: getDNA('proposal'), - planning: getDNA('planning'), - agent: getDNA('agent'), -}, { - vf_planning: ['proposal', 'planning'], -}) +const config = buildConfig() const exampleProposal = { name: 'String', - hasBeginning: '2019-11-19T00:00:00.056Z', - hasEnd: '2019-11-19T00:00:00.056Z', + hasBeginning: new Date('2019-11-19T00:00:00.056Z'), + hasEnd: new Date('2019-11-19T00:00:00.056Z'), unitBased: true, - created: '2019-11-19T00:00:00.056Z', + created: new Date('2019-11-19T00:00:00.056Z'), note: 'note', } @@ -29,12 +23,12 @@ const exampleIntent = { } runner.registerScenario('ProposedIntent external link', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { graphQL } = await buildPlayer(s, config, ['proposal', 'planning', 'agent']) /* * the next code is only for getting an intent and a proposal to link to the proposedIntent. * the idea is to verify the intent linking by getting Proposal->ProposedIntent->Intent */ - const agentAddress = (await alice.graphQL(`{ + const agentAddress = (await graphQL(`{ myAgent { id } @@ -42,7 +36,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { exampleIntent.provider = agentAddress // intent creation - let intentRes = await alice.graphQL(` + let intentRes = await graphQL(` mutation($rs: IntentCreateParams!) { res: createIntent(intent: $rs) { intent { @@ -58,7 +52,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(intentAdress, 'can create intent') // proposal creation - let proposalRes = await alice.graphQL(` + let proposalRes = await graphQL(` mutation($rs: ProposalCreateParams!) { res: createProposal(proposal: $rs) { proposal { @@ -73,7 +67,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { let proposalAdress = proposalRes.data.res.proposal.id t.ok(proposalAdress, 'can create proposal') - proposalRes = await alice.graphQL(` + proposalRes = await graphQL(` query($id: ID!) 
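Note — the fixture objects in these test files now pass JavaScript `Date` instances where ISO-8601 strings were previously sent, which suggests the GraphQL client layer now performs DateTime scalar serialisation itself (an assumption; the scalar implementation is not part of this diff). A quick illustration of the equivalence:

    // Assumed behaviour: the DateTime scalar serialiser accepts a JS Date and
    // emits the same RFC3339 string the old fixtures hard-coded.
    const hasBeginning = new Date('2019-11-19T00:00:00.056Z')
    console.assert(hasBeginning.toISOString() === '2019-11-19T00:00:00.056Z')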
{ res: proposal(id: $id) { id @@ -88,11 +82,12 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(proposalRes.data.res.id, proposalAdress, 'proposal read OK') t.equal(proposalRes.data.res.publishes.length, 0, 'proposedIntent list empty') - let proposeIntentResp = await alice.graphQL(` + let proposeIntentResp = await graphQL(` mutation($pIn: ID!, $ps: ID!, $re: Boolean) { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { id + revisionId } } } @@ -104,8 +99,9 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await s.consistency() t.ok(proposeIntentResp.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress = proposeIntentResp.data.res.proposedIntent.id + const proposedIntentRev = proposeIntentResp.data.res.proposedIntent.revisionId - let getResp = await alice.graphQL(` + let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -126,7 +122,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress, 'intent fetching from proposedIntent succesful') // another intent - intentRes = await alice.graphQL(` + intentRes = await graphQL(` mutation($rs: IntentCreateParams!) { res: createIntent(intent: $rs) { intent { @@ -136,7 +132,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { } `, { rs: { - hasPointInTime: '2019-11-19T00:00:00.056Z', + hasPointInTime: new Date('2019-11-19T00:00:00.056Z'), ...exampleIntent, }, }) @@ -145,11 +141,12 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(intentAdress2, 'can create intent') // another proposed intent - let proposeIntentResp2 = await alice.graphQL(` + let proposeIntentResp2 = await graphQL(` mutation($pIn: ID!, $ps: ID!, $re: Boolean) { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { id + revisionId } } } @@ -161,8 +158,9 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await s.consistency() t.ok(proposeIntentResp2.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress2 = proposeIntentResp2.data.res.proposedIntent.id + const proposedIntentRev2 = proposeIntentResp2.data.res.proposedIntent.revisionId - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) 
{ res: proposal(id: $id) { id @@ -179,21 +177,32 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { }) t.equal(getResp.data.res.id, proposalAdress, 'proposal fetch succesful') t.equal(getResp.data.res.publishes.length, 2, 'proposedIntent count as expected') - t.equal(getResp.data.res.publishes[0].id, proposedIntentAdress2, 'proposedIntent B fetching from proposal succesful') - t.equal(getResp.data.res.publishes[1].id, proposedIntentAdress, 'proposedIntent A fetching from proposal succesful') - t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress2, 'intent B fetching from proposedIntent succesful') - t.equal(getResp.data.res.publishes[1].publishes.id, intentAdress, 'intent A fetching from proposedIntent succesful') - await alice.graphQL(` + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedPIIds = [{ id: proposedIntentAdress }, { id: proposedIntentAdress2 }].sort(sortById) + getResp.data.res.publishes.sort(sortById) + + const sortedIIds = [{ id: intentAdress }, { id: intentAdress2 }].sort(sortById) + const sortedPublishesIds = [ + { id: getResp.data.res.publishes[0].publishes.id }, + { id: getResp.data.res.publishes[1].publishes.id }, + ].sort(sortById) + + t.equal(getResp.data.res.publishes[0].id, sortedPIIds[0].id, 'proposedIntent 1 fetching from proposal successful') + t.equal(getResp.data.res.publishes[1].id, sortedPIIds[1].id, 'proposedIntent 2 fetching from proposal successful') + t.equal(sortedPublishesIds[0].id, sortedIIds[0].id, 'intent 1 fetching from proposedIntent successful') + t.equal(sortedPublishesIds[1].id, sortedIIds[1].id, 'intent 2 fetching from proposedIntent successful') + + await graphQL(` mutation($in: ID!) { - res: deleteProposedIntent(id: $in) + res: deleteProposedIntent(revisionId: $in) } `, { - in: proposedIntentAdress, + in: proposedIntentRev, }) await s.consistency() - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -213,16 +222,16 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(getResp.data.res.publishes[0].id, proposedIntentAdress2, 'proposedIntent fetching from proposal after delete succesful') t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress2, 'intent fetching from proposedIntent after delete succesful') - await alice.graphQL(` + await graphQL(` mutation($in: ID!) { - res: deleteProposedIntent(id: $in) + res: deleteProposedIntent(revisionId: $in) } `, { - in: proposedIntentAdress2, + in: proposedIntentRev2, }) await s.consistency() - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) 
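Note — per the `:TODO:` comment above, index ordering is not yet deterministic, so both the expected IDs and the returned records are passed through the shared `sortById` helper before comparison. `sortById` lives in `test/init.js`, which is outside this diff, so the body below is an assumed sketch of such a comparator, not the actual implementation:

    // Hypothetical comparator: orders records by the string form of their `id`
    // field so expected and actual lists share one deterministic ordering.
    const sortById = (a, b) => String(a.id).localeCompare(String(b.id))

    // usage mirroring the test code above:
    const expected = [{ id: 'b' }, { id: 'a' }].sort(sortById) // -> a, b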
{ res: proposal(id: $id) { id diff --git a/test/proposal/test_proposedto_crud.js b/test/proposal/test_proposedto_crud.js index 3c56da76a..94ab8eb88 100644 --- a/test/proposal/test_proposedto_crud.js +++ b/test/proposal/test_proposedto_crud.js @@ -1,5 +1,4 @@ const { - getDNA, buildConfig, buildRunner, buildPlayer, @@ -7,30 +6,27 @@ const { const runner = buildRunner() -const config = buildConfig({ - proposal: getDNA('proposal'), - agent: getDNA('agent'), -}, {}) +const config = buildConfig() const exampleProposal = { name: 'String', - hasBeginning: '2019-11-19T00:00:00.056Z', - hasEnd: '2019-11-19T00:00:00.056Z', + hasBeginning: new Date('2019-11-19T00:00:00.056Z'), + hasEnd: new Date('2019-11-19T00:00:00.056Z'), unitBased: true, - created: '2019-11-19T00:00:00.056Z', + created: new Date('2019-11-19T00:00:00.056Z'), note: 'note', } runner.registerScenario('ProposedTo record API', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { graphQL } = await buildPlayer(s, config, ['proposal', 'agent']) - const agentAddress = (await alice.graphQL(`{ + const agentAddress = (await graphQL(`{ myAgent { id } }`)).data.myAgent.id - let proposalRes = await alice.graphQL(` + let proposalRes = await graphQL(` mutation($rs: ProposalCreateParams!) { res: createProposal(proposal: $rs) { proposal { @@ -46,11 +42,12 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { await s.consistency() - let createResp = await alice.graphQL(` + let createResp = await graphQL(` mutation($p: ID!, $pTo: ID!) { res: proposeTo(proposed: $p,proposedTo: $pTo) { proposedTo { id + revisionId } } } @@ -62,7 +59,8 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.ok(createResp.data.res.proposedTo.id, 'record created') const psID = createResp.data.res.proposedTo.id - let getResp = await alice.graphQL(` + const psRev = createResp.data.res.proposedTo.revisionId + let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -82,18 +80,18 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.equal(getResp.data.res.publishedTo[0].id, psID, 'proposedTo fetching from proposal succesful') t.equal(getResp.data.res.publishedTo[0].proposedTo.id, agentAddress, 'agent fetching from proposedTo succesful') - const deleteResult = await alice.graphQL(` + const deleteResult = await graphQL(` mutation($id: ID!) 
{ - res: deleteProposedTo(id: $id) + res: deleteProposedTo(revisionId: $id) } `, { - id: psID, + id: psRev, }) await s.consistency() t.equal(deleteResult.data.res, true) - const queryForDeleted = await alice.graphQL(` + const queryForDeleted = await graphQL(` query { res: proposal(id: "${proposalID}") { id diff --git a/test/satisfaction/satisfaction_records_e2e.js b/test/satisfaction/satisfaction_records_e2e.js index e7c5b7ee7..915940b4f 100644 --- a/test/satisfaction/satisfaction_records_e2e.js +++ b/test/satisfaction/satisfaction_records_e2e.js @@ -4,6 +4,7 @@ const { buildPlayer, mockIdentifier, mockAgentId, + sortByIdBuffer, sortIdBuffers, } = require('../init') const runner = buildRunner() @@ -51,48 +52,49 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a t.ok(satisfactionResp.satisfaction && satisfactionResp.satisfaction.id, 'satisfaction by event created successfully') await s.consistency() const satisfactionId = satisfactionResp.satisfaction.id + const satisfactionIdObs = [eventId[0], satisfactionId[1]] // :NOTE: ID in dest network will be same EntryHash, different DnaHash // ASSERT: check satisfaction in originating network let readResponse = await planning.call('satisfaction', 'get_satisfaction', { address: satisfactionId }) - t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved') - t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved') + t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved in planning DNA') + t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved in planning DNA') // ASSERT: check satisfaction in target network - readResponse = await observation.call('satisfaction', 'get_satisfaction', { address: satisfactionId }) - t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved') - t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved') + readResponse = await observation.call('satisfaction', 'get_satisfaction', { address: satisfactionIdObs }) + t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved in observation DNA') + t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved in observation DNA') // ASSERT: check event field refs readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) t.ok(readResponse.economicEvent.satisfies, 'EconomicEvent.satisfies value present') - t.equal(readResponse.economicEvent.satisfies.length, 1, 'EconomicEvent.satisfies reference saved') - t.deepEqual(readResponse.economicEvent.satisfies[0], satisfactionId, 'EconomicEvent.satisfies reference OK') + t.equal(readResponse.economicEvent.satisfies.length, 1, 'EconomicEvent.satisfies reference saved in observation DNA') + t.deepEqual(readResponse.economicEvent.satisfies[0], satisfactionIdObs, 'EconomicEvent.satisfies reference OK in observation DNA') // ASSERT: check intent field refs readResponse = await planning.call('intent', 'get_intent', { address: intentId }) t.ok(readResponse.intent.satisfiedBy, 'intent.satisfiedBy reciprocal value present') t.equal(readResponse.intent.satisfiedBy.length, 1, 'Intent.satisfiedBy reciprocal reference saved') - t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId, 'Intent.satisfiedBy reciprocal satisfaction 
reference OK') + t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId, 'Intent.satisfiedBy reciprocal satisfaction reference OK in planning DNA') // ASSERT: check intent query indexes readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfies: intentId } }) - t.equal(readResponse.length, 1, 'read satisfactions by intent OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId, 'Satisfaction.satisfies indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by intent OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionId, 'Satisfaction.satisfies indexed correctly in planning DNA') // ASSERT: check event query indexes readResponse = await observation.call('satisfaction_index', 'query_satisfactions', { params: { satisfiedBy: eventId } }) - t.equal(readResponse.length, 1, 'read satisfactions by event OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId, 'Satisfaction.satisfiedBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionIdObs, 'Satisfaction.satisfiedBy indexed correctly in observation DNA') // ASSERT: check intent satisfaction query indexes readResponse = await planning.call('intent_index', 'query_intents', { params: { satisfiedBy: satisfactionId } }) - t.equal(readResponse.length, 1, 'indexing satisfactions for intent query OK') - t.deepEqual(readResponse.Ok[0].intent.id, intentId, 'intent query 1 indexed correctly') + t.equal(readResponse.edges.length, 1, 'indexing satisfactions for intent query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, intentId, 'intent query 1 indexed correctly in planning DNA') // ASSERT: check event satisfaction query indexes - readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { satisfies: satisfactionId } }) - t.equal(readResponse.length, 1, 'indexing satisfactions for event query OK') - t.deepEqual(readResponse.Ok[0].economicEvent.id, eventId, 'event query 1 indexed correctly') + readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { satisfies: satisfactionIdObs } }) + t.equal(readResponse.edges.length, 1, 'indexing satisfactions for event query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, eventId, 'event query 1 indexed correctly in observation DNA') @@ -124,25 +126,34 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a // ASSERT: check intent query indices readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfies: intentId } }) - t.equal(readResponse.length, 2, 'appending satisfactions for read OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId2, 'satisfaction 2 indexed correctly') - t.deepEqual(readResponse.Ok[1].satisfaction.id, satisfactionId, 'satisfaction 1 indexed correctly') + t.equal(readResponse.edges.length, 2, 'appending satisfactions for read OK') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedSIds = [{ id: satisfactionId }, { id: satisfactionId2 }].sort(sortByIdBuffer) + 
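Note — the `satisfactionIdObs` derivation in this scenario relies on record IDs being two-element `[DnaHash, EntryHash]` tuples: the same entry mirrored into another DNA keeps its EntryHash but carries that network's DnaHash. A sketch with placeholder strings (real values are hash buffers):

    // IDs are [DnaHash, EntryHash] pairs; the strings below are placeholders.
    const eventId = ['DnaHash(observation)', 'EntryHash(event)']
    const satisfactionId = ['DnaHash(planning)', 'EntryHash(satisfaction)']
    // The same satisfaction record as addressed within the observation DNA:
    // identical EntryHash, but the observation network's DnaHash.
    const satisfactionIdObs = [eventId[0], satisfactionId[1]]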
readResponse.edges.sort(({ node }, { node: node2 }) => sortByIdBuffer(node, node2)) + + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, sortedSIds[0].id, 'satisfaction 1 indexed correctly') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, sortedSIds[1].id, 'satisfaction 2 indexed correctly') // ASSERT: check intent field refs readResponse = await planning.call('intent', 'get_intent', { address: intentId }) t.equal(readResponse.intent.satisfiedBy.length, 2, 'Intent.satisfiedBy appending OK') - t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId2, 'Intent.satisfiedBy reference 2 OK') - t.deepEqual(readResponse.intent.satisfiedBy[1], satisfactionId, 'Intent.satisfiedBy reference 1 OK') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.intent.satisfiedBy.sort(sortIdBuffers) + + t.deepEqual(readResponse.intent.satisfiedBy[0], sortedSIds[0].id, 'Intent.satisfiedBy reference 1 OK') + t.deepEqual(readResponse.intent.satisfiedBy[1], sortedSIds[1].id, 'Intent.satisfiedBy reference 2 OK') // ASSERT: check commitment query indexes readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfiedBy: commitmentId } }) - t.equal(readResponse.length, 1, 'read satisfactions by commitment OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId2, 'Satisfaction.satisfiedBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by commitment OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionId2, 'Satisfaction.satisfiedBy indexed correctly') // ASSERT: check intent satisfaction query indexes readResponse = await planning.call('intent_index', 'query_intents', { params: { satisfiedBy: satisfactionId2 } }) - t.equal(readResponse.length, 1, 'appending satisfactions for intent query OK') - t.deepEqual(readResponse.Ok[0].intent.id, intentId, 'intent query 2 indexed correctly') + t.equal(readResponse.edges.length, 1, 'appending satisfactions for intent query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, intentId, 'intent query 2 indexed correctly') }) runner.run() diff --git a/test/agent/test_agent_core.js b/test/skip_agent/test_agent_core.js similarity index 81% rename from test/agent/test_agent_core.js rename to test/skip_agent/test_agent_core.js index a33741ef2..b2dfa59ba 100644 --- a/test/agent/test_agent_core.js +++ b/test/skip_agent/test_agent_core.js @@ -7,16 +7,14 @@ const { const runner = buildRunner() -const config = buildConfig({ - agent: getDNA('agent'), -}, { -}) +const config = buildConfig() runner.registerScenario('REA economic agent functionality', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agent').agentAddress + const alice = await buildPlayer(s, config, ['agent']) + const graphQL = alice.graphQL + const aliceAddr = alice.instance('agent').agentAddress // :TODO: update for latest tryorama - let res = await alice.graphQL(`{ + let res = await graphQL(`{ myAgent { id name @@ -28,7 +26,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { t.ok(res.data.myAgent.id, 'agent A can retrieve own agent ID') t.ok(res.data.myAgent.name, 'agent A can retrieve own agent 
name') - res = await alice.graphQL(`{ + res = await graphQL(`{ agents { id name @@ -51,7 +49,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { await s.consistency() // wait for Bob's join to propagate to Alice - res = await alice.graphQL(`{ + res = await graphQL(`{ agents { id name @@ -61,7 +59,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { t.equal(res.data.agents[1].id, aliceAddr, 'own agent ID returned in list') t.equal(res.data.agents[0].id, bobAddr, 'new agent ID returned in list') - res = await alice.graphQL(`{ + res = await graphQL(`{ agent(id: "${bobAddr}") { id name diff --git a/test/agent/test_agent_links.js b/test/skip_agent/test_agent_links.js similarity index 89% rename from test/agent/test_agent_links.js rename to test/skip_agent/test_agent_links.js index 855de8e6d..a2f73d7f9 100644 --- a/test/agent/test_agent_links.js +++ b/test/skip_agent/test_agent_links.js @@ -7,15 +7,8 @@ const { const runner = buildRunner() -const config = buildConfig({ - agent: getDNA('agent'), - observation: getDNA('observation'), - planning: getDNA('planning'), - proposal: getDNA('proposal'), -}, { - vf_observation: ['planning', 'observation'], - vf_planning: ['proposal', 'planning'], -}) +const config = buildConfig() +const config2 = buildConfig() // required attributes, not involved with test logic const testEventProps = { @@ -24,10 +17,10 @@ const testEventProps = { } runner.registerScenario('Agent relationship traversal', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agent').agentAddress - const bob = await buildPlayer(s, 'bob', config) - const bobAddr = bob.instance('agent').agentAddress + const alice = await buildPlayer(s, config, ['agent', 'observation', 'planning', 'proposal']) + const aliceAddr = alice.instance('agent').agentAddress // :TODO: update for latest tryorama + const bob = await buildPlayer(s, config2, ['agent', 'observation', 'planning', 'proposal']) + const bobAddr = bob.instance('agent').agentAddress // :TODO: update for latest tryorama // event which shares provider & receiver diff --git a/test/agent/test_agent_registration.js b/test/skip_agent/test_agent_registration.js similarity index 84% rename from test/agent/test_agent_registration.js rename to test/skip_agent/test_agent_registration.js index 813585e15..a0ef2c193 100644 --- a/test/agent/test_agent_registration.js +++ b/test/skip_agent/test_agent_registration.js @@ -7,13 +7,12 @@ const { const runner = buildRunner() -const config = buildConfig({ - agents: getDNA('agent'), -}, {}) +const config = buildConfig() +const config2 = buildConfig() runner.registerScenario('Agent registration API (happ-agent-registration module)', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agents').agentAddress + const { cells: [alice] } = await buildPlayer(s, config, ['agent']) + const aliceAddr = alice.instance('agents').agentAddress // :TODO: update for latest tryorama await s.consistency() @@ -31,7 +30,7 @@ runner.registerScenario('Agent registration API (happ-agent-registration module) t.equal(resp.Ok, false, 'can check other registration statuses') // Load Bob - const bob = await buildPlayer(s, 'bob', config) + const { cells: [bob] } = await buildPlayer(s, config2, ['agent']) const bobAddr = bob.instance('agents').agentAddress // Bob hits the DNA for the first time diff --git a/test/agent/test_group_memberships.js 
b/test/skip_agent/test_group_memberships.js similarity index 100% rename from test/agent/test_group_memberships.js rename to test/skip_agent/test_group_memberships.js diff --git a/test/social-architectures/cross_border_trade.js b/test/skip_social-architectures/cross_border_trade.js similarity index 100% rename from test/social-architectures/cross_border_trade.js rename to test/skip_social-architectures/cross_border_trade.js diff --git a/test/specification/test_incoming_links.js b/test/specification/test_incoming_links.js index 58282996a..eb4996b07 100644 --- a/test/specification/test_incoming_links.js +++ b/test/specification/test_incoming_links.js @@ -3,29 +3,21 @@ const { buildConfig, buildRunner, buildPlayer, + mockAgentId, } = require('../init') const runner = buildRunner() -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), - specification: getDNA('specification'), -}, { - vf_observation: ['planning', 'observation'], - vf_specification: ['observation', 'specification'], -}) +const config = buildConfig() -const tempProviderAgentId = 'some-agent-provider' -const tempReceiverAgentId = 'some-agent-receiver' const fillerProps = { - provider: tempProviderAgentId, - receiver: tempReceiverAgentId, + provider: mockAgentId(), + receiver: mockAgentId(), hasPointInTime: '2019-11-19T04:27:55.056Z', } runner.registerScenario('inbound Specification link references', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const alice = await buildPlayer(s, config, ['observation', 'planning', 'specification']) // setup some records for linking to let resp = await alice.graphQL(` @@ -84,7 +76,7 @@ runner.registerScenario('inbound Specification link references', async (s, t) => await s.consistency() t.ok(resp.data.res.resourceSpecification.id, 'resource specification created') - t.equal(resp.data.res.resourceSpecification.defaultUnitOfEffort.id, 'm', 'resource specification default unit ok') + t.ok(resp.data.res.resourceSpecification.defaultUnitOfEffort.id, 'resource specification default unit ok') const rsId = resp.data.res.resourceSpecification.id // test simple links diff --git a/test/specification/test_processspecification_crud.js b/test/specification/test_processspecification_crud.js index 87fac6801..5598ff9d9 100644 --- a/test/specification/test_processspecification_crud.js +++ b/test/specification/test_processspecification_crud.js @@ -25,6 +25,7 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { res: createProcessSpecification(processSpecification: $rs) { processSpecification { id + revisionId } } } @@ -56,6 +57,7 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { res: updateProcessSpecification(processSpecification: $rs) { processSpecification { id + revisionId } } } @@ -65,12 +67,14 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { await s.consistency() t.equal(updateResp.data.res.processSpecification.id, psId, 'record updated') + const updatedPsRevId = updateResp.data.res.processSpecification.revisionId // now we fetch the Entry again to check that the update was successful const updatedGetResp = await alice.graphQL(` query($id: ID!) 
{ res: processSpecification(id: $id) { id + revisionId name note } @@ -78,14 +82,14 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(updatedGetResp.data.res, { id: psId, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: psId, revisionId: updatedPsRevId, ...updatedExampleEntry }, 'record updated OK') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteProcessSpecification(id: $id) + mutation($revisionId: ID!) { + res: deleteProcessSpecification(revisionId: $revisionId) } `, { - id: psId, + revisionId: updatedPsRevId, }) await s.consistency() diff --git a/test/specification/test_resourcespecification_crud.js b/test/specification/test_resourcespecification_crud.js index 051849809..33e5cf116 100644 --- a/test/specification/test_resourcespecification_crud.js +++ b/test/specification/test_resourcespecification_crud.js @@ -27,6 +27,7 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { res: createResourceSpecification(resourceSpecification: $rs) { resourceSpecification { id + revisionId } } } @@ -59,12 +60,14 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { res: updateResourceSpecification(resourceSpecification: $rs) { resourceSpecification { id + revisionId } } } `, { rs: { revisionId: rsRev, ...updatedExampleEntry }, }) + const updatedRsRevId = updateResp.data.res.resourceSpecification.revisionId await s.consistency() t.equal(updateResp.data.res.resourceSpecification.id, rsId, 'record update OK') @@ -74,6 +77,7 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { query($id: ID!) { res: resourceSpecification(id: $id) { id + revisionId name image note @@ -83,14 +87,14 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { id: rsId, }) - t.deepEqual(updatedGetResp.data.res, { id: rsId, ...updatedExampleEntry }, 'record properties updated') + t.deepEqual(updatedGetResp.data.res, { id: rsId, revisionId: updatedRsRevId, ...updatedExampleEntry }, 'record properties updated') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteResourceSpecification(id: $id) + mutation($revisionId: ID!) { + res: deleteResourceSpecification(revisionId: $revisionId) } `, { - id: rsId, + revisionId: updatedRsRevId, }) await s.consistency() diff --git a/test/specification/test_unit_crud.js b/test/specification/test_unit_crud.js index 427b8fe73..f1d3501e0 100644 --- a/test/specification/test_unit_crud.js +++ b/test/specification/test_unit_crud.js @@ -25,6 +25,7 @@ runner.registerScenario('Unit record API', async (s, t) => { res: createUnit(unit: $rs) { unit { id + revisionId } } } @@ -37,11 +38,11 @@ runner.registerScenario('Unit record API', async (s, t) => { t.equal(createResp.data.res.unit.id.split(':')[0], exampleEntry.symbol, 'record index set') let uId = createResp.data.res.unit.id let uRevision = createResp.data.res.unit.revisionId - const getResp = await alice.graphQL(` query($id: ID!) { res: unit(id: $id) { id + revisionId label symbol } @@ -50,23 +51,25 @@ runner.registerScenario('Unit record API', async (s, t) => { id: uId, }) - t.deepEqual(getResp.data.res, { 'id': uId, ...exampleEntry }, 'record read OK') + t.deepEqual(getResp.data.res, { 'id': uId, revisionId: uRevision, ...exampleEntry }, 'record read OK') const updateResp = await alice.graphQL(` mutation($rs: UnitUpdateParams!) 
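Note — across these CRUD tests, mutations now address records by `revisionId` (the header hash of the record's latest revision) rather than by `id`: an update consumes the previous `revisionId` and returns a fresh one, and a delete consumes the latest `revisionId`. A condensed sketch of that lifecycle using the Unit API touched here (queries abbreviated, field values illustrative; `graphQL` is the request helper returned by `buildPlayer`):

    // Condensed sketch of the revisionId lifecycle exercised by these tests.
    async function unitLifecycle (graphQL) {
      // create returns an id plus the revisionId of the first revision
      // (for Units the id is derived from the symbol, so it may change on update)
      const { data: { res: { unit: created } } } = await graphQL(`
        mutation($rs: UnitCreateParams!) { res: createUnit(unit: $rs) { unit { id revisionId } } }
      `, { rs: { label: 'metre', symbol: 'm' } })

      // update is addressed by the *previous* revisionId and yields a new one
      const { data: { res: { unit: updated } } } = await graphQL(`
        mutation($rs: UnitUpdateParams!) { res: updateUnit(unit: $rs) { unit { id revisionId } } }
      `, { rs: { revisionId: created.revisionId, label: 'kilometre', symbol: 'km' } })

      // delete is addressed by the *latest* revisionId
      await graphQL(`
        mutation($revisionId: ID!) { res: deleteUnit(revisionId: $revisionId) }
      `, { revisionId: updated.revisionId })
    }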
{ res: updateUnit(unit: $rs) { unit { id + revisionId } } } `, { rs: { revisionId: uRevision, ...updatedExampleEntry }, }) + const updatedUnitRevId = updateResp.data.res.unit.revisionId await s.consistency() t.notEqual(updateResp.data.res.unit.id, uId, 'update operation succeeded') - t.equal(updateResp.data.res.unit.id, updatedExampleEntry.symbol, 'record index updated') + t.equal(updateResp.data.res.unit.id.split(':')[0], updatedExampleEntry.symbol, 'record index updated') uId = updateResp.data.res.unit.id // now we fetch the Entry again to check that the update was successful @@ -74,6 +77,7 @@ runner.registerScenario('Unit record API', async (s, t) => { query($id: ID!) { res: unit(id: $id) { id + revisionId label symbol } @@ -82,14 +86,14 @@ runner.registerScenario('Unit record API', async (s, t) => { id: uId, }) - t.deepEqual(updatedGetResp.data.res, { id: uId, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: uId, revisionId: updatedUnitRevId, ...updatedExampleEntry }, 'record updated OK') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteUnit(id: $id) + mutation($revisionId: ID!) { + res: deleteUnit(revisionId: $revisionId) } `, { - id: uId, + revisionId: updatedUnitRevId, }) await s.consistency() diff --git a/webhapp/web-happ.yaml b/webhapp/web-happ.yaml deleted file mode 100644 index 74ad3f08c..000000000 --- a/webhapp/web-happ.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -manifest_version: "1" -name: hrea -ui: - bundled: "../apps/holorea-graphql-explorer/ui.zip" -happ_manifest: - bundled: "../bundles/full_suite_release_template/hrea_suite.happ" diff --git a/zomes/agent_registration/Cargo.toml b/zomes/agent_registration/Cargo.toml index b81e7eda1..5958146d5 100644 --- a/zomes/agent_registration/Cargo.toml +++ b/zomes/agent_registration/Cargo.toml @@ -5,7 +5,7 @@ authors = ["pospi "] edition = "2018" [dependencies] -hc_zome_agent_registration = {git = "https://github.com/holochain-open-dev/agent-registration", rev = "a37cc26", package = "hc_zome_agent_registration"} +hc_zome_agent_registration = {git = "https://github.com/holochain-open-dev/agent-registration", branch = "hdk-123-validation", package = "hc_zome_agent_registration"} [lib] path = "src/lib.rs" diff --git a/zomes/rea_action/zome/Cargo.toml b/zomes/rea_action/zome/Cargo.toml index a33c1f40d..ff0e95883 100644 --- a/zomes/rea_action/zome/Cargo.toml +++ b/zomes/rea_action/zome/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [dependencies] serde = "1" -hdk = "0.0.122" +hdk = "0.0.124" vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } vf_actions = { path = "../../../lib/vf_actions" } diff --git a/zomes/rea_agreement/lib/src/lib.rs b/zomes/rea_agreement/lib/src/lib.rs index 0cafc8ea0..766c30b76 100644 --- a/zomes/rea_agreement/lib/src/lib.rs +++ b/zomes/rea_agreement/lib/src/lib.rs @@ -33,7 +33,7 @@ pub fn handle_create_agreement(entry_def_id: S, agreement: CreateRequest) -> pub fn handle_get_agreement(entry_def_id: S, address: AgreementAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, revision, &entry, get_link_fields(&base_address)?) } @@ -45,13 +45,13 @@ pub fn handle_update_agreement(entry_def_id: S, agreement: UpdateRequest) -> construct_response(&identity_address, revision_id, &entry, get_link_fields(&identity_address)?) 
} -pub fn handle_delete_agreement(address: RevisionHash) -> RecordAPIResult { - delete_record::(&address) +pub fn handle_delete_agreement(address: HeaderHash) -> RecordAPIResult { + delete_record::(&address) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &AgreementAddress, revision: RevisionHash, e: &EntryData, ( + address: &AgreementAddress, revision: HeaderHash, e: &EntryData, ( commitments, economic_events, ): ( diff --git a/zomes/rea_agreement/rpc/src/lib.rs b/zomes/rea_agreement/rpc/src/lib.rs index 54cff2579..e6eba82b7 100644 --- a/zomes/rea_agreement/rpc/src/lib.rs +++ b/zomes/rea_agreement/rpc/src/lib.rs @@ -10,12 +10,12 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, AgreementAddress, CommitmentAddress, EconomicEventAddress, DateTime, FixedOffset, + ByHeader, HeaderHash, }; //---------------- EXTERNAL RECORD STRUCTURE ---------------- @@ -26,7 +26,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: AgreementAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(skip_serializing_if = "Option::is_none")] pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -80,7 +80,7 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")] pub name: MaybeUndefined, @@ -93,7 +93,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&self) -> RevisionHash { + pub fn get_revision_id(&self) -> HeaderHash { self.revision_id.to_owned().into() } diff --git a/zomes/rea_agreement/storage_consts/src/lib.rs b/zomes/rea_agreement/storage_consts/src/lib.rs index a58aadcd0..2b6a3834b 100644 --- a/zomes/rea_agreement/storage_consts/src/lib.rs +++ b/zomes/rea_agreement/storage_consts/src/lib.rs @@ -6,9 +6,3 @@ * @package Holo-REA */ pub const AGREEMENT_ENTRY_TYPE: &str = "vf_agreement"; - -pub const AGREEMENT_EVENTS_LINK_TAG: &str = "economic_events"; -pub const AGREEMENT_COMMITMENTS_LINK_TAG: &str = "commitments"; - -pub const AGREEMENT_COMMITMENTS_READ_API_METHOD: &str = "_internal_read_agreement_clauses"; -pub const AGREEMENT_EVENTS_READ_API_METHOD: &str = "_internal_read_agreement_realizations"; diff --git a/zomes/rea_agreement/zome/Cargo.toml b/zomes/rea_agreement/zome/Cargo.toml index acc4361df..34573be64 100644 --- a/zomes/rea_agreement/zome/Cargo.toml +++ b/zomes/rea_agreement/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_agreement_rpc = { path = "../rpc" } hc_zome_rea_agreement_lib = { path = "../lib" } diff --git a/zomes/rea_agreement/zome/src/lib.rs b/zomes/rea_agreement/zome/src/lib.rs index 97ef5405e..2ff3ee437 100644 --- a/zomes/rea_agreement/zome/src/lib.rs +++ b/zomes/rea_agreement/zome/src/lib.rs @@ -16,6 +16,7 @@ use hc_zome_rea_agreement_lib::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + AgreementAddress::entry_def(), EntryDef { id: AGREEMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, @@ -56,12 +57,7 @@ fn update_agreement(UpdateParams { agreement }: UpdateParams) -> ExternResult ExternResult { +fn delete_agreement(ByHeader { address }: 
ByHeader) -> ExternResult { Ok(handle_delete_agreement(address)?) } diff --git a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml index 2e230d0c9..393c09880 100644 --- a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml +++ b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_agreement/zome_idx_agreement/src/lib.rs b/zomes/rea_agreement/zome_idx_agreement/src/lib.rs index 7fdeb8f8b..d8eaf3342 100644 --- a/zomes/rea_agreement/zome_idx_agreement/src/lib.rs +++ b/zomes/rea_agreement/zome_idx_agreement/src/lib.rs @@ -1,14 +1,24 @@ /** * Agreement query indexes for agreement DNA * - * :TODO: - * * @package Holo-REA * @since 2021-09-06 */ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_agreement_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + AgreementAddress::entry_def(), + EconomicEventAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Agreement { economic_events: Remote, diff --git a/zomes/rea_commitment/lib/Cargo.toml b/zomes/rea_commitment/lib/Cargo.toml index 3ac5aadd4..114962217 100644 --- a/zomes/rea_commitment/lib/Cargo.toml +++ b/zomes/rea_commitment/lib/Cargo.toml @@ -6,6 +6,8 @@ edition = "2018" [dependencies] paste = "1.0" +# :DUPE: hdk-rust-revid +hdk = "0.0.124" hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs index 472b23ed9..f5292f443 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -31,14 +31,18 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - let (header_addr, base_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, commitment.to_owned())?; // handle link fields + // :TODO: propagate errors if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &commitment { - create_index!(Remote(commitment.input_of(input_of), process.committed_inputs(&base_address)))?; + let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address)); + hdk::prelude::debug!("handle_create_commitment::input_of index {:?}", e); }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &commitment { - create_index!(Remote(commitment.output_of(output_of), process.committed_outputs(&base_address)))?; + let e = create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address)); + hdk::prelude::debug!("handle_create_commitment::output_of index {:?}", e); }; if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. 
} = &commitment { - create_index!(Remote(commitment.clause_of(clause_of), agreement.commitments(&base_address)))?; + let e = create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address)); + hdk::prelude::debug!("handle_create_commitment::clause_of index {:?}", e); }; // :TODO: pass results from link creation rather than re-reading @@ -48,7 +52,7 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - pub fn handle_get_commitment(entry_def_id: S, address: CommitmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } @@ -62,60 +66,66 @@ pub fn handle_update_commitment(entry_def_id: S, commitment: UpdateRequest) - if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + let e = update_index!( commitment .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_inputs(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_commitment::input_of index {:?}", e); } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + let e = update_index!( commitment .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_outputs(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_commitment::output_of index {:?}", e); } if new_entry.clause_of != prev_entry.clause_of { let new_value = match &new_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + let e = update_index!( commitment .clause_of(new_value.as_slice()) .not(prev_value.as_slice()), agreement.commitments(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_commitment::clause_of index {:?}", e); } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
} -pub fn handle_delete_commitment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_commitment(revision_id: HeaderHash) -> RecordAPIResult { // load the record to ensure it is of the correct type let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; // handle link fields if let Some(process_address) = entry.input_of { - update_index!(Remote(commitment.input_of.not(&vec![process_address]), process.committed_inputs(&base_address)))?; + let e = update_index!(commitment.input_of.not(&vec![process_address]), process.committed_inputs(&base_address)); + hdk::prelude::debug!("handle_delete_commitment::input_of index {:?}", e); } if let Some(process_address) = entry.output_of { - update_index!(Remote(commitment.output_of.not(&vec![process_address]), process.committed_outputs(&base_address)))?; + let e = update_index!(commitment.output_of.not(&vec![process_address]), process.committed_outputs(&base_address)); + hdk::prelude::debug!("handle_delete_commitment::output_of index {:?}", e); } if let Some(agreement_address) = entry.clause_of { - update_index!(Remote(commitment.clause_of.not(&vec![agreement_address]), agreement.commitments(&base_address)))?; + let e = update_index!(commitment.clause_of.not(&vec![agreement_address]), agreement.commitments(&base_address)); + hdk::prelude::debug!("handle_delete_commitment::clause_of index {:?}", e); } // delete entry last, as it must be present in order for links to be removed - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &CommitmentAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &CommitmentAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, involved_agents, @@ -165,6 +175,16 @@ fn read_commitment_index_zome(conf: DnaConfigSlice) -> Option { Some(conf.commitment.index_zome) } +/// Properties accessor for zome config +fn read_process_index_zome(conf: DnaConfigSlice) -> Option { + conf.commitment.process_index_zome +} + +/// Properties accessor for zome config +fn read_agreement_index_zome(conf: DnaConfigSlice) -> Option { + conf.commitment.agreement_index_zome +} + // @see construct_response fn get_link_fields(commitment: &CommitmentAddress) -> RecordAPIResult<( Vec, diff --git a/zomes/rea_commitment/rpc/src/lib.rs b/zomes/rea_commitment/rpc/src/lib.rs index 2a91a67fe..ce7c6ca9a 100644 --- a/zomes/rea_commitment/rpc/src/lib.rs +++ b/zomes/rea_commitment/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined, default_false}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, + HeaderHash, ByHeader, ActionId, DateTime, FixedOffset, ExternalURL, @@ -37,7 +37,7 @@ pub use vf_attributes_hdk::{ CommitmentAddress }; #[serde(rename_all = "camelCase")] pub struct Response { pub id: CommitmentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub action: ActionId, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -163,7 +163,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub action: MaybeUndefined, #[serde(default)] @@ -211,7 +211,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn 
get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_commitment/storage/src/lib.rs b/zomes/rea_commitment/storage/src/lib.rs index 57edd068f..aee613a76 100644 --- a/zomes/rea_commitment/storage/src/lib.rs +++ b/zomes/rea_commitment/storage/src/lib.rs @@ -16,7 +16,6 @@ use hdk_records::{ use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ActionId, DateTime, FixedOffset, ExternalURL, @@ -48,6 +47,8 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct CommitmentZomeConfig { pub index_zome: String, + pub process_index_zome: Option, + pub agreement_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_commitment/storage_consts/src/lib.rs b/zomes/rea_commitment/storage_consts/src/lib.rs index b4cd72b79..afacedab3 100644 --- a/zomes/rea_commitment/storage_consts/src/lib.rs +++ b/zomes/rea_commitment/storage_consts/src/lib.rs @@ -6,23 +6,3 @@ * @package Holo-REA */ pub const COMMITMENT_ENTRY_TYPE: &str = "vf_commitment"; -pub const COMMITMENT_FULFILLEDBY_LINK_TAG: &str = "fulfilled_by"; -pub const COMMITMENT_SATISFIES_LINK_TAG: &str = "satisfies"; -pub const COMMITMENT_INPUT_OF_LINK_TAG: &str = "input_of"; -pub const COMMITMENT_OUTPUT_OF_LINK_TAG: &str = "output_of"; -pub const COMMITMENT_CLAUSE_OF_LINK_TAG: &str = "clause_of"; - -pub const COMMITMENT_FULFILLEDBY_READ_API_METHOD: &str = "_internal_read_commitment_fulfillments"; -pub const COMMITMENT_SATISFIES_READ_API_METHOD: &str = "_internal_read_commitment_satisfactions"; - -pub const COMMITMENT_INPUT_READ_API_METHOD: &str = "_internal_read_commitment_process_inputs"; -pub const COMMITMENT_INPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_inputs"; -pub const PROCESS_INPUT_INDEXING_API_METHOD: &str = "index_process_input_commitments"; - -pub const COMMITMENT_OUTPUT_READ_API_METHOD: &str = "_internal_read_commitment_process_outputs"; -pub const COMMITMENT_OUTPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_outputs"; -pub const PROCESS_OUTPUT_INDEXING_API_METHOD: &str = "index_process_output_commitments"; - -pub const COMMITMENT_CLAUSEOF_READ_API_METHOD: &str = "_internal_read_commitment_agreements"; -pub const COMMITMENT_CLAUSEOF_INDEXING_API_METHOD: &str = "_internal_reindex_agreement_clauses"; -pub const AGREEMENT_CLAUSE_INDEXING_API_METHOD: &str = "index_agreement_clauses"; diff --git a/zomes/rea_commitment/zome/Cargo.toml b/zomes/rea_commitment/zome/Cargo.toml index c2eaf21ad..5ae5dcd68 100644 --- a/zomes/rea_commitment/zome/Cargo.toml +++ b/zomes/rea_commitment/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_commitment_rpc = { path = "../rpc" } hc_zome_rea_commitment_lib = { path = "../lib" } diff --git a/zomes/rea_commitment/zome/src/lib.rs b/zomes/rea_commitment/zome/src/lib.rs index 060028586..6727ef1bc 100644 --- a/zomes/rea_commitment/zome/src/lib.rs +++ b/zomes/rea_commitment/zome/src/lib.rs @@ -19,14 +19,19 @@ use hc_zome_rea_commitment_storage::*; use hc_zome_rea_commitment_storage_consts::*; #[hdk_extern] -fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; +fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. 
} => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } +} +fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(event_storage) => { let record = event_storage.entry(); @@ -43,6 +48,7 @@ fn validate(validation_data: ValidateData) -> ExternResult ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + CommitmentAddress::entry_def(), EntryDef { id: CAP_STORAGE_ENTRY_DEF_ID.into(), visibility: EntryVisibility::Private, @@ -90,11 +96,6 @@ fn update_commitment(UpdateParams { commitment }: UpdateParams) -> ExternResult< Ok(handle_update_commitment(COMMITMENT_ENTRY_TYPE, commitment)?) } -#[derive(Debug, Serialize, Deserialize)] -struct ByHeader { - pub address: RevisionHash, -} - #[hdk_extern] fn delete_commitment(ByHeader { address }: ByHeader) -> ExternResult { Ok(handle_delete_commitment(address)?) diff --git a/zomes/rea_commitment/zome_idx_planning/Cargo.toml b/zomes/rea_commitment/zome_idx_planning/Cargo.toml index 48d10ea3a..c0f8876e9 100644 --- a/zomes/rea_commitment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_commitment/zome_idx_planning/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" paste = "1.0" serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_commitment/zome_idx_planning/src/lib.rs b/zomes/rea_commitment/zome_idx_planning/src/lib.rs index be4718b67..a392b552d 100644 --- a/zomes/rea_commitment/zome_idx_planning/src/lib.rs +++ b/zomes/rea_commitment/zome_idx_planning/src/lib.rs @@ -7,6 +7,20 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_commitment_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProcessAddress::entry_def(), + SatisfactionAddress::entry_def(), + CommitmentAddress::entry_def(), + AgreementAddress::entry_def(), + FulfillmentAddress::entry_def(), + ])) +} + #[index_zome] struct Commitment { fulfilled_by: Local, diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index 526051813..576ffba2e 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -46,6 +46,9 @@ use hc_zome_rea_economic_resource_lib::{ get_link_fields as get_resource_link_fields, }; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + /// Properties accessor for zome config. 
fn read_economic_resource_index_zome(conf: DnaConfigSlice) -> Option { @@ -64,8 +67,8 @@ impl API for EconomicEventZomePermissableDefault { entry_def_id: Self::S, process_entry_def_id: Self::S, event: EconomicEventCreateRequest, new_inventoried_resource: Option ) -> RecordAPIResult { - let mut resources_affected: Vec<(RevisionHash, EconomicResourceAddress, EconomicResourceData, EconomicResourceData)> = vec![]; - let mut resource_created: Option<(RevisionHash, EconomicResourceAddress, EconomicResourceData)> = None; + let mut resources_affected: Vec<(HeaderHash, EconomicResourceAddress, EconomicResourceData, EconomicResourceData)> = vec![]; + let mut resource_created: Option<(HeaderHash, EconomicResourceAddress, EconomicResourceData)> = None; // if the event observes a new resource, create that resource & return it in the response if let Some(economic_resource) = new_inventoried_resource { @@ -93,7 +96,8 @@ impl API for EconomicEventZomePermissableDefault { // Link any affected resources to this event so that we can pull all the events which affect any resource for resource_data in resources_affected.iter() { - create_index!(Local(economic_event.affects(&(resource_data.1)), economic_resource.affected_by(&event_address)))?; + let e = create_index!(economic_event.affects(resource_data.1), economic_resource.affected_by(&event_address)); + hdk::prelude::debug!("create_economic_event::affects index {:?}", e); } match resource_created { @@ -113,7 +117,7 @@ impl API for EconomicEventZomePermissableDefault { } fn get_economic_event(entry_def_id: Self::S, address: EconomicEventAddress) -> RecordAPIResult { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } @@ -125,26 +129,29 @@ impl API for EconomicEventZomePermissableDefault { construct_response(&identity_address, &revision_id, &new_entry, get_link_fields(&identity_address)?) } - fn delete_economic_event(revision_id: RevisionHash) -> RecordAPIResult { + fn delete_economic_event(revision_id: HeaderHash) -> RecordAPIResult { // read any referencing indexes let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; // handle link fields if let Some(process_address) = entry.input_of { - update_index!(Local(economic_event.input_of.not(&vec![process_address.to_owned()]), process.inputs(&base_address)))?; + let e = update_index!(economic_event.input_of.not(&vec![process_address.to_owned()]), process.inputs(&base_address)); + hdk::prelude::debug!("delete_economic_event::input_of index {:?}", e); } if let Some(process_address) = entry.output_of { - update_index!(Local(economic_event.output_of.not(&vec![process_address.to_owned()]), process.outputs(&base_address)))?; + let e = update_index!(economic_event.output_of.not(&vec![process_address.to_owned()]), process.outputs(&base_address)); + hdk::prelude::debug!("delete_economic_event::output_of index {:?}", e); } if let Some(agreement_address) = entry.realization_of { - let _ = update_index!(Remote(economic_event.realization_of.not(&vec![agreement_address.to_owned()]), agreement.economic_events(&base_address))); + let e = update_index!(economic_event.realization_of.not(&vec![agreement_address.to_owned()]), agreement.economic_events(&base_address)); + hdk::prelude::debug!("delete_economic_event::realization_of index {:?}", e); } // :TODO: handle cleanup of foreign key fields? 
(fulfillment, satisfaction) // May not be needed due to cross-record deletion validation logic. // delete entry last as it must be present in order for links to be removed - delete_record::(&revision_id) + delete_record::(&revision_id) } fn get_all_economic_events(entry_def_id: Self::S) -> RecordAPIResult { @@ -167,8 +174,12 @@ fn read_process_index_zome(conf: DnaConfigSlice) -> Option { conf.economic_event.process_index_zome } +fn read_agreement_index_zome(conf: DnaConfigSlice) -> Option { + conf.economic_event.agreement_index_zome +} + fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEventCreateRequest, resource_address: Option, -) -> RecordAPIResult<(RevisionHash, EconomicEventAddress, EntryData)> +) -> RecordAPIResult<(HeaderHash, EconomicEventAddress, EntryData)> where S: AsRef { let (revision_id, base_address, entry_resp): (_, EconomicEventAddress, EntryData) = create_record( @@ -180,15 +191,18 @@ fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEvent )?; // handle link fields - // :TODO: propagate errors + // :TODO: propagate errors https://github.com/h-REA/hREA/issues/264 if let EconomicEventCreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = event { - create_index!(Local(economic_event.input_of(input_of), process.inputs(&base_address)))?; - }; - if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { - create_index!(Local(economic_event.output_of(output_of), process.outputs(&base_address)))?; - }; - if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. } = event { - create_index!(Remote(economic_event.realization_of(realization_of), agreement.realized(&base_address)))?; + let e = create_index!(economic_event.input_of(input_of), process.inputs(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::input_of index {:?}", e); + }; + if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { + let e = create_index!(economic_event.output_of(output_of), process.outputs(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::output_of index {:?}", e); + }; + if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. } = event { + let e = create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::realization_of index {:?}", e); }; Ok((revision_id, base_address, entry_resp)) @@ -206,7 +220,7 @@ fn read_resource_zome(conf: DnaConfigSlice) -> Option { /// fn handle_create_inventory_from_event( economic_resource: &ResourceCreateRequest, event: &CreateRequest, -) -> OtherCellResult<(RevisionHash, EconomicResourceAddress, EconomicResourceData)> +) -> OtherCellResult<(HeaderHash, EconomicResourceAddress, EconomicResourceData)> { Ok(call_local_zome_method( read_resource_zome, @@ -226,7 +240,7 @@ fn resource_creation(event: &CreateRequest, resource: &ResourceCreateRequest) -> /// fn handle_update_resource_inventory( event: &EconomicEventCreateRequest, -) -> RecordAPIResult> +) -> RecordAPIResult> { Ok(call_local_zome_method( read_resource_zome, @@ -235,7 +249,7 @@ fn handle_update_resource_inventory( )?) 
} -fn handle_list_output(entries_result: Vec>) -> RecordAPIResult { +fn handle_list_output(entries_result: Vec>) -> RecordAPIResult { let edges = entries_result.iter() .cloned() .filter_map(Result::ok) @@ -271,7 +285,7 @@ fn handle_list_output(entries_result: Vec( event_address: &EconomicEventAddress, - revision_id: &RevisionHash, + revision_id: &HeaderHash, event: &EntryData, ( fulfillments, satisfactions, @@ -280,7 +294,7 @@ pub fn construct_response_with_resource<'a>( Vec, ), resource_address: Option, - resource_revision_id: &RevisionHash, + resource_revision_id: &HeaderHash, resource: EconomicResourceData, ( contained_in, stage, @@ -329,7 +343,7 @@ pub fn construct_response_with_resource<'a>( // Same as above, but omits EconomicResource object pub fn construct_response<'a>( - address: &EconomicEventAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicEventAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, ): ( @@ -369,7 +383,7 @@ pub fn construct_response<'a>( } pub fn construct_list_response<'a>( - address: &EconomicEventAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicEventAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, ): ( diff --git a/zomes/rea_economic_event/rpc/src/lib.rs b/zomes/rea_economic_event/rpc/src/lib.rs index 2109e1f3f..dbefe3480 100644 --- a/zomes/rea_economic_event/rpc/src/lib.rs +++ b/zomes/rea_economic_event/rpc/src/lib.rs @@ -12,7 +12,7 @@ use serde_maybe_undefined::MaybeUndefined; use vf_measurement::QuantityValue; use hdk_relay_pagination::PageInfo; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, EconomicEventAddress, EconomicResourceAddress, ActionId, @@ -39,7 +39,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: EconomicEventAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub action: ActionId, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -94,7 +94,7 @@ pub struct Response { #[serde(rename_all = "camelCase")] pub struct ResourceResponse { pub id: EconomicResourceAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(skip_serializing_if = "Option::is_none")] pub conforms_to: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -271,6 +271,9 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct ResourceCreateRequest { + #[serde(default)] + pub name: MaybeUndefined, + #[serde(default)] pub conforms_to: MaybeUndefined, #[serde(default)] pub tracking_identifier: MaybeUndefined, @@ -305,7 +308,7 @@ pub struct CreateParams { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub note: MaybeUndefined, #[serde(default)] @@ -319,7 +322,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_economic_event/storage/src/lib.rs b/zomes/rea_economic_event/storage/src/lib.rs index 5c4b6f14f..ec58c0e4c 100644 --- a/zomes/rea_economic_event/storage/src/lib.rs +++ b/zomes/rea_economic_event/storage/src/lib.rs @@ -48,6 +48,7 @@ pub struct EconomicEventZomeConfig { pub economic_resource_zome: Option, pub 
economic_resource_index_zome: Option, pub process_index_zome: Option, + pub agreement_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_economic_event/storage_consts/src/lib.rs b/zomes/rea_economic_event/storage_consts/src/lib.rs index fd420ed27..3dcd11561 100644 --- a/zomes/rea_economic_event/storage_consts/src/lib.rs +++ b/zomes/rea_economic_event/storage_consts/src/lib.rs @@ -7,12 +7,5 @@ */ pub const EVENT_ENTRY_TYPE: &str = "vf_economic_event"; -pub const EVENT_FULFILLS_LINK_TAG: &str = "fulfills"; -pub const EVENT_SATISFIES_LINK_TAG: &str = "satisfies"; -pub const EVENT_INPUT_OF_LINK_TAG: &str = "input_of"; -pub const EVENT_OUTPUT_OF_LINK_TAG: &str = "output_of"; -pub const EVENT_REALIZATION_OF_LINK_TAG: &str = "realization_of"; -pub const EVENT_AFFECTS_RESOURCE_LINK_TAG: &str = "affects"; - pub const INVENTORY_CREATION_API_METHOD: &str = "_internal_create_inventory"; pub const INVENTORY_UPDATE_API_METHOD: &str = "_internal_update_inventory"; diff --git a/zomes/rea_economic_event/zome/Cargo.toml b/zomes/rea_economic_event/zome/Cargo.toml index bba699a18..cb4052261 100644 --- a/zomes/rea_economic_event/zome/Cargo.toml +++ b/zomes/rea_economic_event/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_economic_event_zome_api = { path = "../zome_api" } hc_zome_rea_economic_event_lib = { path = "../lib" } diff --git a/zomes/rea_economic_event/zome/src/lib.rs b/zomes/rea_economic_event/zome/src/lib.rs index 519a40d9f..a4ae5d312 100644 --- a/zomes/rea_economic_event/zome/src/lib.rs +++ b/zomes/rea_economic_event/zome/src/lib.rs @@ -18,12 +18,20 @@ use hc_zome_rea_process_storage_consts::PROCESS_ENTRY_TYPE; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + EconomicEventAddress::entry_def(), EntryDef { id: EVENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, crdt_type: CrdtType, required_validations: 2.into(), required_validation_type: RequiredValidationType::default(), + }, + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), } ])) } diff --git a/zomes/rea_economic_event/zome_api/src/lib.rs b/zomes/rea_economic_event/zome_api/src/lib.rs index d0ac078ec..c26c0af1a 100644 --- a/zomes/rea_economic_event/zome_api/src/lib.rs +++ b/zomes/rea_economic_event/zome_api/src/lib.rs @@ -10,7 +10,7 @@ pub trait API { ) -> RecordAPIResult; fn get_economic_event(entry_def_id: Self::S, address: EconomicEventAddress) -> RecordAPIResult; fn update_economic_event(entry_def_id: Self::S, event: UpdateRequest) -> RecordAPIResult; - fn delete_economic_event(revision_id: RevisionHash) -> RecordAPIResult; + fn delete_economic_event(revision_id: HeaderHash) -> RecordAPIResult; fn get_all_economic_events(entry_def_id: Self::S) -> RecordAPIResult; } @@ -67,14 +67,19 @@ macro_rules! declare_economic_event_zome_api { macro_rules! declare_economic_event_zome_validation_defaults { ( /*$zome_api:ty*/ ) => { #[hdk_extern] - fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; + fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. 
} => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } + } + fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(event_storage) => { let record = event_storage.entry(); diff --git a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml index d697278fd..ab9bf2b5c 100644 --- a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index 2e5fa2562..573f0be72 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -7,13 +7,28 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_economic_event_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProcessAddress::entry_def(), + AgreementAddress::entry_def(), + SatisfactionAddress::entry_def(), + FulfillmentAddress::entry_def(), + EconomicResourceAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} + #[index_zome] struct EconomicEvent { input_of: Local, output_of: Local, - realization_of: Remote, - satisfies: Remote, - fulfills: Remote, + realization_of: Local, + satisfies: Local, + fulfills: Local, // internal indexes (not part of REA spec) affects: Local, diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index 5ae500b7f..844427f3a 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -70,7 +70,7 @@ impl API for EconomicResourceZomePermissableDefault { /// /// :TODO: assess whether this should use the same standardised API format as external endpoints /// - fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData)> + fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData)> { // :TODO: move this assertion to validation callback if let MaybeUndefined::Some(_sent_inventory_id) = ¶ms.get_event_params().resource_inventoried_as { @@ -87,10 +87,12 @@ impl API for EconomicResourceZomePermissableDefault { // :NOTE: this will always run- resource without a specification ID would fail entry validation (implicit in the above) if let Some(conforms_to) = resource_spec { - let _ = create_index!(Remote(economic_resource.conforms_to(conforms_to), 
resource_specification.conforming_resources(&base_address))); - } - if let Some(contained_in) = resource_params.get_contained_in() { - create_index!(Self(economic_resource(&base_address).contained_in(&contained_in)))?; + let e = create_index!(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address)); + hdk::prelude::debug!("create_inventory_from_event::conforms_to index {:?}", e); + } + if let Some(contained_in) = resource_params.get_contained_in() { + let e = create_index!(economic_resource(&base_address).contained_in(&contained_in)); + hdk::prelude::debug!("create_inventory_from_event::contained_in index {:?}", e); }; Ok((revision_id, base_address, entry_resp)) @@ -98,7 +100,7 @@ impl API for EconomicResourceZomePermissableDefault { fn get_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, address: EconomicResourceAddress) -> RecordAPIResult { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&event_entry_def_id, &process_entry_def_id, &address)?) } @@ -107,9 +109,9 @@ impl API for EconomicResourceZomePermissableDefault { fn update_inventory_from_event( resource_entry_def_id: Self::S, event: EventCreateRequest, - ) -> RecordAPIResult> + ) -> RecordAPIResult> { - let mut resources_affected: Vec<(RevisionHash, EconomicResourceAddress, EntryData, EntryData)> = vec![]; + let mut resources_affected: Vec<(HeaderHash, EconomicResourceAddress, EntryData, EntryData)> = vec![]; // if the event is a transfer-like event, run the receiver's update first if let MaybeUndefined::Some(receiver_inventory) = &event.to_resource_inventoried_as { @@ -139,9 +141,12 @@ impl API for EconomicResourceZomePermissableDefault { let (revision_id, identity_address, entry, prev_entry): (_,_, EntryData, EntryData) = update_record(&entry_def_id, &address, resource)?; // :TODO: this may eventually be moved to an EconomicEvent update, see https://lab.allmende.io/valueflows/valueflows/-/issues/637 - let now_contained = if let Some(contained) = &entry.contained_in { vec![contained.clone()] } else { vec![] }; - let prev_contained = if let Some(contained) = &prev_entry.contained_in { vec![contained.clone()] } else { vec![] }; - update_index!(Self(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice())))?; + if entry.contained_in != prev_entry.contained_in { + let now_contained = if let Some(contained) = &entry.contained_in { vec![contained.clone()] } else { vec![] }; + let prev_contained = if let Some(contained) = &prev_entry.contained_in { vec![contained.clone()] } else { vec![] }; + let e = update_index!(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice())); + hdk::prelude::debug!("update_economic_resource::contained_in index {:?}", e); + } // :TODO: optimise this- should pass results from `replace_direct_index` instead of retrieving from `get_link_fields` where updates construct_response(&identity_address, &revision_id, &entry, get_link_fields(&event_entry_def_id, &process_entry_def_id, &identity_address)?) 
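The recurring change in the hunks above swaps hard failure on index maintenance (the trailing `?`) for capture-and-log semantics, so a failed index write no longer aborts the enclosing record operation. A minimal sketch of the pattern, assuming a hypothetical fallible `maintain_index` helper in place of the repo's `create_index!`/`update_index!` macros:

```rust
use hdk::prelude::*;

// Hypothetical stand-in for the `create_index!` / `update_index!` macros:
// any fallible index write against the DHT.
fn maintain_index(target: &str) -> ExternResult<()> {
    debug!("writing index for {}", target);
    Ok(())
}

fn update_record_with_soft_indexing() -> ExternResult<()> {
    // Before: `maintain_index("contained_in")?;` aborts the whole update on failure.
    // After: capture the Result and log it; the record write itself still succeeds.
    let e = maintain_index("contained_in");
    debug!("update_economic_resource::contained_in index {:?}", e);
    Ok(())
}
```

The trade-off is that index corruption becomes observable only via host logs; the :TODO: linked above (hREA issue #264) tracks restoring proper error propagation.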
@@ -160,17 +165,22 @@ fn read_economic_resource_index_zome(conf: DnaConfigSlice) -> Option { Some(conf.economic_resource.index_zome) } +/// Properties accessor for zome config +fn read_resource_specification_index_zome(conf: DnaConfigSlice) -> Option { + conf.economic_resource.resource_specification_index_zome +} + fn handle_update_inventory_resource( resource_entry_def_id: S, - resource_addr: &RevisionHash, + resource_addr: &HeaderHash, event: EventCreateRequest, -) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData, EntryData)> +) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData, EntryData)> where S: AsRef, { Ok(update_record(&resource_entry_def_id, resource_addr, event)?) } -fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries_result: Vec>) -> RecordAPIResult +fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries_result: Vec>) -> RecordAPIResult where S: AsRef { let edges = entries_result.iter() @@ -203,7 +213,7 @@ fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries /// Create response from input DHT primitives pub fn construct_response<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, @@ -222,7 +232,7 @@ pub fn construct_response<'a>( /// Create response from input DHT primitives pub fn construct_response_record<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, @@ -258,7 +268,7 @@ pub fn construct_response_record<'a>( } pub fn construct_list_response<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, @@ -311,7 +321,7 @@ fn get_resource_state(event_entry_def_id: S, resource: &EconomicResourceAddre return result; } - let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); + let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); match evt { Err(_) => result, // :TODO: this indicates some data integrity error Ok((_, _, entry)) => { @@ -340,14 +350,14 @@ fn get_resource_stage(event_entry_def_id: S, process_entry_def_id: S, resourc return result; } - let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); + let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); match evt { Err(_) => result, // :TODO: this indicates some data integrity error Ok((_, _, entry)) => { match &entry.output_of { Some(output_of) => { // get the associated process - let maybe_process_entry = read_record_entry::(&process_entry_def_id, output_of.as_ref()); + let maybe_process_entry = read_record_entry::(&process_entry_def_id, output_of.as_ref()); // check to see if it has an associated specification match &maybe_process_entry { Ok((_,_, process_entry)) => match &process_entry.based_on { diff --git a/zomes/rea_economic_resource/rpc/src/lib.rs b/zomes/rea_economic_resource/rpc/src/lib.rs index 02d38de3b..f4494eba3 100644 --- a/zomes/rea_economic_resource/rpc/src/lib.rs +++ b/zomes/rea_economic_resource/rpc/src/lib.rs @@ -10,7 +10,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, + HeaderHash, ByAddress, EconomicResourceAddress, 
EconomicEventAddress, ExternalURL, @@ -63,7 +63,7 @@ impl<'a> CreationPayload { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub classified_as: MaybeUndefined>, #[serde(default)] @@ -77,7 +77,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_economic_resource/storage/src/lib.rs b/zomes/rea_economic_resource/storage/src/lib.rs index 444ccb24a..fa67d279b 100644 --- a/zomes/rea_economic_resource/storage/src/lib.rs +++ b/zomes/rea_economic_resource/storage/src/lib.rs @@ -48,12 +48,14 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct EconomicResourceZomeConfig { pub index_zome: String, + pub resource_specification_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] pub struct EntryData { + pub name: Option, pub conforms_to: Option, pub classified_as: Option>, pub tracking_identifier: Option, @@ -89,6 +91,7 @@ impl From for EntryData let r = t.resource; let e = t.event; EntryData { + name: r.name.to_option(), conforms_to: conforming.clone(), classified_as: if e.resource_classified_as == MaybeUndefined::Undefined { None } else { e.resource_classified_as.to_owned().to_option() }, tracking_identifier: if r.tracking_identifier == MaybeUndefined::Undefined { None } else { r.tracking_identifier.to_owned().to_option() }, @@ -157,6 +160,7 @@ fn get_default_unit_for_specification(specification_id: ResourceSpecificationAdd impl Updateable for EntryData { fn update_with(&self, e: UpdateRequest) -> EntryData { EntryData { + name: self.name.to_owned(), conforms_to: self.conforms_to.to_owned(), classified_as: if e.classified_as == MaybeUndefined::Undefined { self.classified_as.to_owned() } else { e.classified_as.to_owned().to_option() }, tracking_identifier: self.tracking_identifier.to_owned(), @@ -183,6 +187,7 @@ impl Updateable for EntryData { impl Updateable for EntryData { fn update_with(&self, e: EventCreateRequest) -> EntryData { EntryData { + name: self.name.to_owned(), conforms_to: self.conforms_to.to_owned(), classified_as: { if let MaybeUndefined::Some(classified_as) = e.resource_classified_as.to_owned() { diff --git a/zomes/rea_economic_resource/storage_consts/src/lib.rs b/zomes/rea_economic_resource/storage_consts/src/lib.rs index 1cb0713a3..f941ed0c2 100644 --- a/zomes/rea_economic_resource/storage_consts/src/lib.rs +++ b/zomes/rea_economic_resource/storage_consts/src/lib.rs @@ -6,16 +6,3 @@ * @package Holo-REA */ pub const RESOURCE_ENTRY_TYPE: &str = "vf_economic_resource"; - -pub const RESOURCE_CONTAINS_LINK_TAG: &str = "contains"; -pub const RESOURCE_CONTAINED_IN_LINK_TAG: &str = "contained_in"; -pub const RESOURCE_AFFECTED_BY_EVENT_LINK_TAG: &str = "affected_by"; -pub const RESOURCE_CONFORMS_TO_LINK_TAG: &str = "conforms_to"; - -pub const RESOURCE_CONTAINS_INDEXING_API_METHOD: &str = "_internal_reindex_contained_resources"; -pub const RESOURCE_CONTAINS_READ_API_METHOD: &str = "_internal_read_contained_resources"; -pub const RESOURCE_CONTAINEDIN_INDEXING_API_METHOD: &str = "_internal_reindex_container_resources"; -pub const RESOURCE_CONTAINEDIN_READ_API_METHOD: &str = "_internal_read_container_resource"; -pub 
const RESOURCE_AFFECTED_BY_READ_API_METHOD: &str = "_internal_read_affecting_events"; -pub const RESOURCE_SPECIFICATION_RESOURCES_INDEXING_API_METHOD: &str = "index_resource_specification_resources"; -pub const RESOURCE_CONFORMSTO_INDEXING_API_METHOD: &str = "_internal_reindex_resource_specifications"; diff --git a/zomes/rea_economic_resource/zome/Cargo.toml b/zomes/rea_economic_resource/zome/Cargo.toml index 0e2b26742..f69559473 100644 --- a/zomes/rea_economic_resource/zome/Cargo.toml +++ b/zomes/rea_economic_resource/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_economic_resource_zome_api = { path = "../zome_api" } hc_zome_rea_economic_resource_lib = { path = "../lib" } diff --git a/zomes/rea_economic_resource/zome/src/lib.rs b/zomes/rea_economic_resource/zome/src/lib.rs index b1fca882c..cf6b1cb31 100644 --- a/zomes/rea_economic_resource/zome/src/lib.rs +++ b/zomes/rea_economic_resource/zome/src/lib.rs @@ -18,6 +18,7 @@ use hc_zome_rea_economic_resource_storage::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + EconomicResourceAddress::entry_def(), EntryDef { id: CAP_STORAGE_ENTRY_DEF_ID.into(), visibility: EntryVisibility::Private, diff --git a/zomes/rea_economic_resource/zome_api/src/lib.rs b/zomes/rea_economic_resource/zome_api/src/lib.rs index 950b6e6a1..8b7876e47 100644 --- a/zomes/rea_economic_resource/zome_api/src/lib.rs +++ b/zomes/rea_economic_resource/zome_api/src/lib.rs @@ -11,11 +11,11 @@ use hc_zome_rea_economic_resource_storage::{EntryData}; pub trait API { type S: AsRef; - fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData)>; + fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData)>; fn update_inventory_from_event( resource_entry_def_id: Self::S, event: EventCreateRequest, - ) -> RecordAPIResult>; + ) -> RecordAPIResult>; fn get_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, address: EconomicResourceAddress) -> RecordAPIResult; fn update_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, resource: UpdateRequest) -> RecordAPIResult; fn get_all_economic_resources(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S) -> RecordAPIResult; @@ -33,7 +33,7 @@ macro_rules! declare_economic_resource_zome_api { // :TODO: The signature of this method, and its decoupling from the EconomicEvent zome, means that resources can be // instantiated from the receiving inventory. Is this desirable? What are the repercussions? #[hdk_extern] - fn _internal_create_inventory(params: CreationPayload) -> ExternResult<(RevisionHash, EconomicResourceAddress, EntryData)> + fn _internal_create_inventory(params: CreationPayload) -> ExternResult<(HeaderHash, EconomicResourceAddress, EntryData)> { Ok(<$zome_api>::create_inventory_from_event( RESOURCE_ENTRY_TYPE, @@ -42,7 +42,7 @@ macro_rules! declare_economic_resource_zome_api { } #[hdk_extern] - fn _internal_update_inventory(event: EventCreateRequest) -> ExternResult> + fn _internal_update_inventory(event: EventCreateRequest) -> ExternResult> { Ok(<$zome_api>::update_inventory_from_event(RESOURCE_ENTRY_TYPE, event)?) } @@ -82,14 +82,19 @@ macro_rules! 
declare_economic_resource_zome_api { macro_rules! declare_economic_resource_zome_validation_defaults { ( /*$zome_api:ty*/ ) => { #[hdk_extern] - fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; + fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } + } + fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(resource_storage) => { let record = resource_storage.entry(); diff --git a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml index 53792681c..2673ba401 100644 --- a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs b/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs index a273ed0b7..9e7b41fea 100644 --- a/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs @@ -6,7 +6,22 @@ */ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_economic_resource_rpc::*; -use hc_zome_rea_economic_event_rpc::ResourceResponseData as ResponseData; +use hc_zome_rea_economic_event_rpc::{ + ResourceResponse as Response, + ResourceResponseData as ResponseData, +}; + +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + EconomicResourceAddress::entry_def(), + ResourceSpecificationAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} #[index_zome] struct EconomicResource { diff --git a/zomes/rea_fulfillment/lib/src/lib.rs b/zomes/rea_fulfillment/lib/src/lib.rs index e22a0afd3..8f635ca14 100644 --- a/zomes/rea_fulfillment/lib/src/lib.rs +++ b/zomes/rea_fulfillment/lib/src/lib.rs @@ -10,12 +10,12 @@ * @package Holo-REA */ use hdk_records::RecordAPIResult; -use vf_attributes_hdk::{RevisionHash, FulfillmentAddress}; +use vf_attributes_hdk::{HeaderHash, FulfillmentAddress}; use hc_zome_rea_fulfillment_storage::EntryData; use hc_zome_rea_fulfillment_rpc::*; /// Create response from input DHT primitives -pub fn construct_response(address: &FulfillmentAddress, revision_id: &RevisionHash, e: &EntryData) -> RecordAPIResult { +pub fn construct_response(address: &FulfillmentAddress, revision_id: &HeaderHash, e: &EntryData) -> RecordAPIResult { Ok(ResponseData { fulfillment: Response { id: address.to_owned(), diff --git a/zomes/rea_fulfillment/lib_destination/Cargo.toml b/zomes/rea_fulfillment/lib_destination/Cargo.toml index 01ff40093..7d249a913 100644 --- a/zomes/rea_fulfillment/lib_destination/Cargo.toml +++ b/zomes/rea_fulfillment/lib_destination/Cargo.toml @@ -6,6 +6,10 @@ edition = "2018" [dependencies] paste = "1.0" +# :TODO: remove if removing debug logging +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_fulfillment_storage = { path = "../storage" } diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index cd9d6b8d4..37365a756 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -26,13 +26,17 @@ use hc_zome_rea_fulfillment_storage::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) -> RecordAPIResult where S: AsRef { let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(Local(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address)))?; + let e = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), economic_event.fulfills(&fulfillment_address)); + hdk::prelude::debug!("handle_create_fulfillment::fulfilled_by index (destination) {:?}", e); // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs @@ -42,7 +46,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) pub fn handle_get_fulfillment(entry_def_id: S, address: FulfillmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } @@ -52,30 +56,32 @@ pub fn 
handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) let (revision_id, base_address, new_entry, prev_entry): (_, FulfillmentAddress, EntryData, EntryData) = update_record(&entry_def_id, &fulfillment.get_revision_id(), fulfillment.to_owned())?; if new_entry.fulfilled_by != prev_entry.fulfilled_by { - update_index!(Local( + let e = update_index!( fulfillment .fulfilled_by(&vec![new_entry.fulfilled_by.clone()]) .not(&vec![prev_entry.fulfilled_by]), - event.fulfills(&base_address) - ))?; + economic_event.fulfills(&base_address) + ); + hdk::prelude::debug!("handle_update_fulfillment::fulfilled_by index (destination) {:?}", e); } construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult { // read any referencing indexes let (base_address, fulfillment) = read_record_entry_by_header::(&revision_id)?; // handle link fields - update_index!(Local(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), event.fulfills(&base_address)))?; + let e = update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), economic_event.fulfills(&base_address)); + hdk::prelude::debug!("handle_delete_fulfillment::fulfilled_by index (destination) {:?}", e); - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Properties accessor for zome config. -fn read_event_index_zome(conf: DnaConfigSliceObservation) -> Option { +fn read_economic_event_index_zome(conf: DnaConfigSliceObservation) -> Option { Some(conf.fulfillment.economic_event_index_zome) } diff --git a/zomes/rea_fulfillment/lib_origin/Cargo.toml b/zomes/rea_fulfillment/lib_origin/Cargo.toml index eea43dcc2..f3992a730 100644 --- a/zomes/rea_fulfillment/lib_origin/Cargo.toml +++ b/zomes/rea_fulfillment/lib_origin/Cargo.toml @@ -6,6 +6,9 @@ edition = "2018" [dependencies] paste = "1.0" +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_fulfillment_storage_consts = { path = "../storage_consts" } diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 5b6fc3a5f..d42c3ef5e 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -28,21 +28,26 @@ use hc_zome_rea_fulfillment_storage::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) -> RecordAPIResult where S: AsRef { let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(Local(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address)))?; + let e = create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address)); + hdk::prelude::debug!("handle_create_fulfillment::fulfills index (origin) {:?}", e); + // :TODO: report any error // update in the associated foreign DNA as well - let _pingback: OtherCellResult = call_zome_method( - fulfillment.get_fulfilled_by(), - &REPLICATE_CREATE_API_METHOD, - 
CreateParams { fulfillment: fulfillment.to_owned() }, + let pingback: OtherCellResult = call_zome_method( + fulfillment.get_fulfilled_by(), + &REPLICATE_CREATE_API_METHOD, + CreateParams { fulfillment: fulfillment.to_owned() }, ); - // :TODO: report any error + hdk::prelude::debug!("handle_create_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, pingback); construct_response(&fulfillment_address, &revision_id, &entry_resp) } @@ -50,7 +55,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) pub fn handle_get_fulfillment(entry_def_id: S, address: FulfillmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } @@ -61,44 +66,48 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) // update commitment indexes in local DNA if new_entry.fulfills != prev_entry.fulfills { - update_index!(Local( + let e = update_index!( fulfillment .fulfills(&vec![new_entry.fulfills.clone()]) .not(&vec![prev_entry.fulfills]), commitment.fulfilled_by(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_fulfillment::fulfills index (origin) {:?}", e); } // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) if new_entry.fulfilled_by != prev_entry.fulfilled_by { - let _pingback: OtherCellResult = call_zome_method( + let pingback: OtherCellResult = call_zome_method( // :TODO: update to intelligently call remote DNAs if new & old target record are not in same network &prev_entry.fulfilled_by, &REPLICATE_UPDATE_API_METHOD, UpdateParams { fulfillment: fulfillment.to_owned() }, ); // :TODO: report any error + hdk::prelude::debug!("handle_update_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_UPDATE_API_METHOD, pingback); } construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult { let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; // update commitment indexes in local DNA - update_index!(Local(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address)))?; + let e = update_index!(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address)); + hdk::prelude::debug!("handle_delete_fulfillment::fulfills index (origin) {:?}", e); // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) - let _pingback: OtherCellResult = call_zome_method( + let pingback: OtherCellResult = call_zome_method( &entry.fulfilled_by, &REPLICATE_DELETE_API_METHOD, ByHeader { address: revision_id.to_owned() }, ); // :TODO: report any error + hdk::prelude::debug!("handle_delete_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, pingback); - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Properties accessor for zome config. 
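The lib_origin changes above likewise stop discarding the result of each cross-DNA call (previously bound to `_pingback`) and log it until error reporting is implemented. A minimal sketch of that shape, with `call_remote_dna` as a hypothetical stand-in for the repo's `call_zome_method` helper:

```rust
use hdk::prelude::*;

// Hypothetical stand-in for `call_zome_method`: a cross-cell call whose
// result we want to observe in the logs rather than silently drop.
fn call_remote_dna(method: &str) -> ExternResult<String> {
    Ok(format!("acknowledged: {}", method))
}

fn replicate_to_remote_dna() -> ExternResult<()> {
    // The local record write has already succeeded by this point; the remote
    // replication outcome is logged for diagnosis instead of being discarded.
    let pingback = call_remote_dna("create_fulfillment");
    debug!("handle_create_fulfillment::call_zome_method {:?}", pingback);
    Ok(())
}
```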
diff --git a/zomes/rea_fulfillment/rpc/src/lib.rs b/zomes/rea_fulfillment/rpc/src/lib.rs index 05b00c605..79058c419 100644 --- a/zomes/rea_fulfillment/rpc/src/lib.rs +++ b/zomes/rea_fulfillment/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ByHeader, ByAddress, + HeaderHash, ByHeader, ByAddress, EconomicEventAddress, CommitmentAddress, }; @@ -39,7 +39,7 @@ pub use vf_attributes_hdk::{ FulfillmentAddress }; #[serde(rename_all = "camelCase")] pub struct Response { pub id: FulfillmentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub fulfilled_by: EconomicEventAddress, pub fulfills: CommitmentAddress, #[serde(skip_serializing_if = "Option::is_none")] @@ -99,7 +99,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")] pub fulfilled_by: MaybeUndefined, // note this setup allows None to be passed but `update_with` ignores it @@ -118,7 +118,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_fulfillment/storage/src/lib.rs b/zomes/rea_fulfillment/storage/src/lib.rs index 3d28f94e1..bcaf0a9b8 100644 --- a/zomes/rea_fulfillment/storage/src/lib.rs +++ b/zomes/rea_fulfillment/storage/src/lib.rs @@ -16,7 +16,6 @@ use hdk_records::{ use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, FulfillmentAddress, EconomicEventAddress, CommitmentAddress, diff --git a/zomes/rea_fulfillment/storage_consts/src/lib.rs b/zomes/rea_fulfillment/storage_consts/src/lib.rs index 40ed7767a..cc6e11f81 100644 --- a/zomes/rea_fulfillment/storage_consts/src/lib.rs +++ b/zomes/rea_fulfillment/storage_consts/src/lib.rs @@ -5,18 +5,8 @@ * * @package Holo-REA */ -pub const FULFILLMENT_BASE_ENTRY_TYPE: &str = "vf_fulfillment_baseurl"; -pub const FULFILLMENT_INITIAL_ENTRY_LINK_TYPE: &str = "vf_fulfillment_entry"; pub const FULFILLMENT_ENTRY_TYPE: &str = "vf_fulfillment"; -pub const FULFILLMENT_FULFILLS_LINK_TYPE: &str = "vf_fulfillment_fulfills"; -pub const FULFILLMENT_FULFILLS_LINK_TAG: &str = "fulfills"; -pub const FULFILLMENT_FULFILLEDBY_LINK_TYPE: &str = "vf_fulfillment_fulfilled_by"; -pub const FULFILLMENT_FULFILLEDBY_LINK_TAG: &str = "fulfilled_by"; pub const REPLICATE_CREATE_API_METHOD: &str = "create_fulfillment"; pub const REPLICATE_UPDATE_API_METHOD: &str = "update_fulfillment"; pub const REPLICATE_DELETE_API_METHOD: &str = "delete_fulfillment"; -pub const COMMITMENT_FULFILLEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_fulfillments"; -pub const FULFILLMENT_FULFILLS_INDEXING_API_METHOD: &str = "_internal_reindex_commitments"; -pub const EVENT_FULFILLS_INDEXING_API_METHOD: &str = "_internal_reindex_fulfillments"; -pub const FULFILLMENT_FULFILLEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_events"; diff --git a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml index 237edab7a..80081aecd 100644 --- a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] 
serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs b/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs index c01a44322..300a20855 100644 --- a/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs +++ b/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs @@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_fulfillment_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} + #[index_zome] struct Fulfillment { fulfilled_by: Local, diff --git a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml index fa5efb789..6480c3c03 100644 --- a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs b/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs index 582128de6..5a37b80bc 100644 --- a/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs +++ b/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs @@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_fulfillment_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Fulfillment { fulfills: Local, diff --git a/zomes/rea_fulfillment/zome_observation/Cargo.toml b/zomes/rea_fulfillment/zome_observation/Cargo.toml index cf0e75f43..507acbe10 100644 --- a/zomes/rea_fulfillment/zome_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_fulfillment_lib_destination = { path = "../lib_destination" } hc_zome_rea_fulfillment_rpc = { path = "../rpc" } diff --git a/zomes/rea_fulfillment/zome_observation/src/lib.rs b/zomes/rea_fulfillment/zome_observation/src/lib.rs index 9d849050f..09a72a164 100644 --- a/zomes/rea_fulfillment/zome_observation/src/lib.rs +++ b/zomes/rea_fulfillment/zome_observation/src/lib.rs @@ -19,6 +19,14 @@ use hc_zome_rea_fulfillment_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: FULFILLMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_fulfillment/zome_planning/Cargo.toml b/zomes/rea_fulfillment/zome_planning/Cargo.toml index fe9ca36a5..92543abba 100644 --- a/zomes/rea_fulfillment/zome_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.124" hc_zome_rea_fulfillment_rpc = { path = "../rpc" } hc_zome_rea_fulfillment_lib_origin = { path = "../lib_origin" } diff --git a/zomes/rea_fulfillment/zome_planning/src/lib.rs b/zomes/rea_fulfillment/zome_planning/src/lib.rs index 581ac4730..469d280ca 100644 --- a/zomes/rea_fulfillment/zome_planning/src/lib.rs +++ b/zomes/rea_fulfillment/zome_planning/src/lib.rs @@ -13,10 +13,19 @@ use hc_zome_rea_fulfillment_lib_origin::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_storage_consts::*; + #[hdk_extern] fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: FULFILLMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_intent/lib/Cargo.toml b/zomes/rea_intent/lib/Cargo.toml index ed04b06b5..bbae58647 100644 --- a/zomes/rea_intent/lib/Cargo.toml +++ b/zomes/rea_intent/lib/Cargo.toml @@ -6,6 +6,11 @@ edition = "2018" [dependencies] paste = "1.0" + +# :TODO: remove if removing debug outputs from this crate +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_intent_storage = { path = "../storage" } diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index a018bd096..bf9b43327 100644 --- 
a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -33,10 +33,12 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record // handle link fields if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &intent { - create_index!(Remote(intent.input_of(input_of), process.intended_inputs(&base_address)))?; + let e = create_index!(intent.input_of(input_of), process.intended_inputs(&base_address)); + hdk::prelude::debug!("handle_create_intent::input_of index {:?}", e); }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &intent { - create_index!(Remote(intent.output_of(output_of), process.intended_outputs(&base_address)))?; + let e = create_index!(intent.output_of(output_of), process.intended_outputs(&base_address)); + hdk::prelude::debug!("handle_create_intent::output_of index {:?}", e); }; // return entire record structure @@ -46,7 +48,7 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record pub fn handle_get_intent(entry_def_id: S, address: IntentAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } @@ -60,47 +62,51 @@ pub fn handle_update_intent(entry_def_id: S, intent: UpdateRequest) -> Record if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + let e = update_index!( intent .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_inputs(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_intent::input_of index {:?}", e); } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + let e = update_index!( intent .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_outputs(&base_address) - ))?; + ); + hdk::prelude::debug!("handle_update_intent::output_of index {:?}", e); } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
 }
 
-pub fn handle_delete_intent(revision_id: RevisionHash) -> RecordAPIResult
+pub fn handle_delete_intent(revision_id: HeaderHash) -> RecordAPIResult
 {
     // load the record to ensure it is of the correct type
     let (base_address, entry) = read_record_entry_by_header::(&revision_id)?;
 
     // handle link fields
     if let Some(process_address) = entry.input_of {
-        update_index!(Remote(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address)))?;
+        let e = update_index!(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address));
+        hdk::prelude::debug!("handle_delete_intent::input_of index {:?}", e);
     }
     if let Some(process_address) = entry.output_of {
-        update_index!(Remote(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address)))?;
+        let e = update_index!(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address));
+        hdk::prelude::debug!("handle_delete_intent::output_of index {:?}", e);
     }
 
     // delete entry last, as it must be present in order for links to be removed
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Create response from input DHT primitives
 pub fn construct_response<'a>(
-    address: &IntentAddress, revision_id: &RevisionHash, e: &EntryData, (
+    address: &IntentAddress, revision_id: &HeaderHash, e: &EntryData, (
         satisfactions,
         // published_in,
     ): (
@@ -146,6 +152,11 @@ fn read_intent_index_zome(conf: DnaConfigSlice) -> Option<String> {
     Some(conf.intent.index_zome)
 }
 
+/// Properties accessor for zome config
+fn read_process_index_zome(conf: DnaConfigSlice) -> Option<String> {
+    conf.intent.process_index_zome
+}
+
 // @see construct_response
 pub fn get_link_fields(intent: &IntentAddress) -> RecordAPIResult<(
     Vec<SatisfactionAddress>,
diff --git a/zomes/rea_intent/rpc/src/lib.rs b/zomes/rea_intent/rpc/src/lib.rs
index 76b00876a..e897d6355 100644
--- a/zomes/rea_intent/rpc/src/lib.rs
+++ b/zomes/rea_intent/rpc/src/lib.rs
@@ -9,7 +9,6 @@ use holochain_serialized_bytes::prelude::*;
 use serde_maybe_undefined::{MaybeUndefined, default_false};
-use vf_attributes_hdk::RevisionHash;
 use vf_measurement::QuantityValue;
 
 pub use vf_attributes_hdk::{
     ActionId,
@@ -22,6 +21,7 @@ pub use vf_attributes_hdk::{
     SatisfactionAddress,
     LocationAddress,
     ProposedIntentAddress,
+    HeaderHash, ByHeader,
 };
 
 //---------------- EXTERNAL RECORD STRUCTURE ----------------
@@ -35,7 +35,7 @@ pub use vf_attributes_hdk::{ IntentAddress };
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: IntentAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub action: ActionId,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub note: Option<String>,
@@ -154,7 +154,7 @@ impl<'a> CreateRequest {
 
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     pub action: MaybeUndefined<ActionId>,
     #[serde(default)]
@@ -200,7 +200,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_intent/storage/src/lib.rs b/zomes/rea_intent/storage/src/lib.rs
index dfe743123..4e7289f20 100644
--- a/zomes/rea_intent/storage/src/lib.rs
+++ b/zomes/rea_intent/storage/src/lib.rs
@@ -16,7 +16,6 @@ use hdk_records::{
 
 use vf_measurement::QuantityValue;
 pub use vf_attributes_hdk::{
-    RevisionHash,
     ActionId,
     DateTime, FixedOffset,
     ExternalURL,
@@ -46,6 +45,7 @@ pub struct DnaConfigSlice {
 
 #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)]
 pub struct IntentZomeConfig {
     pub index_zome: String,
+    pub process_index_zome: Option<String>,
 }
 
 //---------------- RECORD INTERNALS & VALIDATION ----------------
diff --git a/zomes/rea_intent/storage_consts/src/lib.rs b/zomes/rea_intent/storage_consts/src/lib.rs
index 3d9e4fe24..4a6407015 100644
--- a/zomes/rea_intent/storage_consts/src/lib.rs
+++ b/zomes/rea_intent/storage_consts/src/lib.rs
@@ -6,18 +6,3 @@
  * @package Holo-REA
  */
 pub const INTENT_ENTRY_TYPE: &str = "vf_intent";
-pub const INTENT_SATISFIEDBY_LINK_TAG: &str = "satisfied_by";
-pub const INTENT_INPUT_OF_LINK_TAG: &str = "input_of";
-pub const INTENT_OUTPUT_OF_LINK_TAG: &str = "output_of";
-
-pub const INTENT_PUBLISHED_IN_LINK_TAG: &str = "published_in";
-
-pub const INTENT_SATISFIEDBY_READ_API_METHOD: &str = "_internal_read_intent_satisfactions";
-
-pub const INTENT_INPUT_READ_API_METHOD: &str = "_internal_read_intent_process_inputs";
-pub const INTENT_INPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_inputs";
-pub const PROCESS_INPUT_INDEXING_API_METHOD: &str = "index_process_input_intents";
-
-pub const INTENT_OUTPUT_READ_API_METHOD: &str = "_internal_read_intent_process_outputs";
-pub const INTENT_OUTPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_outputs";
-pub const PROCESS_OUTPUT_INDEXING_API_METHOD: &str = "index_process_output_intents";
diff --git a/zomes/rea_intent/zome/Cargo.toml b/zomes/rea_intent/zome/Cargo.toml
index 5b1aa674e..9d26dbe74 100644
--- a/zomes/rea_intent/zome/Cargo.toml
+++ b/zomes/rea_intent/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_intent_rpc = { path = "../rpc" }
 hc_zome_rea_intent_lib = { path = "../lib" }
diff --git a/zomes/rea_intent/zome/src/lib.rs b/zomes/rea_intent/zome/src/lib.rs
index 1a4014c84..f47d3a758 100644
--- a/zomes/rea_intent/zome/src/lib.rs
+++ b/zomes/rea_intent/zome/src/lib.rs
@@ -15,14 +15,19 @@ use hc_zome_rea_intent_storage::*;
 use hc_zome_rea_intent_storage_consts::*;
 
 #[hdk_extern]
-fn validate(validation_data: ValidateData) -> ExternResult<ValidateCallbackResult> {
-    let element = validation_data.element;
-    let entry = element.into_inner().1;
-    let entry = match entry {
-        ElementEntry::Present(e) => e,
-        _ => return Ok(ValidateCallbackResult::Valid),
-    };
+fn validate(op: Op) -> ExternResult<ValidateCallbackResult> {
+    match op {
+        Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid),
+        Op::StoreEntry { entry, .. } => validate_entry(entry),
+        Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid),
+        Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid),
+        Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid),
+        Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid),
+        Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid),
+    }
+}
+fn validate_entry(entry: Entry) -> ExternResult<ValidateCallbackResult> {
     match EntryStorage::try_from(&entry) {
         Ok(event_storage) => {
             let record = event_storage.entry();
@@ -39,6 +44,7 @@ fn validate(validation_data: ValidateData) -> ExternResult
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        IntentAddress::entry_def(),
         EntryDef {
             id: CAP_STORAGE_ENTRY_DEF_ID.into(),
             visibility: EntryVisibility::Private,
@@ -89,11 +95,6 @@ fn update_intent(UpdateParams { intent }: UpdateParams) -> ExternResult
 ExternResult {
     Ok(handle_delete_intent(address)?)
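Note on the recurring pattern in these hunks: every `create_index!(...)?` / `update_index!(...)?` call loses its `Local(...)`/`Remote(...)` wrapper and its `?`, binding the `Result` and emitting it through `hdk::prelude::debug!` instead, so a failed (possibly cross-DNA) index write no longer aborts the host zome call. A minimal standalone sketch of that control-flow change, using stand-in functions rather than the real `hdk_semantic_indexes_client_lib` macros:

    // Sketch only: `write_index` stands in for the create_index!/update_index!
    // macro expansion, and String for the real error type.
    fn write_index(tag: &str) -> Result<usize, String> {
        // pretend the index cell is unreachable
        Err(format!("index '{}': remote cell unreachable", tag))
    }

    fn handle_create() -> Result<(), String> {
        // before: `write_index("input_of")?;` -- a failed index write aborted
        // the whole record creation, leaving neither record nor links.
        // after: capture the Result and log it; record creation proceeds.
        let e = write_index("input_of");
        println!("handle_create::input_of index {:?}", e); // debug!(...) inside a zome
        Ok(())
    }

    fn main() { handle_create().unwrap(); }

The trade-off is that callers no longer observe indexing failures, so index consistency now depends on logs and later repair rather than on transactional rollback.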
diff --git a/zomes/rea_intent/zome_idx_planning/Cargo.toml b/zomes/rea_intent/zome_idx_planning/Cargo.toml
index 2a007dcbc..52969cd12 100644
--- a/zomes/rea_intent/zome_idx_planning/Cargo.toml
+++ b/zomes/rea_intent/zome_idx_planning/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_intent/zome_idx_planning/src/lib.rs b/zomes/rea_intent/zome_idx_planning/src/lib.rs
index a67bf390e..bd74e1fdf 100644
--- a/zomes/rea_intent/zome_idx_planning/src/lib.rs
+++ b/zomes/rea_intent/zome_idx_planning/src/lib.rs
@@ -7,6 +7,19 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_intent_rpc::*;
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        IntentAddress::entry_def(),
+        SatisfactionAddress::entry_def(),
+        ProcessAddress::entry_def(),
+        ProposedIntentAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct Intent {
     satisfied_by: Local,
diff --git a/zomes/rea_process/lib/src/lib.rs b/zomes/rea_process/lib/src/lib.rs
index 855ff74be..9e7a62e6e 100644
--- a/zomes/rea_process/lib/src/lib.rs
+++ b/zomes/rea_process/lib/src/lib.rs
@@ -32,7 +32,7 @@ pub fn handle_create_process(entry_def_id: S, process: CreateRequest) -> Reco
 
 pub fn handle_get_process(entry_def_id: S, address: ProcessAddress) -> RecordAPIResult
     where S: AsRef<str>
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     construct_response(&base_address, &revision, &entry, get_link_fields(&address)?)
 }
 
@@ -44,17 +44,17 @@ pub fn handle_update_process(entry_def_id: S, process: UpdateRequest) -> Reco
     construct_response(&identity_address, &revision_id, &entry, get_link_fields(&identity_address)?)
 }
 
-pub fn handle_delete_process(_entry_def_id: S, revision_id: RevisionHash) -> RecordAPIResult
+pub fn handle_delete_process(_entry_def_id: S, revision_id: HeaderHash) -> RecordAPIResult
 {
     // load the record to ensure it is of the correct type
     let (_base_address, _entry) = read_record_entry_by_header::(&revision_id)?;
 
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Create response from input DHT primitives
 fn construct_response<'a>(
-    address: &ProcessAddress, revision_id: &RevisionHash, e: &EntryData, (
+    address: &ProcessAddress, revision_id: &HeaderHash, e: &EntryData, (
         inputs, outputs,
         unplanned_economic_events,
         committed_inputs, committed_outputs,
diff --git a/zomes/rea_process/rpc/src/lib.rs b/zomes/rea_process/rpc/src/lib.rs
index 251ba78f5..164d7ffd5 100644
--- a/zomes/rea_process/rpc/src/lib.rs
+++ b/zomes/rea_process/rpc/src/lib.rs
@@ -8,9 +8,9 @@ use serde_maybe_undefined::{
     default_false,
 };
 pub use vf_attributes_hdk::{
-    RevisionHash,
+    HeaderHash, ByHeader,
     ProcessAddress,
-    Timestamp,
+    DateTime, FixedOffset,
     ExternalURL,
     ProcessSpecificationAddress,
     PlanAddress,
@@ -27,16 +27,16 @@ pub use vf_attributes_hdk::{
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ProcessAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub name: String,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_beginning: Option<Timestamp>,
+    pub has_beginning: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_end: Option<Timestamp>,
+    pub has_end: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub before: Option<Timestamp>,
+    pub before: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub after: Option<Timestamp>,
+    pub after: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub classified_as: Option<Vec<ExternalURL>>,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -104,13 +104,13 @@ pub struct ResponseData
 pub struct CreateRequest {
     pub name: String,
     #[serde(default)]
-    pub has_beginning: MaybeUndefined<Timestamp>,
+    pub has_beginning: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub has_end: MaybeUndefined<Timestamp>,
+    pub has_end: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub before: MaybeUndefined<Timestamp>,
+    pub before: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub after: MaybeUndefined<Timestamp>,
+    pub after: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
     pub classified_as: MaybeUndefined<Vec<ExternalURL>>,
     #[serde(default)]
@@ -135,17 +135,17 @@ impl<'a> CreateRequest {
 
 #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     pub name: MaybeUndefined<String>,
     #[serde(default)]
-    pub has_beginning: MaybeUndefined<Timestamp>,
+    pub has_beginning: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub has_end: MaybeUndefined<Timestamp>,
+    pub has_end: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub before: MaybeUndefined<Timestamp>,
+    pub before: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub after: MaybeUndefined<Timestamp>,
+    pub after: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
     pub classified_as: MaybeUndefined<Vec<ExternalURL>>,
     #[serde(default)]
@@ -161,7 +161,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_process/storage/src/lib.rs b/zomes/rea_process/storage/src/lib.rs
index 823085c71..6d519ab20 100644
--- a/zomes/rea_process/storage/src/lib.rs
+++ b/zomes/rea_process/storage/src/lib.rs
@@ -16,7 +16,7 @@ use hdk_records::{
 
 use vf_attributes_hdk::{
     ProcessAddress,
-    Timestamp,
+    DateTime, FixedOffset,
     ExternalURL,
     ProcessSpecificationAddress,
     PlanAddress,
@@ -42,10 +42,10 @@ pub struct ProcessZomeConfig {
 #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)]
 pub struct EntryData {
     pub name: String,
-    pub has_beginning: Option<Timestamp>,
-    pub has_end: Option<Timestamp>,
-    pub before: Option<Timestamp>,
-    pub after: Option<Timestamp>,
+    pub has_beginning: Option<DateTime<FixedOffset>>,
+    pub has_end: Option<DateTime<FixedOffset>>,
+    pub before: Option<DateTime<FixedOffset>>,
+    pub after: Option<DateTime<FixedOffset>>,
     pub classified_as: Option<Vec<ExternalURL>>,
     pub based_on: Option<ProcessSpecificationAddress>,
     pub planned_within: Option<PlanAddress>,
diff --git a/zomes/rea_process/storage_consts/src/lib.rs b/zomes/rea_process/storage_consts/src/lib.rs
index 5ceb2ef13..df9c6da2a 100644
--- a/zomes/rea_process/storage_consts/src/lib.rs
+++ b/zomes/rea_process/storage_consts/src/lib.rs
@@ -6,16 +6,3 @@
  * @package Holo-REA
 */
 pub const PROCESS_ENTRY_TYPE: &str = "vf_process";
-pub const PROCESS_EVENT_INPUTS_LINK_TAG: &str = "inputs";
-pub const PROCESS_EVENT_OUTPUTS_LINK_TAG: &str = "outputs";
-pub const PROCESS_COMMITMENT_INPUTS_LINK_TAG: &str = "committed_inputs";
-pub const PROCESS_COMMITMENT_OUTPUTS_LINK_TAG: &str = "committed_outputs";
-pub const PROCESS_INTENT_INPUTS_LINK_TAG: &str = "intended_inputs";
-pub const PROCESS_INTENT_OUTPUTS_LINK_TAG: &str = "intended_outputs";
-
-pub const PROCESS_EVENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_inputs";
-pub const PROCESS_EVENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_outputs";
-pub const PROCESS_COMMITMENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_committed_inputs";
-pub const PROCESS_COMMITMENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_committed_outputs";
-pub const PROCESS_INTENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_intended_inputs";
-pub const PROCESS_INTENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_intended_outputs";
diff --git a/zomes/rea_process/zome/Cargo.toml b/zomes/rea_process/zome/Cargo.toml
index 9f6f0b083..bae5424b4 100644
--- a/zomes/rea_process/zome/Cargo.toml
+++ b/zomes/rea_process/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_records = { path = "../../../lib/hdk_records" }
 vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" }
diff --git a/zomes/rea_process/zome/src/lib.rs b/zomes/rea_process/zome/src/lib.rs
index 0b383d277..9e752b0b3 100644
--- a/zomes/rea_process/zome/src/lib.rs
+++ b/zomes/rea_process/zome/src/lib.rs
@@ -17,6 +17,7 @@ use hc_zome_rea_process_rpc::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ProcessAddress::entry_def(),
         EntryDef {
             id: PROCESS_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
@@ -57,12 +58,7 @@ fn update_process(UpdateParams { process }: UpdateParams) -> ExternResult
 ExternResult {
+fn delete_process(ByHeader { address }: ByHeader) -> ExternResult {
     Ok(handle_delete_process(PROCESS_ENTRY_TYPE, address)?)
 }
diff --git a/zomes/rea_process/zome_idx_observation/Cargo.toml b/zomes/rea_process/zome_idx_observation/Cargo.toml
index 13fbb134d..e85f273fa 100644
--- a/zomes/rea_process/zome_idx_observation/Cargo.toml
+++ b/zomes/rea_process/zome_idx_observation/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_process/zome_idx_observation/src/lib.rs b/zomes/rea_process/zome_idx_observation/src/lib.rs
index c401215cb..0fe4a7df0 100644
--- a/zomes/rea_process/zome_idx_observation/src/lib.rs
+++ b/zomes/rea_process/zome_idx_observation/src/lib.rs
@@ -7,6 +7,19 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_process_rpc::*;
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        IntentAddress::entry_def(),
+        CommitmentAddress::entry_def(),
+        ProcessAddress::entry_def(),
+        EconomicEventAddress::entry_def(),
+    ]))
+}
+
 #[index_zome(query_fn_name="query_processes")]
 struct Process {
     inputs: Local,
diff --git a/zomes/rea_process_specification/lib/src/lib.rs b/zomes/rea_process_specification/lib/src/lib.rs
index 9e742b658..f038dd6b5 100644
--- a/zomes/rea_process_specification/lib/src/lib.rs
+++ b/zomes/rea_process_specification/lib/src/lib.rs
@@ -30,7 +30,7 @@ pub fn handle_create_process_specification(entry_def_id: S, process_specifica
 
 pub fn handle_get_process_specification(entry_def_id: S, address: ProcessSpecificationAddress) -> RecordAPIResult
     where S: AsRef<str>,
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     Ok(construct_response(&base_address, &revision, &entry))
 }
 
@@ -42,14 +42,14 @@ pub fn handle_update_process_specification(entry_def_id: S, process_specifica
     Ok(construct_response(&base_address, &revision_id, &new_entry))
 }
 
-pub fn handle_delete_process_specification(revision_id: RevisionHash) -> RecordAPIResult
+pub fn handle_delete_process_specification(revision_id: HeaderHash) -> RecordAPIResult
 {
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Create response from input DHT primitives
 fn construct_response<'a>(
-    address: &ProcessSpecificationAddress, revision_id: &RevisionHash, e: &EntryData,
+    address: &ProcessSpecificationAddress, revision_id: &HeaderHash, e: &EntryData,
 ) -> ResponseData {
     ResponseData {
         process_specification: Response {
diff --git a/zomes/rea_process_specification/rpc/src/lib.rs b/zomes/rea_process_specification/rpc/src/lib.rs
index 114e4397c..cad3459a6 100644
--- a/zomes/rea_process_specification/rpc/src/lib.rs
+++ b/zomes/rea_process_specification/rpc/src/lib.rs
@@ -9,7 +9,7 @@ use holochain_serialized_bytes::prelude::*;
 use serde_maybe_undefined::MaybeUndefined;
 
 pub use vf_attributes_hdk::{
-    RevisionHash, ByAddress, ByHeader,
+    HeaderHash, ByAddress, ByHeader,
     ProcessSpecificationAddress,
 };
 
@@ -32,7 +32,7 @@ pub struct UpdateParams {
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ProcessSpecificationAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub name: String,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub note: Option<String>,
 
@@ -67,7 +67,7 @@ impl<'a> CreateRequest {
 
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     pub name: MaybeUndefined<String>,
     #[serde(default)]
@@ -75,7 +75,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_process_specification/storage_consts/src/lib.rs b/zomes/rea_process_specification/storage_consts/src/lib.rs
index 30d6c6efd..bd123619a 100644
--- a/zomes/rea_process_specification/storage_consts/src/lib.rs
+++ b/zomes/rea_process_specification/storage_consts/src/lib.rs
@@ -6,5 +6,3 @@
  * @package Holo-REA
 */
 pub const PROCESS_SPECIFICATION_ENTRY_TYPE: &str = "vf_process_specification";
-pub const PROCESS_SPECIFICATION_BASE_ENTRY_TYPE: &str = "vf_process_specification_baseurl";
-pub const PROCESS_SPECIFICATION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_process_specification_entry";
diff --git a/zomes/rea_process_specification/zome/Cargo.toml b/zomes/rea_process_specification/zome/Cargo.toml
index 45b403314..721a2c215 100644
--- a/zomes/rea_process_specification/zome/Cargo.toml
+++ b/zomes/rea_process_specification/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_process_specification_rpc = { path = "../rpc" }
 hc_zome_rea_process_specification_lib = { path = "../lib" }
diff --git a/zomes/rea_process_specification/zome/src/lib.rs b/zomes/rea_process_specification/zome/src/lib.rs
index 666465880..d519e153b 100644
--- a/zomes/rea_process_specification/zome/src/lib.rs
+++ b/zomes/rea_process_specification/zome/src/lib.rs
@@ -17,6 +17,7 @@ use hc_zome_rea_process_specification_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ProcessSpecificationAddress::entry_def(),
         EntryDef {
             id: PROCESS_SPECIFICATION_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
diff --git a/zomes/rea_proposal/lib/src/lib.rs b/zomes/rea_proposal/lib/src/lib.rs
index ee3df49fb..615b68494 100644
--- a/zomes/rea_proposal/lib/src/lib.rs
+++ b/zomes/rea_proposal/lib/src/lib.rs
@@ -31,7 +31,7 @@ pub fn handle_create_proposal(entry_def_id: S, proposal: CreateRequest) -> Re
 
 pub fn handle_get_proposal(entry_def_id: S, address: ProposalAddress) -> RecordAPIResult
     where S: AsRef<str>,
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     Ok(construct_response(&base_address, &revision, &entry, get_link_fields(&base_address)?))
 }
 
@@ -43,14 +43,14 @@ pub fn handle_update_proposal(entry_def_id: S, proposal: UpdateRequest) -> Re
     Ok(construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?))
 }
 
-pub fn handle_delete_proposal(address: RevisionHash) -> RecordAPIResult {
-    delete_record::(&address)
+pub fn handle_delete_proposal(address: HeaderHash) -> RecordAPIResult {
+    delete_record::(&address)
 }
 
 /// Create response from input DHT primitives
 fn construct_response<'a>(
     address: &ProposalAddress,
-    revision_id: &RevisionHash,
+    revision_id: &HeaderHash,
     e: &EntryData,
     (publishes, published_to): (
         Vec<ProposedIntentAddress>,
diff --git a/zomes/rea_proposal/rpc/src/lib.rs b/zomes/rea_proposal/rpc/src/lib.rs
index 11615b71f..fe7145678 100644
--- a/zomes/rea_proposal/rpc/src/lib.rs
+++ b/zomes/rea_proposal/rpc/src/lib.rs
@@ -9,8 +9,9 @@ use holochain_serialized_bytes::prelude::*;
 use serde_maybe_undefined::MaybeUndefined;
 
 pub use vf_attributes_hdk::{
-    RevisionHash, ByAddress, ByHeader,
-    ProposalAddress, ProposedIntentAddress, ProposedToAddress, Timestamp,
+    HeaderHash, ByAddress, ByHeader,
+    ProposalAddress, ProposedIntentAddress, ProposedToAddress,
+    DateTime, FixedOffset,
 };
 
 /// Toplevel I/O structs for WASM API
@@ -33,16 +34,16 @@ pub struct UpdateParams {
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ProposalAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub name: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_beginning: Option<Timestamp>,
+    pub has_beginning: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_end: Option<Timestamp>,
+    pub has_end: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub unit_based: Option<bool>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub created: Option<Timestamp>,
+    pub created: Option<DateTime<FixedOffset>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub note: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -76,13 +77,13 @@ pub struct CreateRequest {
     #[serde(default)]
     pub name: MaybeUndefined<String>,
     #[serde(default)]
-    pub has_beginning: MaybeUndefined<Timestamp>,
+    pub has_beginning: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub has_end: MaybeUndefined<Timestamp>,
+    pub has_end: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
     pub unit_based: MaybeUndefined<bool>,
     #[serde(default)]
-    pub created: MaybeUndefined<Timestamp>,
+    pub created: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
     pub note: MaybeUndefined<String>,
     #[serde(default)]
@@ -100,13 +101,13 @@ impl<'a> CreateRequest {
 
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     pub name: MaybeUndefined<String>,
     #[serde(default)]
-    pub has_beginning: MaybeUndefined<Timestamp>,
+    pub has_beginning: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
-    pub has_end: MaybeUndefined<Timestamp>,
+    pub has_end: MaybeUndefined<DateTime<FixedOffset>>,
     #[serde(default)]
     pub unit_based: MaybeUndefined<bool>,
     #[serde(default)]
@@ -116,7 +117,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_proposal/storage/src/lib.rs b/zomes/rea_proposal/storage/src/lib.rs
index 1daee20b7..58e6b4332 100644
--- a/zomes/rea_proposal/storage/src/lib.rs
+++ b/zomes/rea_proposal/storage/src/lib.rs
@@ -13,7 +13,7 @@ use hdk_records::{
     generate_record_entry,
 };
 
-pub use vf_attributes_hdk::{ ProposalAddress, ProposedIntentAddress, ProposedToAddress, Timestamp };
+pub use vf_attributes_hdk::{ ProposalAddress, ProposedIntentAddress, ProposedToAddress, DateTime, FixedOffset };
 
 use hc_zome_rea_proposal_rpc::{CreateRequest, UpdateRequest};
 
@@ -35,10 +35,10 @@ pub struct ProposalZomeConfig {
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 pub struct EntryData {
     pub name: Option<String>,
-    pub has_beginning: Option<Timestamp>,
-    pub has_end: Option<Timestamp>,
+    pub has_beginning: Option<DateTime<FixedOffset>>,
+    pub has_end: Option<DateTime<FixedOffset>>,
     pub unit_based: Option<bool>,
-    pub created: Option<Timestamp>,
+    pub created: Option<DateTime<FixedOffset>>,
     pub note: Option<String>,
     pub in_scope_of: Option<Vec<String>>,
     //[TODO]:
diff --git a/zomes/rea_proposal/storage_consts/src/lib.rs b/zomes/rea_proposal/storage_consts/src/lib.rs
index 09fbf555c..661fcf894 100644
--- a/zomes/rea_proposal/storage_consts/src/lib.rs
+++ b/zomes/rea_proposal/storage_consts/src/lib.rs
@@ -6,9 +6,3 @@
  * @package Holo-REA
 */
 pub const PROPOSAL_ENTRY_TYPE: &str = "vf_proposal";
-
-pub const PROPOSAL_PUBLISHES_LINK_TAG: &str = "publishes";
-pub const PROPOSAL_PUBLISHED_TO_LINK_TAG: &str = "published_to";
-
-pub const PROPOSAL_PUBLISHES_READ_API_METHOD: &str = "_internal_read_proposal_proposed_intents";
-pub const PROPOSAL_PUBLISHED_TO_READ_API_METHOD: &str = "_internal_read_proposal_participants";
diff --git a/zomes/rea_proposal/zome/Cargo.toml b/zomes/rea_proposal/zome/Cargo.toml
index fd2faabb2..047b84b5a 100644
--- a/zomes/rea_proposal/zome/Cargo.toml
+++ b/zomes/rea_proposal/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_proposal_rpc = { path = "../rpc" }
 hc_zome_rea_proposal_lib = { path = "../lib" }
diff --git a/zomes/rea_proposal/zome/src/lib.rs b/zomes/rea_proposal/zome/src/lib.rs
index 825a77388..f355d01ab 100644
--- a/zomes/rea_proposal/zome/src/lib.rs
+++ b/zomes/rea_proposal/zome/src/lib.rs
@@ -17,6 +17,7 @@ use hc_zome_rea_proposal_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ProposalAddress::entry_def(),
         EntryDef {
             id: PROPOSAL_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
diff --git a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml
index efe496f5e..553f3ed06 100644
--- a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml
+++ b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_proposal/zome_idx_proposal/src/lib.rs b/zomes/rea_proposal/zome_idx_proposal/src/lib.rs
index 9550449ae..9169641a6 100644
--- a/zomes/rea_proposal/zome_idx_proposal/src/lib.rs
+++ b/zomes/rea_proposal/zome_idx_proposal/src/lib.rs
@@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_proposal_rpc::*;
 use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        ProposalAddress::entry_def(),
+        ProposedIntentAddress::entry_def(),
+        ProposedToAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct Proposal {
     publishes: Local,
diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs
index 3f1ab41b6..7bb7388f9 100644
--- a/zomes/rea_proposed_intent/lib/src/lib.rs
+++ b/zomes/rea_proposed_intent/lib/src/lib.rs
@@ -21,14 +21,19 @@ use hdk_semantic_indexes_client_lib::*;
 use hc_zome_rea_proposed_intent_rpc::*;
 use hc_zome_rea_proposed_intent_storage::*;
 
+// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden
+pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID;
+
 pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: CreateRequest) -> RecordAPIResult
     where S: AsRef<str>,
 {
     let (revision_id, base_address, entry_resp): (_, ProposedIntentAddress, EntryData) = create_record(&entry_def_id, proposed_intent.to_owned())?;
 
     // handle link fields
-    create_index!(Local(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address)))?;
-    create_index!(Remote(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address)))?;
+    let r1 = create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address));
+    hdk::prelude::debug!("handle_create_proposed_intent::published_in index {:?}", r1);
+    let r2 = create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address));
+    hdk::prelude::debug!("handle_create_proposed_intent::publishes index {:?}", r2);
 
     Ok(construct_response(&base_address, &revision_id, &entry_resp))
 }
 
@@ -36,31 +41,33 @@ pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: Create
 pub fn handle_get_proposed_intent(entry_def_id: S, address: ProposedIntentAddress) -> RecordAPIResult
     where S: AsRef<str>,
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     Ok(construct_response(&base_address, &revision, &entry))
 }
 
-pub fn handle_delete_proposed_intent(revision_id: &RevisionHash) -> RecordAPIResult
+pub fn handle_delete_proposed_intent(revision_id: &HeaderHash) -> RecordAPIResult
 {
     let (base_address, entry) = read_record_entry_by_header::(&revision_id)?;
 
     // Notify indexing zomes in local DNA (& validate).
     // Allows authors of indexing modules to intervene in the deletion of a record.
-    update_index!(Local(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address)))?;
+    let r1 = update_index!(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address));
+    hdk::prelude::debug!("handle_delete_proposed_intent::published_in index {:?}", r1);
 
     // manage record deletion
-    let res = delete_record::(&revision_id);
+    let res = delete_record::(&revision_id);
 
     // Update in associated foreign DNAs as well.
-    // :TODO: In this pattern, foreign cells can also intervene in record deletion, and cause rollback.
+    // :TODO: If we caught errors here, foreign cells can also intervene in record deletion, and cause rollback.
     //        Is this desirable? Should the behaviour be configurable?
-    update_index!(Remote(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address)))?;
+    let r2 = update_index!(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address));
+    hdk::prelude::debug!("handle_delete_proposed_intent::publishes index {:?}", r2);
 
     res
 }
 
 /// Create response from input DHT primitives
-fn construct_response<'a>(address: &ProposedIntentAddress, revision_id: &RevisionHash, e: &EntryData) -> ResponseData {
+fn construct_response<'a>(address: &ProposedIntentAddress, revision_id: &HeaderHash, e: &EntryData) -> ResponseData {
     ResponseData {
         proposed_intent: Response {
             // entry fields
@@ -83,3 +90,8 @@ fn read_proposed_intent_index_zome(conf: DnaConfigSlice) -> Option<String> {
 fn read_proposal_index_zome(conf: DnaConfigSlice) -> Option<String> {
     Some(conf.proposed_intent.proposal_index_zome)
 }
+
+/// Properties accessor for zome config.
+fn read_intent_index_zome(conf: DnaConfigSlice) -> Option<String> {
+    conf.proposed_intent.intent_index_zome
+}
diff --git a/zomes/rea_proposed_intent/rpc/src/lib.rs b/zomes/rea_proposed_intent/rpc/src/lib.rs
index 44b94c474..ae4692736 100644
--- a/zomes/rea_proposed_intent/rpc/src/lib.rs
+++ b/zomes/rea_proposed_intent/rpc/src/lib.rs
@@ -8,7 +8,7 @@
 */
 use holochain_serialized_bytes::prelude::*;
 
 pub use vf_attributes_hdk::{
-    RevisionHash, ByAddress, ByHeader,
+    HeaderHash, ByAddress, ByHeader,
     ProposedIntentAddress, IntentAddress, ProposalAddress,
 };
 
@@ -19,7 +19,7 @@ pub use vf_attributes_hdk::{
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ProposedIntentAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub reciprocal: bool,
     pub published_in: ProposalAddress,
     pub publishes: IntentAddress,
diff --git a/zomes/rea_proposed_intent/storage/src/lib.rs b/zomes/rea_proposed_intent/storage/src/lib.rs
index 65a183861..aaa5bf0eb 100644
--- a/zomes/rea_proposed_intent/storage/src/lib.rs
+++ b/zomes/rea_proposed_intent/storage/src/lib.rs
@@ -26,8 +26,9 @@ pub struct DnaConfigSlice {
 
 #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)]
 pub struct ProposedIntentZomeConfig {
-    pub proposal_index_zome: String,
     pub index_zome: String,
+    pub proposal_index_zome: String,
+    pub intent_index_zome: Option<String>,
 }
 
 //---------------- RECORD INTERNALS & VALIDATION ----------------
diff --git a/zomes/rea_proposed_intent/storage_consts/src/lib.rs b/zomes/rea_proposed_intent/storage_consts/src/lib.rs
index 422189d64..17ca2b867 100644
--- a/zomes/rea_proposed_intent/storage_consts/src/lib.rs
+++ b/zomes/rea_proposed_intent/storage_consts/src/lib.rs
@@ -6,16 +6,3 @@
 * @package Holo-REA
 */
 pub const PROPOSED_INTENT_ENTRY_TYPE: &str = "vf_proposed_intent";
-
-pub const PROPOSED_INTENT_PUBLISHED_IN_LINK_TYPE: &str = "vf_proposed_intent_published_in";
-pub const PROPOSED_INTENT_PUBLISHED_IN_LINK_TAG: &str = "published_in";
-
-pub const PROPOSED_INTENT_PUBLISHES_LINK_TYPE: &str = "vf_proposed_intent_publishes";
-pub const PROPOSED_INTENT_PUBLISHES_LINK_TAG: &str = "publishes";
-
-pub const PROPOSED_INTENT_PROPOSAL_INDEXING_API_METHOD: &str = "_internal_reindex_proposals";
-pub const PROPOSAL_PROPOSED_INTENT_INDEXING_API_METHOD: &str = "_internal_reindex_proposed_intents";
-
-pub const INTENT_PUBLISHEDIN_INDEXING_API_METHOD: &str = "index_intent_proposals";
-
-pub const PROPOSED_INTENT_PROPOSES_INDEXING_API_METHOD: &str = "_internal_index_";
diff --git a/zomes/rea_proposed_intent/zome/Cargo.toml b/zomes/rea_proposed_intent/zome/Cargo.toml
index 6931f1bb0..fe2d73eb5 100644
--- a/zomes/rea_proposed_intent/zome/Cargo.toml
+++ b/zomes/rea_proposed_intent/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_proposed_intent_rpc = { path = "../rpc" }
 hc_zome_rea_proposed_intent_lib = { path = "../lib" }
diff --git a/zomes/rea_proposed_intent/zome/src/lib.rs b/zomes/rea_proposed_intent/zome/src/lib.rs
index b9e64a699..9f78cb0ad 100644
--- a/zomes/rea_proposed_intent/zome/src/lib.rs
+++ b/zomes/rea_proposed_intent/zome/src/lib.rs
@@ -17,6 +17,14 @@ use hc_zome_rea_proposed_intent_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ProposedIntentAddress::entry_def(),
+        EntryDef {
+            id: CAP_STORAGE_ENTRY_DEF_ID.into(),
+            visibility: EntryVisibility::Private,
+            crdt_type: CrdtType,
+            required_validations: 1.into(),
+            required_validation_type: RequiredValidationType::default(),
+        },
         EntryDef {
             id: PROPOSED_INTENT_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
diff --git a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml
index 26a52c2a5..3625845cd 100644
--- a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml
+++ b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs b/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs
index 96b81c2f1..15ed89743 100644
--- a/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs
+++ b/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs
@@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_proposed_intent_rpc::*;
 use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        ProposedIntentAddress::entry_def(),
+        ProposalAddress::entry_def(),
+        IntentAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct ProposedIntent {
     published_in: Local,
diff --git a/zomes/rea_proposed_to/lib/Cargo.toml b/zomes/rea_proposed_to/lib/Cargo.toml
index 27193c2b6..fbcf52d97 100644
--- a/zomes/rea_proposed_to/lib/Cargo.toml
+++ b/zomes/rea_proposed_to/lib/Cargo.toml
@@ -6,6 +6,10 @@ edition = "2018"
 [dependencies]
 paste = "1.0"
+# :TODO: remove if removing debug logging
+# :DUPE: hdk-rust-revid
+hdk = "0.0.124"
+
 hdk_records = { path = "../../../lib/hdk_records" }
 hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" }
 hc_zome_rea_proposed_to_storage = { path = "../storage" }
diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs
index 5aebf17bf..963aea06c 100644
--- a/zomes/rea_proposed_to/lib/src/lib.rs
+++ b/zomes/rea_proposed_to/lib/src/lib.rs
@@ -27,7 +27,8 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest)
     let (revision_id, base_address, entry_resp): (_, ProposedToAddress, EntryData) = create_record(&entry_def_id, proposed_to.to_owned())?;
 
     // handle link fields
-    create_index!(Local(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address)))?;
+    let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.published_to(&base_address));
+    hdk::prelude::debug!("handle_create_proposed_to::proposed index {:?}", r1);
 
     // :TODO: create index for retrieving all proposals for an agent
 
@@ -37,21 +38,22 @@ pub fn handle_get_proposed_to(entry_def_id: S, address: ProposedToAddress) -> RecordAPIResult
     where S: AsRef<str>,
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     Ok(construct_response(&base_address, &revision, &entry))
 }
 
-pub fn handle_delete_proposed_to(revision_id: &RevisionHash) -> RecordAPIResult
+pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult
 {
     let (base_address, entry) = read_record_entry_by_header::(&revision_id)?;
 
-    update_index!(Local(proposed_to.proposed.not(&vec![entry.proposed]), proposal.proposed_to(&base_address)))?;
+    let e = update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.published_to(&base_address));
+    hdk::prelude::debug!("handle_delete_proposed_to::proposed index {:?}", e);
 
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Create response from input DHT primitives
-fn construct_response<'a>(address: &ProposedToAddress, revision_id: &RevisionHash, e: &EntryData) -> ResponseData {
+fn construct_response<'a>(address: &ProposedToAddress, revision_id: &HeaderHash, e: &EntryData) -> ResponseData {
     ResponseData {
         proposed_to: Response {
             id: address.to_owned(),
diff --git a/zomes/rea_proposed_to/rpc/src/lib.rs b/zomes/rea_proposed_to/rpc/src/lib.rs
index 8d49f1c62..bfb504b84 100644
--- a/zomes/rea_proposed_to/rpc/src/lib.rs
+++ b/zomes/rea_proposed_to/rpc/src/lib.rs
@@ -8,7 +8,7 @@
 */
 use holochain_serialized_bytes::prelude::*;
 
 pub use vf_attributes_hdk::{
-    RevisionHash, ByAddress, ByHeader,
+    HeaderHash, ByAddress, ByHeader,
     ProposedToAddress, AgentAddress, ProposalAddress,
 };
 
@@ -27,7 +27,7 @@ pub struct CreateParams {
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ProposedToAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub proposed_to: AgentAddress,
     pub proposed: ProposalAddress,
 }
diff --git a/zomes/rea_proposed_to/storage_consts/src/lib.rs b/zomes/rea_proposed_to/storage_consts/src/lib.rs
index d365475ac..6bf8fa893 100644
--- a/zomes/rea_proposed_to/storage_consts/src/lib.rs
+++ b/zomes/rea_proposed_to/storage_consts/src/lib.rs
@@ -6,9 +6,3 @@
 * @package Holo-REA
 */
 pub const PROPOSED_TO_ENTRY_TYPE: &str = "vf_proposed_to";
-
-pub const PROPOSED_TO_PROPOSED_LINK_TAG: &str = "proposed";
-pub const PROPOSED_TO_PROPOSED_TO_LINK_TAG: &str = "proposed_to";
-
-pub const PROPOSED_TO_PROPOSAL_INDEXING_API_METHOD: &str = "_internal_reindex_proposals";
-pub const PROPOSAL_PROPOSED_TO_INDEXING_API_METHOD: &str = "_internal_reindex_proposed_to";
diff --git a/zomes/rea_proposed_to/zome/Cargo.toml b/zomes/rea_proposed_to/zome/Cargo.toml
index 8e7e85b55..1ab23f1c6 100644
--- a/zomes/rea_proposed_to/zome/Cargo.toml
+++ b/zomes/rea_proposed_to/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_proposed_to_rpc = { path = "../rpc" }
 hc_zome_rea_proposed_to_lib = { path = "../lib" }
diff --git a/zomes/rea_proposed_to/zome/src/lib.rs b/zomes/rea_proposed_to/zome/src/lib.rs
index 89f2373e1..4f4ddf19e 100644
--- a/zomes/rea_proposed_to/zome/src/lib.rs
+++ b/zomes/rea_proposed_to/zome/src/lib.rs
@@ -17,6 +17,7 @@ use hc_zome_rea_proposed_to_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ProposedToAddress::entry_def(),
         EntryDef {
             id: PROPOSED_TO_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
diff --git a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml
index 9bda9b5d2..86f0307cc 100644
--- a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml
+++ b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs b/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs
index 09fbe8398..1cd87af5d 100644
--- a/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs
+++ b/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs
@@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_proposed_to_rpc::*;
 use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        ProposedToAddress::entry_def(),
+        ProposalAddress::entry_def(),
+        // AgentAddress::entry_def(), // :TODO:
+    ]))
+}
+
 #[index_zome]
 struct ProposedTo {
     proposed: Local,
diff --git a/zomes/rea_resource_specification/lib/src/lib.rs b/zomes/rea_resource_specification/lib/src/lib.rs
index 9fa763523..a27fce341 100644
--- a/zomes/rea_resource_specification/lib/src/lib.rs
+++ b/zomes/rea_resource_specification/lib/src/lib.rs
@@ -34,7 +34,7 @@ pub fn handle_create_resource_specification(entry_def_id: S, resource_specifi
 
 pub fn handle_get_resource_specification(entry_def_id: S, address: ResourceSpecificationAddress) -> RecordAPIResult
     where S: AsRef<str>,
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     Ok(construct_response(&address, &revision, &entry, get_link_fields(&base_address)?))
 }
 
@@ -46,15 +46,15 @@ pub fn handle_update_resource_specification(entry_def_id: S, resource_specifi
     Ok(construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?))
 }
 
-pub fn handle_delete_resource_specification(revision_id: RevisionHash) -> RecordAPIResult
+pub fn handle_delete_resource_specification(revision_id: HeaderHash) -> RecordAPIResult
 {
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Create response from input DHT primitives
 fn construct_response<'a>(
     address: &ResourceSpecificationAddress,
-    revision_id: &RevisionHash,
+    revision_id: &HeaderHash,
     e: &EntryData,
     // :TODO: link conforming resources in associated link registry DNA module
     (
diff --git a/zomes/rea_resource_specification/rpc/src/lib.rs b/zomes/rea_resource_specification/rpc/src/lib.rs
index 042392406..2da8e8b38 100644
--- a/zomes/rea_resource_specification/rpc/src/lib.rs
+++ b/zomes/rea_resource_specification/rpc/src/lib.rs
@@ -9,7 +9,7 @@ use holochain_serialized_bytes::prelude::*;
 use serde_maybe_undefined::MaybeUndefined;
 
 pub use vf_attributes_hdk::{
-    RevisionHash, ByAddress, ByHeader,
+    HeaderHash, ByAddress, ByHeader,
     ResourceSpecificationAddress,
     EconomicResourceAddress,
     ExternalURL,
@@ -36,7 +36,7 @@ pub struct UpdateParams {
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: ResourceSpecificationAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub name: String,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub image: Option<ExternalURL>,
@@ -83,7 +83,7 @@ impl<'a> CreateRequest {
 
 #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     pub name: MaybeUndefined<String>,
     #[serde(default)]
@@ -95,7 +95,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_resource_specification/storage_consts/src/lib.rs b/zomes/rea_resource_specification/storage_consts/src/lib.rs
index e3562f009..72474370a 100644
--- a/zomes/rea_resource_specification/storage_consts/src/lib.rs
+++ b/zomes/rea_resource_specification/storage_consts/src/lib.rs
@@ -6,7 +6,3 @@
 * @package Holo-REA
 */
 pub const ECONOMIC_RESOURCE_SPECIFICATION_ENTRY_TYPE: &str = "vf_resource_specification";
-pub const ECONOMIC_RESOURCE_SPECIFICATION_BASE_ENTRY_TYPE: &str = "vf_resource_specification_baseurl";
-pub const ECONOMIC_RESOURCE_SPECIFICATION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_resource_specification_entry";
-pub const RESOURCE_SPECIFICATION_CONFORMING_RESOURCE_LINK_TYPE: &str = "vf_resource_specification_conforming_resource";
-pub const RESOURCE_SPECIFICATION_CONFORMING_RESOURCE_LINK_TAG: &str = "conforming_resource";
diff --git a/zomes/rea_resource_specification/zome/Cargo.toml b/zomes/rea_resource_specification/zome/Cargo.toml
index 64ceb5c0e..e4b6d9368 100644
--- a/zomes/rea_resource_specification/zome/Cargo.toml
+++ b/zomes/rea_resource_specification/zome/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_resource_specification_rpc = { path = "../rpc" }
 hc_zome_rea_resource_specification_lib = { path = "../lib" }
diff --git a/zomes/rea_resource_specification/zome/src/lib.rs b/zomes/rea_resource_specification/zome/src/lib.rs
index f5ea89ce9..1b56a63dc 100644
--- a/zomes/rea_resource_specification/zome/src/lib.rs
+++ b/zomes/rea_resource_specification/zome/src/lib.rs
@@ -17,6 +17,7 @@ use hc_zome_rea_resource_specification_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        ResourceSpecificationAddress::entry_def(),
         EntryDef {
             id: ECONOMIC_RESOURCE_SPECIFICATION_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
diff --git a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml
index 1c36b0260..57934e730 100644
--- a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml
+++ b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_satisfaction/lib/src/lib.rs b/zomes/rea_satisfaction/lib/src/lib.rs
index b44cb50f4..dd0cdfdec 100644
--- a/zomes/rea_satisfaction/lib/src/lib.rs
+++ b/zomes/rea_satisfaction/lib/src/lib.rs
@@ -10,12 +10,12 @@
 * @package Holo-REA
 */
 use hdk_records::RecordAPIResult;
-use vf_attributes_hdk::{RevisionHash, SatisfactionAddress};
+use vf_attributes_hdk::{HeaderHash, SatisfactionAddress};
 
 use hc_zome_rea_satisfaction_storage::EntryData;
 use hc_zome_rea_satisfaction_rpc::*;
 
 /// Create response from input DHT primitives
-pub fn construct_response(address: &SatisfactionAddress, revision_id: &RevisionHash, e: &EntryData) -> RecordAPIResult<ResponseData> {
+pub fn construct_response(address: &SatisfactionAddress, revision_id: &HeaderHash, e: &EntryData) -> RecordAPIResult<ResponseData> {
     Ok(ResponseData {
         satisfaction: Response {
             id: address.to_owned().into(),
diff --git a/zomes/rea_satisfaction/lib_destination/Cargo.toml b/zomes/rea_satisfaction/lib_destination/Cargo.toml
index bb784e84d..48a5ae5b2 100644
--- a/zomes/rea_satisfaction/lib_destination/Cargo.toml
+++ b/zomes/rea_satisfaction/lib_destination/Cargo.toml
@@ -6,6 +6,10 @@ edition = "2018"
 [dependencies]
 paste = "1.0"
+# :TODO: remove if removing debug logging
+# :DUPE: hdk-rust-revid
+hdk = "0.0.124"
+
 hdk_records = { path = "../../../lib/hdk_records" }
 hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" }
 hc_zome_rea_satisfaction_storage = { path = "../storage" }
diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs
index 89d033ca7..a9a6fd425 100644
--- a/zomes/rea_satisfaction/lib_destination/src/lib.rs
+++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs
@@ -26,13 +26,17 @@ use hc_zome_rea_satisfaction_storage::*;
 use hc_zome_rea_satisfaction_rpc::*;
 use hc_zome_rea_satisfaction_lib::construct_response;
 
+// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden
+pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID;
+
 pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateRequest) -> RecordAPIResult
     where S: AsRef<str>
 {
     let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?;
 
     // link entries in the local DNA
-    create_index!(Local(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address)))?;
+    let r1 = create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address));
+    hdk::prelude::debug!("handle_create_satisfaction::satisfied_by index (destination) {:?}", r1);
 
     // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs
 
@@ -42,7 +46,7 @@ pub fn handle_get_satisfaction(entry_def_id: S, address: SatisfactionAddress) -> RecordAPIResult
     where S: AsRef<str>
 {
-    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
+    let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?;
     construct_response(&base_address, &revision, &entry)
 }
 
@@ -52,26 +56,28 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques
     let (revision_id, base_address, new_entry, prev_entry): (_, SatisfactionAddress, EntryData, EntryData) = update_record(&entry_def_id, &satisfaction.get_revision_id(), satisfaction.to_owned())?;
 
     if new_entry.satisfied_by != prev_entry.satisfied_by {
-        update_index!(Local(
+        let e = update_index!(
             satisfaction
                 .satisfied_by(&vec![new_entry.satisfied_by.to_owned()])
                 .not(&vec![prev_entry.satisfied_by]),
             economic_event.satisfies(&base_address)
-        ))?;
+        );
+        hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (destination) {:?}", e);
     }
 
     construct_response(&base_address, &revision_id, &new_entry)
 }
 
-pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult
+pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult
 {
     // read any referencing indexes
     let (base_address, entry) = read_record_entry_by_header::(&revision_id)?;
 
     // handle link fields
-    update_index!(Local(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address)))?;
+    let e = update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address));
+    hdk::prelude::debug!("handle_delete_satisfaction::satisfied_by index (destination) {:?}", e);
 
-    delete_record::(&revision_id)
+    delete_record::(&revision_id)
 }
 
 /// Properties accessor for zome config.
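Note: alongside the `RevisionHash` to `HeaderHash` rename, every time-bearing field in these RPC and storage structs moves from `Timestamp` to `DateTime<FixedOffset>`, which round-trips through serde as an RFC 3339 string and preserves the writer's UTC offset. A small sketch of the behaviour the structs now rely on (assumes `chrono` with its `serde` feature, which is the type `vf_attributes_hdk` re-exports, plus `serde_json` for the demo):

    use chrono::{DateTime, FixedOffset};
    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize, Debug)]
    #[serde(rename_all = "camelCase")]
    struct TimeWindow {
        // same shape as the hasBeginning / hasEnd fields in the hunks above
        has_beginning: Option<DateTime<FixedOffset>>,
    }

    fn main() {
        let json = r#"{"hasBeginning":"2022-03-01T09:30:00+10:00"}"#;
        let w: TimeWindow = serde_json::from_str(json).unwrap();
        // the +10:00 offset survives the round trip, unlike a bare timestamp
        println!("{}", serde_json::to_string(&w).unwrap());
    }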
diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index b3b6ba184..aa93af95c 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -13,7 +13,7 @@ use paste::paste; use hdk::prelude::*; use crate::holo_hash::DnaHash; use hdk_records::{ - RecordAPIResult, + RecordAPIResult, OtherCellResult, records::{ create_record, read_record_entry, @@ -30,27 +30,35 @@ use hc_zome_rea_satisfaction_storage::*; use hc_zome_rea_satisfaction_rpc::*; use hc_zome_rea_satisfaction_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateRequest) -> RecordAPIResult where S: AsRef { let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - create_index!(Local(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address)))?; + let r1 = create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address)); + hdk::prelude::debug!("handle_create_satisfaction::satisfies index (origin) {:?}", r1); // link entries which may be local or remote let event_or_commitment = satisfaction.get_satisfied_by(); if is_satisfiedby_local_commitment(event_or_commitment)? { - // links to local commitment, create link index pair - create_index!(Local(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address)))?; + // links to local commitment, create link index pair + let r2 = create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address)); + hdk::prelude::debug!("handle_create_satisfaction::satisfied_by index (origin) {:?}", r2); } else { - // links to remote event, ping associated foreign DNA & fail if there's an error - // :TODO: consider the implications of this in loosely coordinated multi-network spaces - call_zome_method( - event_or_commitment, - &REPLICATE_CREATE_API_METHOD, - CreateParams { satisfaction: satisfaction.to_owned() }, - )?; + // links to remote event, ping associated foreign DNA & fail if there's an error + // :TODO: consider the implications of this in loosely coordinated multi-network spaces + // we assign a type to the response so that call_zome_method can + // effectively deserialize the response without failing + let result: OtherCellResult = call_zome_method( + event_or_commitment, + &REPLICATE_CREATE_API_METHOD, + CreateParams { satisfaction: satisfaction.to_owned() }, + ); + hdk::prelude::debug!("handle_create_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, result); } construct_response(&satisfaction_address, &revision_id, &entry_resp) @@ -59,7 +67,7 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques pub fn handle_get_satisfaction(entry_def_id: S, address: SatisfactionAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } @@ -70,12 +78,13 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques // update intent indexes in local DNA if new_entry.satisfies 
@@ -70,12 +78,13 @@ pub fn handle_update_satisfaction<S>(entry_def_id: S, satisfaction: UpdateReques
 
     // update intent indexes in local DNA
     if new_entry.satisfies != prev_entry.satisfies {
-        update_index!(Local(
+        let e = update_index!(
             satisfaction
                 .satisfies(&vec![new_entry.satisfies.to_owned()])
                 .not(&vec![prev_entry.satisfies]),
             intent.satisfied_by(&base_address)
-        ))?;
+        );
+        hdk::prelude::debug!("handle_update_satisfaction::satisfies index (origin) {:?}", e);
     }
 
     // update commitment / event indexes in local and/or remote DNA
@@ -87,39 +96,44 @@ pub fn handle_update_satisfaction<S>(entry_def_id: S, satisfaction: UpdateReques
     if same_dna {
         if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? {
             // both values were local, update the index directly
-            update_index!(Local(
+            let e = update_index!(
                 satisfaction
                     .satisfied_by(&vec![new_entry.satisfied_by.to_owned()])
                     .not(&vec![prev_entry.satisfied_by]),
                 commitment.satisfies(&base_address)
-            ))?;
+            );
+            hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e);
         } else {
             // both values were remote and in the same DNA, forward the update
-            call_zome_method(
+            let result: OtherCellResult<ResponseData> = call_zome_method(
                 &prev_entry.satisfied_by,
                 &REPLICATE_UPDATE_API_METHOD,
                 UpdateParams { satisfaction: satisfaction.to_owned() },
-            )?;
+            );
+            hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_UPDATE_API_METHOD, result);
         }
     } else {
         if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? {
             // previous value was local, clear the index directly
-            update_index!(Local(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address)))?;
+            let e = update_index!(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address));
+            hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e);
         } else {
             // previous value was remote, handle the remote update as a deletion
-            call_zome_method(
+            let result: OtherCellResult<ResponseData> = call_zome_method(
                 &prev_entry.satisfied_by,
                 &REPLICATE_DELETE_API_METHOD,
                 ByHeader { address: satisfaction.get_revision_id().to_owned() },
-            )?;
+            );
+            hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, result);
         }
         if is_satisfiedby_local_commitment(&new_entry.satisfied_by)? {
             // new value was local, add the index directly
-            update_index!(Local(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address)))?;
+            let e = update_index!(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address));
+            hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e);
         } else {
             // new value was remote, handle the remote update as a creation
-            call_zome_method(
+            let result: OtherCellResult<ResponseData> = call_zome_method(
                 &new_entry.satisfied_by,
                 &REPLICATE_CREATE_API_METHOD,
                 CreateParams { satisfaction: CreateRequest {
@@ -129,7 +143,8 @@ pub fn handle_update_satisfaction<S>(entry_def_id: S, satisfaction: UpdateReques
                     effort_quantity: new_entry.effort_quantity.to_owned().into(),
                     note: new_entry.note.to_owned().into(),
                 } },
-            )?;
+            );
+            hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, result);
         }
     }
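For orientation, the update handler fans out along three axes: whether the old and new `satisfied_by` targets share a DNA, and whether each side is a local commitment or a remote event. A hypothetical condensation (this helper does not exist in the codebase; it only restates the branching above):

    /// Maps (same_dna, prev_is_local, new_is_local) onto the replication steps taken.
    fn describe_update_paths(same_dna: bool, prev_local: bool, new_local: bool) -> Vec<&'static str> {
        if same_dna {
            if prev_local {
                vec!["rewrite the local commitment index in place"]
            } else {
                vec!["forward the whole update via REPLICATE_UPDATE_API_METHOD"]
            }
        } else {
            vec![
                if prev_local { "clear the old target from the local index" }
                else { "issue a remote delete via REPLICATE_DELETE_API_METHOD" },
                if new_local { "add the new target to the local index" }
                else { "issue a remote create via REPLICATE_CREATE_API_METHOD" },
            ]
        }
    }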
@@ -139,28 +154,31 @@ pub fn handle_update_satisfaction<S>(entry_def_id: S, satisfaction: UpdateReques
     construct_response(&base_address, &revision_id, &new_entry)
 }
 
-pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult<bool>
+pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult<bool>
 {
     let (base_address, entry) = read_record_entry_by_header::<EntryData, EntryStorage, _>(&revision_id)?;
 
     // update intent indexes in local DNA
-    update_index!(Local(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address)))?;
+    let e = update_index!(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address));
+    hdk::prelude::debug!("handle_delete_satisfaction::satisfies index (origin) {:?}", e);
 
     // update commitment & event indexes in local or remote DNAs
     let event_or_commitment = entry.satisfied_by.to_owned();
     if is_satisfiedby_local_commitment(&event_or_commitment)? {
-        update_index!(Local(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address)))?;
+        let e = update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address));
+        hdk::prelude::debug!("handle_delete_satisfaction::satisfied_by index (origin) {:?}", e);
     } else {
         // links to remote event, ping associated foreign DNA & fail if there's an error
        // :TODO: consider the implications of this in loosely coordinated multi-network spaces
-        call_zome_method(
+        let result: OtherCellResult<ResponseData> = call_zome_method(
            &event_or_commitment,
            &REPLICATE_DELETE_API_METHOD,
            ByHeader { address: revision_id.to_owned() },
-        )?;
+        );
+        hdk::prelude::debug!("handle_delete_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, result);
     }
 
-    delete_record::<EntryStorage>(&revision_id)
+    delete_record::<EntryStorage>(&revision_id)
 }
 
 fn is_satisfiedby_local_commitment(event_or_commitment: &EventOrCommitmentAddress) -> RecordAPIResult<bool> {
diff --git a/zomes/rea_satisfaction/rpc/src/lib.rs b/zomes/rea_satisfaction/rpc/src/lib.rs
index 602a5ece8..d40347342 100644
--- a/zomes/rea_satisfaction/rpc/src/lib.rs
+++ b/zomes/rea_satisfaction/rpc/src/lib.rs
@@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*;
 use serde_maybe_undefined::{MaybeUndefined};
 use vf_measurement::QuantityValue;
 pub use vf_attributes_hdk::{
-    RevisionHash, ByHeader, ByAddress,
+    HeaderHash, ByHeader, ByAddress,
     SatisfactionAddress,
     EventOrCommitmentAddress,
     EconomicEventAddress,
@@ -39,7 +39,7 @@ pub struct UpdateParams {
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: SatisfactionAddress,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub satisfied_by: EventOrCommitmentAddress,
     pub satisfies: IntentAddress,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -99,7 +99,7 @@ impl<'a> CreateRequest {
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     #[serde(default)]
     #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")]
     pub satisfied_by: MaybeUndefined<EventOrCommitmentAddress>, // note this setup allows None to be passed but `update_with` ignores it
@@ -118,7 +118,7 @@ pub struct UpdateRequest {
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_satisfaction/storage_consts/src/lib.rs b/zomes/rea_satisfaction/storage_consts/src/lib.rs
index cf96092bd..0b8695a4f 100644
--- a/zomes/rea_satisfaction/storage_consts/src/lib.rs
+++ b/zomes/rea_satisfaction/storage_consts/src/lib.rs
@@ -5,20 +5,8 @@
  *
  * @package Holo-REA
  */
-pub const SATISFACTION_BASE_ENTRY_TYPE: &str = "vf_satisfaction_baseurl";
-pub const SATISFACTION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_satisfaction_entry";
 pub const SATISFACTION_ENTRY_TYPE: &str = "vf_satisfaction";
-pub const SATISFACTION_SATISFIES_LINK_TYPE: &str = "vf_satisfaction_satisfies";
-pub const SATISFACTION_SATISFIES_LINK_TAG: &str = "satisfies";
-pub const SATISFACTION_SATISFIEDBY_LINK_TYPE: &str = "vf_satisfaction_satisfied_by";
-pub const SATISFACTION_SATISFIEDBY_LINK_TAG: &str = "satisfied_by";
 
 pub const REPLICATE_CREATE_API_METHOD: &str = "create_satisfaction";
 pub const REPLICATE_UPDATE_API_METHOD: &str = "update_satisfaction";
 pub const REPLICATE_DELETE_API_METHOD: &str = "delete_satisfaction";
-pub const CHECK_COMMITMENT_API_METHOD: &str = "get_commitment";
-pub const INTENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const COMMITMENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const EVENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const SATISFACTION_SATISFIEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_satisfiedby"; // :NOTE: same in both observation and planning zome APIs
-pub const SATISFACTION_SATISFIES_INDEXING_API_METHOD: &str = "_internal_reindex_intents";
diff --git a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml
index c767550f9..5a4f6dcd0 100644
--- a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml
+++ b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs b/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs
index 8f46163fb..b6f389ce6 100644
--- a/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs
+++ b/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs
@@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_satisfaction_rpc::*;
 use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        EconomicEventAddress::entry_def(),
+        SatisfactionAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct Satisfaction {
     // :NOTE: this gets updated by shadowed local record storage zome, not the remote one in Planning DNA
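Beyond that explicit `entry_defs()` workaround, the index zomes are generated almost entirely by the #[index_zome] derive, which appears to expand the annotated struct into the query externs and the internal reindexing hooks, the `_internal_reindex_*` names that the constants deleted from storage_consts above used to refer to. A sketch of the planning-side declaration as it appears later in this diff (the `Local<..>` type parameters were reconstructed from context and should be treated as assumptions):

    #[index_zome]
    struct Satisfaction {
        satisfies: Local<intent, satisfied_by>,      // reciprocal index into Intent records
        satisfied_by: Local<commitment, satisfies>,  // reciprocal index into Commitment records
    }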
diff --git a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml
index 99b509501..70ca3e229 100644
--- a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml
+++ b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" }
 hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" }
diff --git a/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs b/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs
index 3b3daf014..dea1a0128 100644
--- a/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs
+++ b/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs
@@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome;
 use hc_zome_rea_satisfaction_rpc::*;
 use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct
 
+// :TODO: remove this; should not be necessary since all these types are imported
+// along with their entry_def! in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        IntentAddress::entry_def(),
+        SatisfactionAddress::entry_def(),
+        CommitmentAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct Satisfaction {
     satisfies: Local<intent, satisfied_by>,
diff --git a/zomes/rea_satisfaction/zome_observation/Cargo.toml b/zomes/rea_satisfaction/zome_observation/Cargo.toml
index 61683a5a4..98c2d1e06 100644
--- a/zomes/rea_satisfaction/zome_observation/Cargo.toml
+++ b/zomes/rea_satisfaction/zome_observation/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_satisfaction_lib_destination = { path = "../lib_destination" }
 hc_zome_rea_satisfaction_rpc = { path = "../rpc" }
diff --git a/zomes/rea_satisfaction/zome_observation/src/lib.rs b/zomes/rea_satisfaction/zome_observation/src/lib.rs
index cd72e57c0..65ea63e4b 100644
--- a/zomes/rea_satisfaction/zome_observation/src/lib.rs
+++ b/zomes/rea_satisfaction/zome_observation/src/lib.rs
@@ -19,29 +19,45 @@ use hc_zome_rea_satisfaction_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        SatisfactionAddress::entry_def(),
+        EntryDef {
+            id: CAP_STORAGE_ENTRY_DEF_ID.into(),
+            visibility: EntryVisibility::Private,
+            crdt_type: CrdtType,
+            required_validations: 1.into(),
+            required_validation_type: RequiredValidationType::default(),
+        },
         EntryDef {
             id: SATISFACTION_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
             crdt_type: CrdtType,
             required_validations: 1.into(),
             required_validation_type: RequiredValidationType::default(),
-        }
+        },
     ]))
 }
 
 #[hdk_extern]
 fn satisfaction_created(CreateParams { satisfaction }: CreateParams) -> ExternResult<ResponseData> {
-    Ok(handle_create_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?)
+    Ok(handle_create_satisfaction(
+        SATISFACTION_ENTRY_TYPE,
+        satisfaction,
+    )?)
 }
 
 #[hdk_extern]
-fn get_satisfaction(ByAddress { address }: ByAddress<SatisfactionAddress>) -> ExternResult<ResponseData> {
+fn get_satisfaction(
+    ByAddress { address }: ByAddress<SatisfactionAddress>,
+) -> ExternResult<ResponseData> {
     Ok(handle_get_satisfaction(SATISFACTION_ENTRY_TYPE, address)?)
 }
 
 #[hdk_extern]
 fn satisfaction_updated(UpdateParams { satisfaction }: UpdateParams) -> ExternResult<ResponseData> {
-    Ok(handle_update_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?)
+    Ok(handle_update_satisfaction(
+        SATISFACTION_ENTRY_TYPE,
+        satisfaction,
+    )?)
 }
 
 #[hdk_extern]
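The notable addition in this `entry_defs()` is the second definition: judging from the name and the private visibility, hdk_records' remote-call plumbing stores capability access tokens for foreign cells on the local source chain, so every zome making such calls must declare the private entry type those tokens are written to (hence the :SHONK: re-export of CAP_STORAGE_ENTRY_DEF_ID from lib_origin earlier in this diff). Marking it private keeps credentials off the public DHT:

    EntryDef {
        id: CAP_STORAGE_ENTRY_DEF_ID.into(),   // shared def id re-exported from hdk_records
        visibility: EntryVisibility::Private,  // source-chain only, never gossiped
        crdt_type: CrdtType,
        required_validations: 1.into(),
        required_validation_type: RequiredValidationType::default(),
    }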
diff --git a/zomes/rea_satisfaction/zome_planning/Cargo.toml b/zomes/rea_satisfaction/zome_planning/Cargo.toml
index 55f532cc3..0d77b9a6c 100644
--- a/zomes/rea_satisfaction/zome_planning/Cargo.toml
+++ b/zomes/rea_satisfaction/zome_planning/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_satisfaction_rpc = { path = "../rpc" }
 hc_zome_rea_satisfaction_lib_origin = { path = "../lib_origin" }
diff --git a/zomes/rea_satisfaction/zome_planning/src/lib.rs b/zomes/rea_satisfaction/zome_planning/src/lib.rs
index e4d354f6e..370a538ce 100644
--- a/zomes/rea_satisfaction/zome_planning/src/lib.rs
+++ b/zomes/rea_satisfaction/zome_planning/src/lib.rs
@@ -17,29 +17,45 @@ use hc_zome_rea_satisfaction_storage_consts::*;
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        SatisfactionAddress::entry_def(),
+        EntryDef {
+            id: CAP_STORAGE_ENTRY_DEF_ID.into(),
+            visibility: EntryVisibility::Private,
+            crdt_type: CrdtType,
+            required_validations: 1.into(),
+            required_validation_type: RequiredValidationType::default(),
+        },
         EntryDef {
             id: SATISFACTION_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
             crdt_type: CrdtType,
             required_validations: 1.into(),
             required_validation_type: RequiredValidationType::default(),
-        }
+        },
     ]))
 }
 
 #[hdk_extern]
 fn create_satisfaction(CreateParams { satisfaction }: CreateParams) -> ExternResult<ResponseData> {
-    Ok(handle_create_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?)
+    Ok(handle_create_satisfaction(
+        SATISFACTION_ENTRY_TYPE,
+        satisfaction,
+    )?)
 }
 
 #[hdk_extern]
-fn get_satisfaction(ByAddress { address }: ByAddress<SatisfactionAddress>) -> ExternResult<ResponseData> {
+fn get_satisfaction(
+    ByAddress { address }: ByAddress<SatisfactionAddress>,
+) -> ExternResult<ResponseData> {
     Ok(handle_get_satisfaction(SATISFACTION_ENTRY_TYPE, address)?)
 }
 
 #[hdk_extern]
 fn update_satisfaction(UpdateParams { satisfaction }: UpdateParams) -> ExternResult<ResponseData> {
-    Ok(handle_update_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?)
+    Ok(handle_update_satisfaction(
+        SATISFACTION_ENTRY_TYPE,
+        satisfaction,
+    )?)
 }
 
 #[hdk_extern]
diff --git a/zomes/rea_unit/lib/src/lib.rs b/zomes/rea_unit/lib/src/lib.rs
index 87510a772..ba757df52 100644
--- a/zomes/rea_unit/lib/src/lib.rs
+++ b/zomes/rea_unit/lib/src/lib.rs
@@ -47,12 +47,12 @@ pub fn handle_update_unit<S>(entry_def_id: S, unit: UpdateRequest) -> RecordAPIR
     Ok(construct_response(&new_id, &new_revision, &new_entry))
 }
 
-pub fn handle_delete_unit(revision_id: RevisionHash) -> RecordAPIResult<bool> {
-    delete_anchored_record::<EntryStorage>(&revision_id)
+pub fn handle_delete_unit(revision_id: HeaderHash) -> RecordAPIResult<bool> {
+    delete_anchored_record::<EntryStorage>(&revision_id)
 }
 
 fn construct_response<'a>(
-    id: &UnitId, revision_id: &RevisionHash, e: &EntryData
+    id: &UnitId, revision_id: &HeaderHash, e: &EntryData
 ) -> ResponseData {
     ResponseData {
         unit: Response {
diff --git a/zomes/rea_unit/rpc/src/lib.rs b/zomes/rea_unit/rpc/src/lib.rs
index 7b46b87de..738f3286f 100644
--- a/zomes/rea_unit/rpc/src/lib.rs
+++ b/zomes/rea_unit/rpc/src/lib.rs
@@ -17,7 +17,7 @@ use hdk_records::{
 
 // Export external type interface to allow consuming zomes to easily import & define zome API
 pub use vf_attributes_hdk::{
-    RevisionHash,
+    HeaderHash,
     UnitId,
 };
 
@@ -27,7 +27,7 @@ pub use vf_attributes_hdk::{
 #[serde(rename_all = "camelCase")]
 pub struct Response {
     pub id: UnitId,
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub label: String,
     pub symbol: String,
 }
@@ -72,13 +72,13 @@ impl UniquelyIdentifiable for CreateRequest {
 #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateRequest {
-    pub revision_id: RevisionHash,
+    pub revision_id: HeaderHash,
     pub label: MaybeUndefined<String>,
     pub symbol: MaybeUndefined<String>,
 }
 
 impl<'a> UpdateRequest {
-    pub fn get_revision_id(&'a self) -> &RevisionHash {
+    pub fn get_revision_id(&'a self) -> &HeaderHash {
         &self.revision_id
     }
diff --git a/zomes/rea_unit/zome/Cargo.toml b/zomes/rea_unit/zome/Cargo.toml
index 12ab79818..3ea110a86 100644
--- a/zomes/rea_unit/zome/Cargo.toml
+++ b/zomes/rea_unit/zome/Cargo.toml
@@ -7,10 +7,11 @@ edition = "2018"
 [dependencies]
 serde = "1"
 # :DUPE: hdk-rust-revid
-hdk = "0.0.122"
+hdk = "0.0.124"
 
 hc_zome_rea_unit_rpc = { path = "../rpc" }
 hc_zome_rea_unit_lib = { path = "../lib" }
+vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" }
 
 [lib]
 path = "src/lib.rs"
diff --git a/zomes/rea_unit/zome/src/lib.rs b/zomes/rea_unit/zome/src/lib.rs
index b72c10d25..c01a8a39e 100644
--- a/zomes/rea_unit/zome/src/lib.rs
+++ b/zomes/rea_unit/zome/src/lib.rs
@@ -11,11 +11,13 @@ use hdk::prelude::*;
 
 use hc_zome_rea_unit_rpc::*;
 use hc_zome_rea_unit_lib::*;
+use vf_attributes_hdk::UnitInternalAddress;
 
 #[hdk_extern]
 fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        UnitInternalAddress::entry_def(),
         EntryDef {
             id: UNIT_ENTRY_TYPE.into(),
             visibility: EntryVisibility::Public,
@@ -43,7 +45,6 @@ struct ById {
 
 #[hdk_extern]
 fn get_unit(ById { id }: ById) -> ExternResult<ResponseData> {
-    debug!("READ UNIT {:?}", id);
     Ok(handle_get_unit(UNIT_ENTRY_TYPE, id)?)
 }
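One behavioural nuance in the unit zome: units are anchored records, addressed by a human-readable UnitId (apparently the unit's symbol, per the UniquelyIdentifiable impl on CreateRequest) rather than by entry hash, which is why deletion goes through delete_anchored_record and why the new UnitInternalAddress entry def is needed for the anchor-to-entry indirection. A hedged sketch of a read through the anchor (`read_unit_by_symbol` and the `UnitId::from` conversion are illustrative assumptions, not codebase API):

    // resolve a unit through its string anchor; the internal address stays hidden
    fn read_unit_by_symbol(symbol: &str) -> RecordAPIResult<ResponseData> {
        let id = UnitId::from(symbol.to_string()); // assumed newtype conversion
        handle_get_unit(UNIT_ENTRY_TYPE, id)
    }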