From c6645641feb8e6b8ebc942c5738c7604ab713889 Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 22 Feb 2022 20:00:33 +1000 Subject: [PATCH 001/181] remove RevisionHash and change all record helper methods to accept raw HeaderHash in preparation for indexing logic changes DnaAddressable now must be directly storable as an Entry in order for fully-qualified IDs to be encoded in the DHT. This means conversion traits to/from hdk::prelude::Entry must be created, and the inner type of DnaAddressable cannot be ambiguous since this would make implementing such conversions impossible. This makes sense and is a good thing, because it enforces that only tuples of (DnaHash, EntryHash) can be used as identifiers. Implementors should not be linking to revisions but rather records. --- .../src/anchored_record_helpers.rs | 17 ++++------ lib/hdk_records/src/entry_helpers.rs | 30 ++++++++--------- lib/hdk_records/src/lib.rs | 2 +- lib/hdk_records/src/local_index_helpers.rs | 4 +-- lib/hdk_records/src/record_helpers.rs | 33 +++++++++---------- lib/hdk_semantic_indexes/rpc/src/lib.rs | 4 +-- 6 files changed, 41 insertions(+), 49 deletions(-) diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index dea7d3013..3bb85f66a 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -12,7 +12,6 @@ use hdk::prelude::*; use hdk::hash_path::path::Component; use hdk_type_serialization_macros::{ - RevisionHash, DnaAddressable, DnaIdentifiable, }; @@ -112,7 +111,7 @@ fn read_anchor_identity( pub fn read_anchored_record_entry( entry_type_root_path: &S, id_string: I, -) -> RecordAPIResult<(RevisionHash, A, T)> +) -> RecordAPIResult<(HeaderHash, A, T)> where S: AsRef, I: AsRef, T: std::fmt::Debug, @@ -138,7 +137,7 @@ pub fn read_anchored_record_entry( pub fn create_anchored_record( entry_def_id: &S, create_payload: C, -) -> RecordAPIResult<(RevisionHash, A, I)> +) -> RecordAPIResult<(HeaderHash, A, I)> where S: AsRef, B: DnaAddressable, A: DnaIdentifiable, @@ -176,9 +175,9 @@ pub fn create_anchored_record( /// pub fn update_anchored_record( entry_def_id: &S, - revision_id: &RevisionHash, + revision_id: &HeaderHash, update_payload: U, -) -> RecordAPIResult<(RevisionHash, B, I, I)> +) -> RecordAPIResult<(HeaderHash, B, I, I)> where S: AsRef, A: DnaAddressable, B: DnaIdentifiable, @@ -219,8 +218,7 @@ pub fn update_anchored_record( return Err(DataIntegrityError::IndexNotFound(identity_hash.to_owned())); } let old_link = addrs.pop().unwrap(); - let old_link_id = get_latest_header_hash(old_link)?; - let old_link_hash: &HeaderHash = old_link_id.as_ref(); + let old_link_hash = get_latest_header_hash(old_link)?; delete_link(old_link_hash.to_owned())?; // create the new identifier and link to it @@ -250,10 +248,9 @@ pub fn update_anchored_record( /// :TODO: This is a stub- include any logic necessary to handle cleanup of associated links. /// Not clearing old anchors may cause issues upon subsequent reinsert, which is not yet tested. 
/// -pub fn delete_anchored_record(address: &A) -> RecordAPIResult +pub fn delete_anchored_record(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { - delete_entry::(address)?; + delete_entry::(address)?; Ok(true) } diff --git a/lib/hdk_records/src/entry_helpers.rs b/lib/hdk_records/src/entry_helpers.rs index a3e3e3e8d..860f88b62 100644 --- a/lib/hdk_records/src/entry_helpers.rs +++ b/lib/hdk_records/src/entry_helpers.rs @@ -16,9 +16,8 @@ use hdk::prelude::{ update as hdk_update, delete_entry as hdk_delete_entry, }; -use hdk::info::dna_info; -use crate::{RevisionHash, RecordAPIResult, DataIntegrityError}; +use crate::{HeaderHash, RecordAPIResult, DataIntegrityError}; /// Helper to handle retrieving linked element entry from an element /// @@ -59,12 +58,11 @@ pub (crate) fn get_entry_by_address(address: &EntryHash) -> RecordAPIResult(address: &I) -> RecordAPIResult +pub (crate) fn get_entry_by_header(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - I: AsRef, { // :DUPE: identical to above, only type signature differs - let maybe_result = get(address.as_ref().clone(), GetOptions { strategy: GetStrategy::Latest }); + let maybe_result = get(address.clone(), GetOptions { strategy: GetStrategy::Latest }); match maybe_result { Err(_) => return Err(DataIntegrityError::EntryNotFound), _ => (), @@ -93,7 +91,7 @@ pub (crate) fn get_entry_by_header(address: &I) -> RecordAPIResult pub fn create_entry>( entry_def_id: S, entry_struct: I, -) -> RecordAPIResult<(RevisionHash, EntryHash)> +) -> RecordAPIResult<(HeaderHash, EntryHash)> where WasmError: From, Entry: TryFrom, { @@ -103,7 +101,7 @@ pub fn create_entry>( match entry_data { Ok(entry) => { let header_hash = hdk_create(CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; - Ok((RevisionHash(dna_info()?.hash, header_hash), entry_hash)) + Ok((header_hash, entry_hash)) }, Err(e) => Err(DataIntegrityError::Wasm(WasmError::from(e))), } @@ -120,14 +118,13 @@ pub fn create_entry>( /// :TODO: determine how to implement some best-possible validation to alleviate at /// least non-malicious forks in the hashchain of a datum. /// -pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( +pub fn update_entry<'a, I: Clone, E, S: AsRef>( entry_def_id: S, - address: &A, + address: &HeaderHash, new_entry: I, -) -> RecordAPIResult<(RevisionHash, EntryHash)> +) -> RecordAPIResult<(HeaderHash, EntryHash)> where WasmError: From, Entry: TryFrom, - A: AsRef, { // get initial address let entry_address = hash_entry(new_entry.clone())?; @@ -136,9 +133,9 @@ pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( let entry_data: Result = new_entry.try_into(); match entry_data { Ok(entry) => { - let updated_header = hdk_update(address.as_ref().clone(), CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; + let updated_header = hdk_update(address.clone(), CreateInput::new(EntryDefId::App(entry_def_id.as_ref().to_string()), entry, ChainTopOrdering::default()))?; - Ok((RevisionHash(dna_info()?.hash, updated_header), entry_address)) + Ok((updated_header, entry_address)) }, Err(e) => Err(DataIntegrityError::Wasm(WasmError::from(e))), } @@ -148,16 +145,15 @@ pub fn update_entry<'a, I: Clone, E, A, S: AsRef>( /// Wrapper for `hdk::remove_entry` that ensures that the entry is of the specified type before deleting. 
/// -pub fn delete_entry( - address: &A, +pub fn delete_entry( + address: &HeaderHash, ) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { // typecheck the record before deleting, to prevent any accidental or malicious cross-type deletions let _prev_entry: T = get_entry_by_header(address)?; - hdk_delete_entry(address.as_ref().clone())?; + hdk_delete_entry(address.clone())?; Ok(true) } diff --git a/lib/hdk_records/src/lib.rs b/lib/hdk_records/src/lib.rs index 5c3382efc..74a1e99ed 100644 --- a/lib/hdk_records/src/lib.rs +++ b/lib/hdk_records/src/lib.rs @@ -4,7 +4,7 @@ use thiserror::Error; use std::convert::Infallible; use hdk::prelude::*; -pub use hdk_type_serialization_macros::{RevisionHash, DnaAddressable}; +pub use hdk_type_serialization_macros::DnaAddressable; pub use hdk::prelude::{CellId, EntryHash, hash_entry}; pub use holo_hash::{DnaHash}; diff --git a/lib/hdk_records/src/local_index_helpers.rs b/lib/hdk_records/src/local_index_helpers.rs index 4bb1358d9..f1ee2bf24 100644 --- a/lib/hdk_records/src/local_index_helpers.rs +++ b/lib/hdk_records/src/local_index_helpers.rs @@ -12,7 +12,7 @@ use hdk::prelude::*; use crate::{ - RevisionHash, DnaAddressable, + HeaderHash, DnaAddressable, RecordAPIResult, record_interface::Identified, identity_helpers::{ @@ -30,7 +30,7 @@ use crate::{ /// pub fn query_root_index<'a, T, R, O, I: AsRef>( base_entry_type: &I, -) -> RecordAPIResult>> +) -> RecordAPIResult>> where T: std::fmt::Debug, O: DnaAddressable, SerializedBytes: TryInto, diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index f928ed213..a655ac07e 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -12,7 +12,7 @@ use hdk::prelude::*; use hdk::info::dna_info; use crate::{ - RevisionHash, DnaAddressable, + DnaAddressable, RecordAPIResult, DataIntegrityError, record_interface::{Identifiable, Identified, Updateable}, entries::{ @@ -36,12 +36,12 @@ fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { //--------------------------------[ READ ]-------------------------------------- -/// Retrieve the latest available RevisionHash for a given EntryHash. +/// Retrieve the latest available HeaderHash for a given EntryHash. /// /// Useful in coordinating updates between different entry types. /// -pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { - Ok(RevisionHash(dna_info()?.hash, (match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })? { +pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { + match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })? 
{ Some(Details::Entry(details)) => match details.entry_dht_status { metadata::EntryDhtStatus::Live => match details.updates.len() { 0 => { @@ -59,13 +59,13 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult Err(DataIntegrityError::EntryNotFound), }, _ => Err(DataIntegrityError::EntryNotFound), - })?)) + } } -/// Retrive the specific version of an entry specified by the given `RevisionHash` +/// Retrieve the specific version of an entry specified by the given `HeaderHash` /// pub fn read_record_entry_by_header( - header_hash: &RevisionHash, + header_hash: &HeaderHash, ) -> RecordAPIResult<(B, T)> where T: std::fmt::Debug, B: DnaAddressable, @@ -87,7 +87,7 @@ pub fn read_record_entry_by_header( /// pub (crate) fn read_record_entry_by_identity( identity_address: &EntryHash, -) -> RecordAPIResult<(RevisionHash, B, T)> +) -> RecordAPIResult<(HeaderHash, B, T)> where T: std::fmt::Debug, B: DnaAddressable, SerializedBytes: TryInto, @@ -99,9 +99,9 @@ pub (crate) fn read_record_entry_by_identity( // pull details of the current version, to ensure we have the most recent let latest_header_hash = get_latest_header_hash(entry_hash)?; - let (entry_hash, entry_data) = read_record_entry_by_header(&latest_header_hash)?; + let (read_entry_hash, entry_data) = read_record_entry_by_header(&latest_header_hash)?; - Ok((latest_header_hash, entry_hash, entry_data)) + Ok((latest_header_hash, read_entry_hash, entry_data)) } /// Read a record's entry data by locating it via an anchor `Path` composed @@ -113,7 +113,7 @@ pub (crate) fn read_record_entry_by_identity( pub fn read_record_entry( entry_type_root_path: &S, address: &EntryHash, -) -> RecordAPIResult<(RevisionHash, B, T)> +) -> RecordAPIResult<(HeaderHash, B, T)> where S: AsRef, T: std::fmt::Debug, B: DnaAddressable, @@ -133,7 +133,7 @@ pub fn read_record_entry( pub fn create_record( entry_def_id: S, create_payload: C, -) -> RecordAPIResult<(RevisionHash, B, I)> +) -> RecordAPIResult<(HeaderHash, B, I)> where S: AsRef, B: DnaAddressable, C: Into, @@ -173,9 +173,9 @@ pub fn create_record( /// pub fn update_record( entry_def_id: S, - address: &RevisionHash, + address: &HeaderHash, update_payload: U, -) -> RecordAPIResult<(RevisionHash, B, I, I)> +) -> RecordAPIResult<(HeaderHash, B, I, I)> where S: AsRef, B: DnaAddressable, I: Identifiable + Updateable, @@ -206,13 +206,12 @@ pub fn update_record( /// /// Links are not affected so as to retain a link to the referencing information, which may now need to be updated.
/// -pub fn delete_record(address: &A) -> RecordAPIResult +pub fn delete_record(address: &HeaderHash) -> RecordAPIResult where SerializedBytes: TryInto, - A: AsRef, { // :TODO: handle deletion of the identity `Path` for the referenced entry if this is the last header being deleted - delete_entry::(address)?; + delete_entry::(address)?; Ok(true) } diff --git a/lib/hdk_semantic_indexes/rpc/src/lib.rs b/lib/hdk_semantic_indexes/rpc/src/lib.rs index 9aa7a54dd..bb9773daf 100644 --- a/lib/hdk_semantic_indexes/rpc/src/lib.rs +++ b/lib/hdk_semantic_indexes/rpc/src/lib.rs @@ -7,7 +7,7 @@ */ use holochain_serialized_bytes::prelude::*; use hdk_type_serialization_macros::{ - DnaAddressable, EntryHash, HeaderHash, RevisionHash, + DnaAddressable, EntryHash, HeaderHash, }; pub use hdk_rpc_errors::{OtherCellResult, CrossCellError}; @@ -17,7 +17,7 @@ pub use hdk_rpc_errors::{OtherCellResult, CrossCellError}; /// Query / modify entries by revision / `HeaderHash` #[derive(Debug, Serialize, Deserialize)] pub struct ByHeader { - pub address: RevisionHash, + pub address: HeaderHash, } /// Shared parameter struct that all related record storage endpoints must implement From 4398682421f08b0facc42fcd6fa727c5f4e9a62b Mon Sep 17 00:00:00 2001 From: Harlan T Wood Date: Tue, 22 Feb 2022 09:08:20 -1000 Subject: [PATCH 002/181] Run `cargo test` in GH Actions --- .github/workflows/test.yml | 68 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..3869c0112 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,68 @@ +name: Checks + +on: [push, pull_request] + +jobs: + checks: + timeout-minutes: 45 + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-11] + fail-fast: false + + steps: + - name: Fetch source code + uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + .cargo/bin/ + .cargo/registry/index/ + .cargo/registry/cache/ + .cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Setup Xcode version + if: ${{ runner.os == 'macOS' }} + uses: maxim-lobanov/setup-xcode@v1.2.3 + with: + xcode-version: latest-stable + + - name: Check macOS version + if: ${{ runner.os == 'macOS' }} + run: sw_vers + + - name: Set up nix + uses: cachix/install-nix-action@v16 + with: + nix_path: nixpkgs=channel:nixos-21.05 + extra_nix_config: | + substituters = https://cache.nixos.org https://cache.holo.host https://ci-builds.cachix.org https://holochain-ci.cachix.org + trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.holo.host-1:lNXIXtJgS9Iuw4Cu6X0HINLu9sTfcjEntnrgwMQIMcE= cache.holo.host-2:ZJCkX3AUYZ8soxTLfTb60g+F3MkWD7hkH9y8CgqwhDQ= ci-builds.cachix.org-1:fxB0+h/MMlCpXf6hFsQM31YpHbaQoRmcNPNHwDUkXA4= holochain-ci.cachix.org-1:5IUSkZc0aoRS53rfkvH9Kid40NpyjwCMCzwRTXy+QN8= + + - uses: cachix/cachix-action@v10 + with: + name: ci-builds + authToken: "${{ secrets.CACHIX_TOKEN }}" + + - name: Inspect nix.conf + run: cat ~/.config/nix/nix.conf + + - name: Install recent bash, and set as NIX_BUILD_SHELL # needed by macos, which has an older bash incompatible with nix + if: ${{ runner.os == 'macOS' }} + run: echo "NIX_BUILD_SHELL=$(nix-build -A bashInteractive '')/bin/bash" >> $GITHUB_ENV + + - name: Prepare Nix 
environment + run: nix-shell --command "echo Completed" + + - run: nix-shell --pure --run 'cargo build' + - run: nix-shell --pure --run 'cargo test' From f00392c061e88f8fafd3b3a0b6cc969d12890a6f Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 23 Feb 2022 11:29:15 +1000 Subject: [PATCH 003/181] change calculate_identity_address() to use hash of externally-facing fully qualified address necessary changes to related trait bounds to support this --- .../src/anchored_record_helpers.rs | 2 +- lib/hdk_records/src/identity_helpers.rs | 8 ++-- lib/hdk_records/src/record_helpers.rs | 5 ++- lib/hdk_semantic_indexes/zome/src/lib.rs | 40 ++++++++++++++----- lib/hdk_type_serialization_macros/Cargo.toml | 2 +- lib/hdk_type_serialization_macros/src/lib.rs | 6 ++- 6 files changed, 44 insertions(+), 19 deletions(-) diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index 3bb85f66a..4bf760a85 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -144,7 +144,7 @@ pub fn create_anchored_record( C: Into + UniquelyIdentifiable, I: Identifiable, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, R: Clone + Identified, { // determine unique anchor index key diff --git a/lib/hdk_records/src/identity_helpers.rs b/lib/hdk_records/src/identity_helpers.rs index 5352b5e66..9405d5d3e 100644 --- a/lib/hdk_records/src/identity_helpers.rs +++ b/lib/hdk_records/src/identity_helpers.rs @@ -58,14 +58,16 @@ pub (crate) fn entry_type_root_path( /// Determine the underlying `EntryHash` for a given `base_address` identifier, without querying the DHT. /// -pub fn calculate_identity_address( - entry_type_root_path: S, +pub fn calculate_identity_address( + _entry_type_root_path: S, base_address: &A, ) -> RecordAPIResult where S: AsRef, A: DnaAddressable, + Entry: TryFrom, + WasmError: From, { - Ok(identity_path_for(entry_type_root_path, base_address).path_entry_hash()?) + Ok(hash_entry(base_address.clone())?) } /// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index a655ac07e..a2342d729 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -110,7 +110,7 @@ pub (crate) fn read_record_entry_by_identity( /// Presumes that the record is to be fetched from the current DNA and naturally errors /// if attempted on an `EntryHash` that only exists in a foreign cell. /// -pub fn read_record_entry( +pub fn read_record_entry( entry_type_root_path: &S, address: &EntryHash, ) -> RecordAPIResult<(HeaderHash, B, T)> @@ -118,7 +118,8 @@ pub fn read_record_entry( T: std::fmt::Debug, B: DnaAddressable, SerializedBytes: TryInto, - Entry: TryFrom, + Entry: TryFrom + TryFrom, + WasmError: From, R: std::fmt::Debug + Identified, { let identity_address = calculate_identity_address(entry_type_root_path, &B::new(dna_info()?.hash, address.clone()))?; diff --git a/lib/hdk_semantic_indexes/zome/src/lib.rs b/lib/hdk_semantic_indexes/zome/src/lib.rs index 880ce3aed..28d3301f5 100644 --- a/lib/hdk_semantic_indexes/zome/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome/src/lib.rs @@ -36,7 +36,7 @@ pub struct IndexingZomeConfig { /// Use this method to query associated IDs for a query edge, without retrieving /// the records themselves. 
/// -pub fn read_index<'a, O, A, S, I>( +pub fn read_index<'a, O, A, S, I, E>( base_entry_type: &I, base_address: &A, link_tag: &S, @@ -45,6 +45,8 @@ pub fn read_index<'a, O, A, S, I>( I: AsRef, A: DnaAddressable, O: DnaAddressable, + Entry: TryFrom, + WasmError: From, { let index_address = calculate_identity_address(base_entry_type, base_address)?; let refd_index_addresses = get_linked_addresses(&index_address, LinkTag::new(link_tag.as_ref()))?; @@ -67,7 +69,7 @@ pub fn read_index<'a, O, A, S, I>( /// /// Use this method to query associated records for a query edge in full. /// -pub fn query_index<'a, T, O, C, F, A, S, I, J>( +pub fn query_index<'a, T, O, C, F, A, S, I, J, E>( base_entry_type: &I, base_address: &A, link_tag: &S, @@ -83,6 +85,8 @@ pub fn query_index<'a, T, O, C, F, A, S, I, J>( C: std::fmt::Debug, SerializedBytes: TryInto, F: Fn(C) -> Option, + Entry: TryFrom, + WasmError: From, { let index_address = calculate_identity_address(base_entry_type, base_address)?; let addrs_result = get_linked_addresses(&index_address, LinkTag::new(link_tag.as_ref()))?; @@ -146,7 +150,7 @@ fn retrieve_foreign_record<'a, T, B, C, F, S>( /// The returned `RemoteEntryLinkResponse` provides an appropriate format for responding to indexing /// requests that originate from calls to `create/update/delete_remote_index` in a foreign DNA. /// -pub fn sync_index( +pub fn sync_index( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -159,6 +163,8 @@ pub fn sync_index( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { // create any new indexes let indexes_created = create_remote_index_destination( @@ -187,7 +193,7 @@ pub fn sync_index( /// This basically consists of an identity `Path` for the remote content and bidirectional /// links between it and its `dest_addresses`. /// -fn create_remote_index_destination( +fn create_remote_index_destination( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -199,6 +205,8 @@ fn create_remote_index_destination( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { // create a base entry pointer for the referenced origin record let _identity_hash = create_entry_identity(source_entry_type, source)?; @@ -210,7 +218,7 @@ fn create_remote_index_destination( ) } -fn create_dest_identities_and_indexes<'a, A, B, S, I>( +fn create_dest_identities_and_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -221,6 +229,8 @@ fn create_dest_identities_and_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: 'a + DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { let base_method = create_dest_indexes(source_entry_type, source, dest_entry_type, link_tag, link_tag_reciprocal); @@ -235,7 +245,7 @@ fn create_dest_identities_and_indexes<'a, A, B, S, I>( } /// Helper for index update to add multiple destination links from some source. 
-fn create_dest_indexes<'a, A, B, S, I>( +fn create_dest_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -246,6 +256,8 @@ fn create_dest_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Box::new(move |dest| { match create_index(source_entry_type, source, dest_entry_type, dest, link_tag, link_tag_reciprocal) { @@ -260,7 +272,7 @@ fn create_dest_indexes<'a, A, B, S, I>( /// Creates a bidirectional link between two entry addresses, and returns a vector /// of the `HeaderHash`es of the (respectively) forward & reciprocal links created. -fn create_index( +fn create_index( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -272,6 +284,8 @@ fn create_index( S: AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { let source_hash = calculate_identity_address(source_entry_type, source)?; let dest_hash = calculate_identity_address(dest_entry_type, dest)?; @@ -292,7 +306,7 @@ fn create_index( /// affected in the removal, and is simply left dangling in the /// DHT space as an indicator of previously linked items. /// -fn remove_remote_index_links( +fn remove_remote_index_links( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -304,6 +318,8 @@ fn remove_remote_index_links( I: AsRef, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Ok(remove_addresses.iter() .flat_map(delete_dest_indexes( @@ -316,7 +332,7 @@ fn remove_remote_index_links( } /// Helper for index update to remove multiple destination links from some source. -fn delete_dest_indexes<'a, A, B, S, I>( +fn delete_dest_indexes<'a, A, B, S, I, E>( source_entry_type: &'a I, source: &'a A, dest_entry_type: &'a I, @@ -327,6 +343,8 @@ fn delete_dest_indexes<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { Box::new(move |dest_addr| { match delete_index(source_entry_type, source, dest_entry_type, dest_addr, link_tag, link_tag_reciprocal) { @@ -344,7 +362,7 @@ fn delete_dest_indexes<'a, A, B, S, I>( /// /// :TODO: this should probably only delete the referenced IDs, at the moment it clears anything matching tags. 
/// -fn delete_index<'a, A, B, S, I>( +fn delete_index<'a, A, B, S, I, E>( source_entry_type: &I, source: &A, dest_entry_type: &I, @@ -356,6 +374,8 @@ fn delete_index<'a, A, B, S, I>( S: 'a + AsRef<[u8]> + ?Sized, A: DnaAddressable, B: DnaAddressable, + Entry: TryFrom + TryFrom, + WasmError: From, { let tag_source = LinkTag::new(link_tag.as_ref()); let tag_dest = LinkTag::new(link_tag_reciprocal.as_ref()); diff --git a/lib/hdk_type_serialization_macros/Cargo.toml b/lib/hdk_type_serialization_macros/Cargo.toml index 5f4a73f49..70a03a50d 100644 --- a/lib/hdk_type_serialization_macros/Cargo.toml +++ b/lib/hdk_type_serialization_macros/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [dependencies] serde = "1" -holochain_serialized_bytes = "0.0" +hdk = "0.0" holo_hash = "0.0" [lib] diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_type_serialization_macros/src/lib.rs index d7092364b..923b99bba 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_type_serialization_macros/src/lib.rs @@ -6,7 +6,8 @@ */ use std::fmt::Debug; -pub use holochain_serialized_bytes::prelude::*; +pub use hdk::prelude::*; +pub use hdk; pub use holo_hash::{DnaHash, EntryHash, HeaderHash, AnyDhtHash, HOLO_HASH_UNTYPED_LEN}; #[macro_export] @@ -51,7 +52,8 @@ pub trait DnaAddressable macro_rules! addressable_identifier { ($r:ident => $base:ty) => { // externally facing type, with DnaHash of cell for context - #[derive(Serialize, Deserialize, SerializedBytes, Debug, Clone, PartialEq, Eq, Hash)] + #[hdk_entry(id="$r")] + #[derive(Clone, PartialEq, Eq, Hash)] pub struct $r(pub DnaHash, pub $base); // constructor From 138c68f770d3a0397d90e0e6df18f39477961472 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 10:00:48 +1000 Subject: [PATCH 004/181] remove RevisionHash from remaining core libs & definition --- lib/hdk_type_serialization_macros/src/lib.rs | 2 -- lib/vf_attributes_hdk/src/lib.rs | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_type_serialization_macros/src/lib.rs index 923b99bba..533135b65 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_type_serialization_macros/src/lib.rs @@ -86,8 +86,6 @@ macro_rules! 
addressable_identifier { } } -addressable_identifier!(RevisionHash => HeaderHash); - /// Supertrait for things which can be identified by some string label in a particular DNA /// pub trait DnaIdentifiable diff --git a/lib/vf_attributes_hdk/src/lib.rs b/lib/vf_attributes_hdk/src/lib.rs index fb677934d..5904d190e 100644 --- a/lib/vf_attributes_hdk/src/lib.rs +++ b/lib/vf_attributes_hdk/src/lib.rs @@ -4,7 +4,7 @@ use hdk_type_serialization_macros::*; pub use chrono::{ FixedOffset, Utc, DateTime }; pub use holo_hash::{ AgentPubKey, EntryHash, HeaderHash }; pub use holochain_zome_types::timestamp::Timestamp; -pub use hdk_type_serialization_macros::{RevisionHash, DnaAddressable}; +pub use hdk_type_serialization_macros::{DnaAddressable}; pub use hdk_semantic_indexes_zome_rpc::{ByHeader, ByAddress}; simple_alias!(ActionId => String); From 3f85185d0562099085b0868f08a2411de0c4bc18 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 10:15:25 +1000 Subject: [PATCH 005/181] drop all refs to deprecated RevisionHash in favour of raw HeaderHash cleanup some remaining unneeded non-generic deletion payload structs remove unneeded type generics on deletion functions --- lib/hdk_type_serialization_macros/src/lib.rs | 2 +- zomes/rea_agreement/lib/src/lib.rs | 6 ++--- zomes/rea_agreement/rpc/src/lib.rs | 8 +++---- zomes/rea_agreement/zome/src/lib.rs | 7 +----- zomes/rea_commitment/lib/src/lib.rs | 6 ++--- zomes/rea_commitment/rpc/src/lib.rs | 8 +++---- zomes/rea_commitment/storage/src/lib.rs | 1 - zomes/rea_commitment/zome/src/lib.rs | 5 ---- zomes/rea_economic_event/lib/src/lib.rs | 24 +++++++++---------- zomes/rea_economic_event/rpc/src/lib.rs | 10 ++++---- zomes/rea_economic_event/zome_api/src/lib.rs | 2 +- zomes/rea_economic_resource/lib/src/lib.rs | 18 +++++++------- zomes/rea_economic_resource/rpc/src/lib.rs | 6 ++--- .../rea_economic_resource/zome_api/src/lib.rs | 8 +++---- zomes/rea_fulfillment/lib/src/lib.rs | 4 ++-- .../lib_destination/src/lib.rs | 4 ++-- zomes/rea_fulfillment/lib_origin/src/lib.rs | 4 ++-- zomes/rea_fulfillment/rpc/src/lib.rs | 8 +++---- zomes/rea_fulfillment/storage/src/lib.rs | 1 - zomes/rea_intent/lib/src/lib.rs | 6 ++--- zomes/rea_intent/rpc/src/lib.rs | 8 +++---- zomes/rea_intent/storage/src/lib.rs | 1 - zomes/rea_intent/zome/src/lib.rs | 5 ---- zomes/rea_process/lib/src/lib.rs | 6 ++--- zomes/rea_process/rpc/src/lib.rs | 8 +++---- zomes/rea_process/zome/src/lib.rs | 7 +----- .../rea_process_specification/lib/src/lib.rs | 6 ++--- .../rea_process_specification/rpc/src/lib.rs | 8 +++---- zomes/rea_proposal/lib/src/lib.rs | 4 ++-- zomes/rea_proposal/rpc/src/lib.rs | 8 +++---- zomes/rea_proposed_intent/lib/src/lib.rs | 6 ++--- zomes/rea_proposed_intent/rpc/src/lib.rs | 4 ++-- zomes/rea_proposed_to/lib/src/lib.rs | 4 ++-- zomes/rea_proposed_to/rpc/src/lib.rs | 4 ++-- .../rea_resource_specification/lib/src/lib.rs | 6 ++--- .../rea_resource_specification/rpc/src/lib.rs | 8 +++---- zomes/rea_satisfaction/lib/src/lib.rs | 4 ++-- .../lib_destination/src/lib.rs | 4 ++-- zomes/rea_satisfaction/lib_origin/src/lib.rs | 4 ++-- zomes/rea_satisfaction/rpc/src/lib.rs | 8 +++---- zomes/rea_unit/lib/src/lib.rs | 6 ++--- zomes/rea_unit/rpc/src/lib.rs | 8 +++---- 42 files changed, 121 insertions(+), 144 deletions(-) diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_type_serialization_macros/src/lib.rs index 533135b65..ff7208a03 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_type_serialization_macros/src/lib.rs @@ -8,7 +8,7 @@ use std::fmt::Debug; 
pub use hdk::prelude::*; pub use hdk; -pub use holo_hash::{DnaHash, EntryHash, HeaderHash, AnyDhtHash, HOLO_HASH_UNTYPED_LEN}; +pub use holo_hash::*; #[macro_export] macro_rules! simple_alias { diff --git a/zomes/rea_agreement/lib/src/lib.rs b/zomes/rea_agreement/lib/src/lib.rs index 0cafc8ea0..686096b4b 100644 --- a/zomes/rea_agreement/lib/src/lib.rs +++ b/zomes/rea_agreement/lib/src/lib.rs @@ -45,13 +45,13 @@ pub fn handle_update_agreement(entry_def_id: S, agreement: UpdateRequest) -> construct_response(&identity_address, revision_id, &entry, get_link_fields(&identity_address)?) } -pub fn handle_delete_agreement(address: RevisionHash) -> RecordAPIResult { - delete_record::(&address) +pub fn handle_delete_agreement(address: HeaderHash) -> RecordAPIResult { + delete_record::(&address) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &AgreementAddress, revision: RevisionHash, e: &EntryData, ( + address: &AgreementAddress, revision: HeaderHash, e: &EntryData, ( commitments, economic_events, ): ( diff --git a/zomes/rea_agreement/rpc/src/lib.rs b/zomes/rea_agreement/rpc/src/lib.rs index 54cff2579..e6eba82b7 100644 --- a/zomes/rea_agreement/rpc/src/lib.rs +++ b/zomes/rea_agreement/rpc/src/lib.rs @@ -10,12 +10,12 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, AgreementAddress, CommitmentAddress, EconomicEventAddress, DateTime, FixedOffset, + ByHeader, HeaderHash, }; //---------------- EXTERNAL RECORD STRUCTURE ---------------- @@ -26,7 +26,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: AgreementAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(skip_serializing_if = "Option::is_none")] pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -80,7 +80,7 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")] pub name: MaybeUndefined, @@ -93,7 +93,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&self) -> RevisionHash { + pub fn get_revision_id(&self) -> HeaderHash { self.revision_id.to_owned().into() } diff --git a/zomes/rea_agreement/zome/src/lib.rs b/zomes/rea_agreement/zome/src/lib.rs index 97ef5405e..5d99459d0 100644 --- a/zomes/rea_agreement/zome/src/lib.rs +++ b/zomes/rea_agreement/zome/src/lib.rs @@ -56,12 +56,7 @@ fn update_agreement(UpdateParams { agreement }: UpdateParams) -> ExternResult ExternResult { +fn delete_agreement(ByHeader { address }: ByHeader) -> ExternResult { Ok(handle_delete_agreement(address)?) } diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs index 472b23ed9..46fc8b876 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -93,7 +93,7 @@ pub fn handle_update_commitment(entry_def_id: S, commitment: UpdateRequest) - construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
} -pub fn handle_delete_commitment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_commitment(revision_id: HeaderHash) -> RecordAPIResult { // load the record to ensure it is of the correct type let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -110,12 +110,12 @@ pub fn handle_delete_commitment(revision_id: RevisionHash) -> RecordAPIResult(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &CommitmentAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &CommitmentAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, involved_agents, diff --git a/zomes/rea_commitment/rpc/src/lib.rs b/zomes/rea_commitment/rpc/src/lib.rs index 2a91a67fe..03f766c30 100644 --- a/zomes/rea_commitment/rpc/src/lib.rs +++ b/zomes/rea_commitment/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined, default_false}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, + HeaderHash, ActionId, DateTime, FixedOffset, ExternalURL, @@ -37,7 +37,7 @@ pub use vf_attributes_hdk::{ CommitmentAddress }; #[serde(rename_all = "camelCase")] pub struct Response { pub id: CommitmentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub action: ActionId, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -163,7 +163,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub action: MaybeUndefined, #[serde(default)] @@ -211,7 +211,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_commitment/storage/src/lib.rs b/zomes/rea_commitment/storage/src/lib.rs index 57edd068f..35ebc4631 100644 --- a/zomes/rea_commitment/storage/src/lib.rs +++ b/zomes/rea_commitment/storage/src/lib.rs @@ -16,7 +16,6 @@ use hdk_records::{ use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ActionId, DateTime, FixedOffset, ExternalURL, diff --git a/zomes/rea_commitment/zome/src/lib.rs b/zomes/rea_commitment/zome/src/lib.rs index 060028586..e6059f35f 100644 --- a/zomes/rea_commitment/zome/src/lib.rs +++ b/zomes/rea_commitment/zome/src/lib.rs @@ -90,11 +90,6 @@ fn update_commitment(UpdateParams { commitment }: UpdateParams) -> ExternResult< Ok(handle_update_commitment(COMMITMENT_ENTRY_TYPE, commitment)?) } -#[derive(Debug, Serialize, Deserialize)] -struct ByHeader { - pub address: RevisionHash, -} - #[hdk_extern] fn delete_commitment(ByHeader { address }: ByHeader) -> ExternResult { Ok(handle_delete_commitment(address)?) 
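// Illustrative sketch, not part of the patch series: what the identifier changes
// in patches 001 and 003 amount to at the call site. `DemoAddress` and
// `identity_entry_hash` are assumed names invented for this example; the
// `addressable_identifier!` macro and the `hash_entry`-based identity calculation
// are the ones shown in the hunks above.
use hdk::prelude::*;
use hdk_type_serialization_macros::*;

// Expands to `pub struct DemoAddress(pub DnaHash, pub EntryHash)` carrying the
// `#[hdk_entry(...)]` storage derives, so a fully-qualified (DnaHash, EntryHash)
// identifier can be written to the DHT as an Entry in its own right.
addressable_identifier!(DemoAddress => EntryHash);

// After patch 003, calculate_identity_address() reduces to hashing the
// fully-qualified identifier itself, rather than hashing an anchor `Path`.
fn identity_entry_hash(base_address: &DemoAddress) -> ExternResult<EntryHash> {
    hash_entry(base_address.clone())
}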
diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index 526051813..c17d074d2 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -64,8 +64,8 @@ impl API for EconomicEventZomePermissableDefault { entry_def_id: Self::S, process_entry_def_id: Self::S, event: EconomicEventCreateRequest, new_inventoried_resource: Option ) -> RecordAPIResult { - let mut resources_affected: Vec<(RevisionHash, EconomicResourceAddress, EconomicResourceData, EconomicResourceData)> = vec![]; - let mut resource_created: Option<(RevisionHash, EconomicResourceAddress, EconomicResourceData)> = None; + let mut resources_affected: Vec<(HeaderHash, EconomicResourceAddress, EconomicResourceData, EconomicResourceData)> = vec![]; + let mut resource_created: Option<(HeaderHash, EconomicResourceAddress, EconomicResourceData)> = None; // if the event observes a new resource, create that resource & return it in the response if let Some(economic_resource) = new_inventoried_resource { @@ -125,7 +125,7 @@ impl API for EconomicEventZomePermissableDefault { construct_response(&identity_address, &revision_id, &new_entry, get_link_fields(&identity_address)?) } - fn delete_economic_event(revision_id: RevisionHash) -> RecordAPIResult { + fn delete_economic_event(revision_id: HeaderHash) -> RecordAPIResult { // read any referencing indexes let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -144,7 +144,7 @@ impl API for EconomicEventZomePermissableDefault { // May not be needed due to cross-record deletion validation logic. // delete entry last as it must be present in order for links to be removed - delete_record::(&revision_id) + delete_record::(&revision_id) } fn get_all_economic_events(entry_def_id: Self::S) -> RecordAPIResult { @@ -168,7 +168,7 @@ fn read_process_index_zome(conf: DnaConfigSlice) -> Option { } fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEventCreateRequest, resource_address: Option, -) -> RecordAPIResult<(RevisionHash, EconomicEventAddress, EntryData)> +) -> RecordAPIResult<(HeaderHash, EconomicEventAddress, EntryData)> where S: AsRef { let (revision_id, base_address, entry_resp): (_, EconomicEventAddress, EntryData) = create_record( @@ -206,7 +206,7 @@ fn read_resource_zome(conf: DnaConfigSlice) -> Option { /// fn handle_create_inventory_from_event( economic_resource: &ResourceCreateRequest, event: &CreateRequest, -) -> OtherCellResult<(RevisionHash, EconomicResourceAddress, EconomicResourceData)> +) -> OtherCellResult<(HeaderHash, EconomicResourceAddress, EconomicResourceData)> { Ok(call_local_zome_method( read_resource_zome, @@ -226,7 +226,7 @@ fn resource_creation(event: &CreateRequest, resource: &ResourceCreateRequest) -> /// fn handle_update_resource_inventory( event: &EconomicEventCreateRequest, -) -> RecordAPIResult> +) -> RecordAPIResult> { Ok(call_local_zome_method( read_resource_zome, @@ -235,7 +235,7 @@ fn handle_update_resource_inventory( )?) 
} -fn handle_list_output(entries_result: Vec>) -> RecordAPIResult { +fn handle_list_output(entries_result: Vec>) -> RecordAPIResult { let edges = entries_result.iter() .cloned() .filter_map(Result::ok) @@ -271,7 +271,7 @@ fn handle_list_output(entries_result: Vec( event_address: &EconomicEventAddress, - revision_id: &RevisionHash, + revision_id: &HeaderHash, event: &EntryData, ( fulfillments, satisfactions, @@ -280,7 +280,7 @@ pub fn construct_response_with_resource<'a>( Vec, ), resource_address: Option, - resource_revision_id: &RevisionHash, + resource_revision_id: &HeaderHash, resource: EconomicResourceData, ( contained_in, stage, @@ -329,7 +329,7 @@ pub fn construct_response_with_resource<'a>( // Same as above, but omits EconomicResource object pub fn construct_response<'a>( - address: &EconomicEventAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicEventAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, ): ( @@ -369,7 +369,7 @@ pub fn construct_response<'a>( } pub fn construct_list_response<'a>( - address: &EconomicEventAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicEventAddress, revision_id: &HeaderHash, e: &EntryData, ( fulfillments, satisfactions, ): ( diff --git a/zomes/rea_economic_event/rpc/src/lib.rs b/zomes/rea_economic_event/rpc/src/lib.rs index 2109e1f3f..9f2e6000a 100644 --- a/zomes/rea_economic_event/rpc/src/lib.rs +++ b/zomes/rea_economic_event/rpc/src/lib.rs @@ -12,7 +12,7 @@ use serde_maybe_undefined::MaybeUndefined; use vf_measurement::QuantityValue; use hdk_relay_pagination::PageInfo; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, EconomicEventAddress, EconomicResourceAddress, ActionId, @@ -39,7 +39,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: EconomicEventAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub action: ActionId, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -94,7 +94,7 @@ pub struct Response { #[serde(rename_all = "camelCase")] pub struct ResourceResponse { pub id: EconomicResourceAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(skip_serializing_if = "Option::is_none")] pub conforms_to: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -305,7 +305,7 @@ pub struct CreateParams { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub note: MaybeUndefined, #[serde(default)] @@ -319,7 +319,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_economic_event/zome_api/src/lib.rs b/zomes/rea_economic_event/zome_api/src/lib.rs index d0ac078ec..8f3d9d696 100644 --- a/zomes/rea_economic_event/zome_api/src/lib.rs +++ b/zomes/rea_economic_event/zome_api/src/lib.rs @@ -10,7 +10,7 @@ pub trait API { ) -> RecordAPIResult; fn get_economic_event(entry_def_id: Self::S, address: EconomicEventAddress) -> RecordAPIResult; fn update_economic_event(entry_def_id: Self::S, event: UpdateRequest) -> RecordAPIResult; - fn delete_economic_event(revision_id: RevisionHash) -> RecordAPIResult; + fn delete_economic_event(revision_id: HeaderHash) -> RecordAPIResult; fn get_all_economic_events(entry_def_id: 
Self::S) -> RecordAPIResult; } diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index 5ae500b7f..3c2ba6477 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -70,7 +70,7 @@ impl API for EconomicResourceZomePermissableDefault { /// /// :TODO: assess whether this should use the same standardised API format as external endpoints /// - fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData)> + fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData)> { // :TODO: move this assertion to validation callback if let MaybeUndefined::Some(_sent_inventory_id) = ¶ms.get_event_params().resource_inventoried_as { @@ -107,9 +107,9 @@ impl API for EconomicResourceZomePermissableDefault { fn update_inventory_from_event( resource_entry_def_id: Self::S, event: EventCreateRequest, - ) -> RecordAPIResult> + ) -> RecordAPIResult> { - let mut resources_affected: Vec<(RevisionHash, EconomicResourceAddress, EntryData, EntryData)> = vec![]; + let mut resources_affected: Vec<(HeaderHash, EconomicResourceAddress, EntryData, EntryData)> = vec![]; // if the event is a transfer-like event, run the receiver's update first if let MaybeUndefined::Some(receiver_inventory) = &event.to_resource_inventoried_as { @@ -162,15 +162,15 @@ fn read_economic_resource_index_zome(conf: DnaConfigSlice) -> Option { fn handle_update_inventory_resource( resource_entry_def_id: S, - resource_addr: &RevisionHash, + resource_addr: &HeaderHash, event: EventCreateRequest, -) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData, EntryData)> +) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData, EntryData)> where S: AsRef, { Ok(update_record(&resource_entry_def_id, resource_addr, event)?) 
} -fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries_result: Vec>) -> RecordAPIResult +fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries_result: Vec>) -> RecordAPIResult where S: AsRef { let edges = entries_result.iter() @@ -203,7 +203,7 @@ fn handle_list_output(event_entry_def_id: S, process_entry_def_id: S, entries /// Create response from input DHT primitives pub fn construct_response<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, @@ -222,7 +222,7 @@ pub fn construct_response<'a>( /// Create response from input DHT primitives pub fn construct_response_record<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, @@ -258,7 +258,7 @@ pub fn construct_response_record<'a>( } pub fn construct_list_response<'a>( - address: &EconomicResourceAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &EconomicResourceAddress, revision_id: &HeaderHash, e: &EntryData, ( contained_in, stage, state, diff --git a/zomes/rea_economic_resource/rpc/src/lib.rs b/zomes/rea_economic_resource/rpc/src/lib.rs index 02d38de3b..f4494eba3 100644 --- a/zomes/rea_economic_resource/rpc/src/lib.rs +++ b/zomes/rea_economic_resource/rpc/src/lib.rs @@ -10,7 +10,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, + HeaderHash, ByAddress, EconomicResourceAddress, EconomicEventAddress, ExternalURL, @@ -63,7 +63,7 @@ impl<'a> CreationPayload { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub classified_as: MaybeUndefined>, #[serde(default)] @@ -77,7 +77,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_economic_resource/zome_api/src/lib.rs b/zomes/rea_economic_resource/zome_api/src/lib.rs index 950b6e6a1..c43a0162f 100644 --- a/zomes/rea_economic_resource/zome_api/src/lib.rs +++ b/zomes/rea_economic_resource/zome_api/src/lib.rs @@ -11,11 +11,11 @@ use hc_zome_rea_economic_resource_storage::{EntryData}; pub trait API { type S: AsRef; - fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(RevisionHash, EconomicResourceAddress, EntryData)>; + fn create_inventory_from_event(resource_entry_def_id: Self::S, params: CreationPayload) -> RecordAPIResult<(HeaderHash, EconomicResourceAddress, EntryData)>; fn update_inventory_from_event( resource_entry_def_id: Self::S, event: EventCreateRequest, - ) -> RecordAPIResult>; + ) -> RecordAPIResult>; fn get_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, address: EconomicResourceAddress) -> RecordAPIResult; fn update_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, resource: UpdateRequest) -> RecordAPIResult; fn get_all_economic_resources(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S) -> RecordAPIResult; @@ -33,7 +33,7 @@ macro_rules! 
declare_economic_resource_zome_api { // :TODO: The signature of this method, and its decoupling from the EconomicEvent zome, means that resources can be // instantiated from the receiving inventory. Is this desirable? What are the repercussions? #[hdk_extern] - fn _internal_create_inventory(params: CreationPayload) -> ExternResult<(RevisionHash, EconomicResourceAddress, EntryData)> + fn _internal_create_inventory(params: CreationPayload) -> ExternResult<(HeaderHash, EconomicResourceAddress, EntryData)> { Ok(<$zome_api>::create_inventory_from_event( RESOURCE_ENTRY_TYPE, @@ -42,7 +42,7 @@ macro_rules! declare_economic_resource_zome_api { } #[hdk_extern] - fn _internal_update_inventory(event: EventCreateRequest) -> ExternResult> + fn _internal_update_inventory(event: EventCreateRequest) -> ExternResult> { Ok(<$zome_api>::update_inventory_from_event(RESOURCE_ENTRY_TYPE, event)?) } diff --git a/zomes/rea_fulfillment/lib/src/lib.rs b/zomes/rea_fulfillment/lib/src/lib.rs index e22a0afd3..8f635ca14 100644 --- a/zomes/rea_fulfillment/lib/src/lib.rs +++ b/zomes/rea_fulfillment/lib/src/lib.rs @@ -10,12 +10,12 @@ * @package Holo-REA */ use hdk_records::RecordAPIResult; -use vf_attributes_hdk::{RevisionHash, FulfillmentAddress}; +use vf_attributes_hdk::{HeaderHash, FulfillmentAddress}; use hc_zome_rea_fulfillment_storage::EntryData; use hc_zome_rea_fulfillment_rpc::*; /// Create response from input DHT primitives -pub fn construct_response(address: &FulfillmentAddress, revision_id: &RevisionHash, e: &EntryData) -> RecordAPIResult { +pub fn construct_response(address: &FulfillmentAddress, revision_id: &HeaderHash, e: &EntryData) -> RecordAPIResult { Ok(ResponseData { fulfillment: Response { id: address.to_owned(), diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index cd9d6b8d4..1134ba8d3 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -63,7 +63,7 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult { // read any referencing indexes let (base_address, fulfillment) = read_record_entry_by_header::(&revision_id)?; @@ -71,7 +71,7 @@ pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult(&revision_id) + delete_record::(&revision_id) } /// Properties accessor for zome config. diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 5b6fc3a5f..33aa1c061 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -83,7 +83,7 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult { let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -98,7 +98,7 @@ pub fn handle_delete_fulfillment(revision_id: RevisionHash) -> RecordAPIResult(&revision_id) + delete_record::(&revision_id) } /// Properties accessor for zome config. 
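// Illustrative sketch, not part of the patch series: with RevisionHash gone, the
// raw HeaderHash returned by get_latest_header_hash() (patch 001) feeds straight
// into the shared ByHeader payload that the delete externs above accept.
// `latest_revision_of` is an assumed name, and the hdk_records module path for
// get_latest_header_hash is also an assumption of this sketch.
use hdk::prelude::*;
use hdk_records::{RecordAPIResult, record_helpers::get_latest_header_hash};
use vf_attributes_hdk::ByHeader;

fn latest_revision_of(identity_address: EntryHash) -> RecordAPIResult<ByHeader> {
    // no RevisionHash(dna_hash, header_hash) wrapping step remains -- the raw
    // HeaderHash is now the revision identifier passed between zomes
    let address: HeaderHash = get_latest_header_hash(identity_address)?;
    Ok(ByHeader { address })
}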
diff --git a/zomes/rea_fulfillment/rpc/src/lib.rs b/zomes/rea_fulfillment/rpc/src/lib.rs index 05b00c605..79058c419 100644 --- a/zomes/rea_fulfillment/rpc/src/lib.rs +++ b/zomes/rea_fulfillment/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ByHeader, ByAddress, + HeaderHash, ByHeader, ByAddress, EconomicEventAddress, CommitmentAddress, }; @@ -39,7 +39,7 @@ pub use vf_attributes_hdk::{ FulfillmentAddress }; #[serde(rename_all = "camelCase")] pub struct Response { pub id: FulfillmentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub fulfilled_by: EconomicEventAddress, pub fulfills: CommitmentAddress, #[serde(skip_serializing_if = "Option::is_none")] @@ -99,7 +99,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")] pub fulfilled_by: MaybeUndefined, // note this setup allows None to be passed but `update_with` ignores it @@ -118,7 +118,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_fulfillment/storage/src/lib.rs b/zomes/rea_fulfillment/storage/src/lib.rs index 3d28f94e1..bcaf0a9b8 100644 --- a/zomes/rea_fulfillment/storage/src/lib.rs +++ b/zomes/rea_fulfillment/storage/src/lib.rs @@ -16,7 +16,6 @@ use hdk_records::{ use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, FulfillmentAddress, EconomicEventAddress, CommitmentAddress, diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index a018bd096..2c4a506da 100644 --- a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -81,7 +81,7 @@ pub fn handle_update_intent(entry_def_id: S, intent: UpdateRequest) -> Record construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
} -pub fn handle_delete_intent(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_intent(revision_id: HeaderHash) -> RecordAPIResult { // load the record to ensure it is of the correct type let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -95,12 +95,12 @@ pub fn handle_delete_intent(revision_id: RevisionHash) -> RecordAPIResult } // delete entry last, as it must be present in order for links to be removed - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives pub fn construct_response<'a>( - address: &IntentAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &IntentAddress, revision_id: &HeaderHash, e: &EntryData, ( satisfactions, // published_in, ): ( diff --git a/zomes/rea_intent/rpc/src/lib.rs b/zomes/rea_intent/rpc/src/lib.rs index 76b00876a..30a7598d1 100644 --- a/zomes/rea_intent/rpc/src/lib.rs +++ b/zomes/rea_intent/rpc/src/lib.rs @@ -9,7 +9,6 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined, default_false}; -use vf_attributes_hdk::RevisionHash; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ ActionId, @@ -22,6 +21,7 @@ pub use vf_attributes_hdk::{ SatisfactionAddress, LocationAddress, ProposedIntentAddress, + HeaderHash, }; //---------------- EXTERNAL RECORD STRUCTURE ---------------- @@ -35,7 +35,7 @@ pub use vf_attributes_hdk::{ IntentAddress }; #[serde(rename_all = "camelCase")] pub struct Response { pub id: IntentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub action: ActionId, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -154,7 +154,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub action: MaybeUndefined, #[serde(default)] @@ -200,7 +200,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_intent/storage/src/lib.rs b/zomes/rea_intent/storage/src/lib.rs index dfe743123..ecedd6730 100644 --- a/zomes/rea_intent/storage/src/lib.rs +++ b/zomes/rea_intent/storage/src/lib.rs @@ -16,7 +16,6 @@ use hdk_records::{ use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ActionId, DateTime, FixedOffset, ExternalURL, diff --git a/zomes/rea_intent/zome/src/lib.rs b/zomes/rea_intent/zome/src/lib.rs index 1a4014c84..afa05280e 100644 --- a/zomes/rea_intent/zome/src/lib.rs +++ b/zomes/rea_intent/zome/src/lib.rs @@ -89,11 +89,6 @@ fn update_intent(UpdateParams { intent }: UpdateParams) -> ExternResult ExternResult { Ok(handle_delete_intent(address)?) diff --git a/zomes/rea_process/lib/src/lib.rs b/zomes/rea_process/lib/src/lib.rs index 855ff74be..9df372862 100644 --- a/zomes/rea_process/lib/src/lib.rs +++ b/zomes/rea_process/lib/src/lib.rs @@ -44,17 +44,17 @@ pub fn handle_update_process(entry_def_id: S, process: UpdateRequest) -> Reco construct_response(&identity_address, &revision_id, &entry, get_link_fields(&identity_address)?) 
} -pub fn handle_delete_process(_entry_def_id: S, revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_process(_entry_def_id: S, revision_id: HeaderHash) -> RecordAPIResult { // load the record to ensure it is of the correct type let (_base_address, _entry) = read_record_entry_by_header::(&revision_id)?; - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &ProcessAddress, revision_id: &RevisionHash, e: &EntryData, ( + address: &ProcessAddress, revision_id: &HeaderHash, e: &EntryData, ( inputs, outputs, unplanned_economic_events, committed_inputs, committed_outputs, diff --git a/zomes/rea_process/rpc/src/lib.rs b/zomes/rea_process/rpc/src/lib.rs index 251ba78f5..73f42bfd9 100644 --- a/zomes/rea_process/rpc/src/lib.rs +++ b/zomes/rea_process/rpc/src/lib.rs @@ -8,7 +8,7 @@ use serde_maybe_undefined::{ default_false, }; pub use vf_attributes_hdk::{ - RevisionHash, + HeaderHash, ProcessAddress, Timestamp, ExternalURL, @@ -27,7 +27,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: ProcessAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub has_beginning: Option, @@ -135,7 +135,7 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub name: MaybeUndefined, #[serde(default)] @@ -161,7 +161,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_process/zome/src/lib.rs b/zomes/rea_process/zome/src/lib.rs index 0b383d277..d379f3df6 100644 --- a/zomes/rea_process/zome/src/lib.rs +++ b/zomes/rea_process/zome/src/lib.rs @@ -57,12 +57,7 @@ fn update_process(UpdateParams { process }: UpdateParams) -> ExternResult ExternResult { +fn delete_process(ByHeader { address }: ByHeader) -> ExternResult { Ok(handle_delete_process(PROCESS_ENTRY_TYPE, address)?) 
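The zome extern above now destructures a shared `ByHeader` input rather than a bespoke wrapper. `ByHeader`'s definition is not shown in this patch; the `ByHeader { address }` pattern implies a single-field struct, roughly as follows (assumed derives matching the other RPC types, and an assumed boolean result type):

    use hdk::prelude::*;
    use holochain_serialized_bytes::prelude::*;

    // assumed shape: one raw HeaderHash field named `address`
    #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)]
    pub struct ByHeader {
        pub address: HeaderHash,
    }

    #[hdk_extern]
    fn delete_process(ByHeader { address }: ByHeader) -> ExternResult<bool> {
        // pass the revision's HeaderHash straight through to the shared handler
        Ok(handle_delete_process(PROCESS_ENTRY_TYPE, address)?)
    }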
} diff --git a/zomes/rea_process_specification/lib/src/lib.rs b/zomes/rea_process_specification/lib/src/lib.rs index 9e742b658..dfeed4f3a 100644 --- a/zomes/rea_process_specification/lib/src/lib.rs +++ b/zomes/rea_process_specification/lib/src/lib.rs @@ -42,14 +42,14 @@ pub fn handle_update_process_specification(entry_def_id: S, process_specifica Ok(construct_response(&base_address, &revision_id, &new_entry)) } -pub fn handle_delete_process_specification(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_process_specification(revision_id: HeaderHash) -> RecordAPIResult { - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives fn construct_response<'a>( - address: &ProcessSpecificationAddress, revision_id: &RevisionHash, e: &EntryData, + address: &ProcessSpecificationAddress, revision_id: &HeaderHash, e: &EntryData, ) -> ResponseData { ResponseData { process_specification: Response { diff --git a/zomes/rea_process_specification/rpc/src/lib.rs b/zomes/rea_process_specification/rpc/src/lib.rs index 114e4397c..cad3459a6 100644 --- a/zomes/rea_process_specification/rpc/src/lib.rs +++ b/zomes/rea_process_specification/rpc/src/lib.rs @@ -9,7 +9,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, ProcessSpecificationAddress, }; @@ -32,7 +32,7 @@ pub struct UpdateParams { #[serde(rename_all = "camelCase")] pub struct Response { pub id: ProcessSpecificationAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, @@ -67,7 +67,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub name: MaybeUndefined, #[serde(default)] @@ -75,7 +75,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_proposal/lib/src/lib.rs b/zomes/rea_proposal/lib/src/lib.rs index ee3df49fb..48a364773 100644 --- a/zomes/rea_proposal/lib/src/lib.rs +++ b/zomes/rea_proposal/lib/src/lib.rs @@ -43,14 +43,14 @@ pub fn handle_update_proposal(entry_def_id: S, proposal: UpdateRequest) -> Re Ok(construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?)) } -pub fn handle_delete_proposal(address: RevisionHash) -> RecordAPIResult { +pub fn handle_delete_proposal(address: HeaderHash) -> RecordAPIResult { delete_record::(&address) } /// Create response from input DHT primitives fn construct_response<'a>( address: &ProposalAddress, - revision_id: &RevisionHash, + revision_id: &HeaderHash, e: &EntryData, (publishes, published_to): ( Vec, diff --git a/zomes/rea_proposal/rpc/src/lib.rs b/zomes/rea_proposal/rpc/src/lib.rs index 11615b71f..9fd03bff8 100644 --- a/zomes/rea_proposal/rpc/src/lib.rs +++ b/zomes/rea_proposal/rpc/src/lib.rs @@ -9,7 +9,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, ProposalAddress, ProposedIntentAddress, ProposedToAddress, Timestamp, }; @@ -33,7 +33,7 @@ pub struct UpdateParams { #[serde(rename_all = 
"camelCase")] pub struct Response { pub id: ProposalAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] pub has_beginning: Option, @@ -100,7 +100,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub name: MaybeUndefined, #[serde(default)] @@ -116,7 +116,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index 3f1ab41b6..7dd6c759a 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -40,7 +40,7 @@ pub fn handle_get_proposed_intent(entry_def_id: S, address: ProposedIntentAdd Ok(construct_response(&base_address, &revision, &entry)) } -pub fn handle_delete_proposed_intent(revision_id: &RevisionHash) -> RecordAPIResult +pub fn handle_delete_proposed_intent(revision_id: &HeaderHash) -> RecordAPIResult { let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -49,7 +49,7 @@ pub fn handle_delete_proposed_intent(revision_id: &RevisionHash) -> RecordAPIRes update_index!(Local(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address)))?; // manage record deletion - let res = delete_record::(&revision_id); + let res = delete_record::(&revision_id); // Update in associated foreign DNAs as well. // :TODO: In this pattern, foreign cells can also intervene in record deletion, and cause rollback. 
@@ -60,7 +60,7 @@ pub fn handle_delete_proposed_intent(revision_id: &RevisionHash) -> RecordAPIRes } /// Create response from input DHT primitives -fn construct_response<'a>(address: &ProposedIntentAddress, revision_id: &RevisionHash, e: &EntryData) -> ResponseData { +fn construct_response<'a>(address: &ProposedIntentAddress, revision_id: &HeaderHash, e: &EntryData) -> ResponseData { ResponseData { proposed_intent: Response { // entry fields diff --git a/zomes/rea_proposed_intent/rpc/src/lib.rs b/zomes/rea_proposed_intent/rpc/src/lib.rs index 44b94c474..ae4692736 100644 --- a/zomes/rea_proposed_intent/rpc/src/lib.rs +++ b/zomes/rea_proposed_intent/rpc/src/lib.rs @@ -8,7 +8,7 @@ */ use holochain_serialized_bytes::prelude::*; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, ProposedIntentAddress, IntentAddress, ProposalAddress, }; @@ -19,7 +19,7 @@ pub use vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: ProposedIntentAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub reciprocal: bool, pub published_in: ProposalAddress, pub publishes: IntentAddress, diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 5aebf17bf..3ba550f4f 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs @@ -41,7 +41,7 @@ pub fn handle_get_proposed_to(entry_def_id: S, address: ProposedToAddress) -> Ok(construct_response(&base_address, &revision, &entry)) } -pub fn handle_delete_proposed_to(revision_id: &RevisionHash) -> RecordAPIResult +pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult { let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -51,7 +51,7 @@ pub fn handle_delete_proposed_to(revision_id: &RevisionHash) -> RecordAPIResult< } /// Create response from input DHT primitives -fn construct_response<'a>(address: &ProposedToAddress, revision_id: &RevisionHash, e: &EntryData) -> ResponseData { +fn construct_response<'a>(address: &ProposedToAddress, revision_id: &HeaderHash, e: &EntryData) -> ResponseData { ResponseData { proposed_to: Response { id: address.to_owned(), diff --git a/zomes/rea_proposed_to/rpc/src/lib.rs b/zomes/rea_proposed_to/rpc/src/lib.rs index 8d49f1c62..bfb504b84 100644 --- a/zomes/rea_proposed_to/rpc/src/lib.rs +++ b/zomes/rea_proposed_to/rpc/src/lib.rs @@ -8,7 +8,7 @@ */ use holochain_serialized_bytes::prelude::*; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, ProposedToAddress, AgentAddress, ProposalAddress, }; @@ -27,7 +27,7 @@ pub struct CreateParams { #[serde(rename_all = "camelCase")] pub struct Response { pub id: ProposedToAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub proposed_to: AgentAddress, pub proposed: ProposalAddress, } diff --git a/zomes/rea_resource_specification/lib/src/lib.rs b/zomes/rea_resource_specification/lib/src/lib.rs index 9fa763523..0698634b6 100644 --- a/zomes/rea_resource_specification/lib/src/lib.rs +++ b/zomes/rea_resource_specification/lib/src/lib.rs @@ -46,15 +46,15 @@ pub fn handle_update_resource_specification(entry_def_id: S, resource_specifi Ok(construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?)) } -pub fn handle_delete_resource_specification(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_resource_specification(revision_id: HeaderHash) -> RecordAPIResult { - 
delete_record::(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives fn construct_response<'a>( address: &ResourceSpecificationAddress, - revision_id: &RevisionHash, + revision_id: &HeaderHash, e: &EntryData, // :TODO: link conforming resources in associated link registry DNA module ( diff --git a/zomes/rea_resource_specification/rpc/src/lib.rs b/zomes/rea_resource_specification/rpc/src/lib.rs index 042392406..2da8e8b38 100644 --- a/zomes/rea_resource_specification/rpc/src/lib.rs +++ b/zomes/rea_resource_specification/rpc/src/lib.rs @@ -9,7 +9,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ - RevisionHash, ByAddress, ByHeader, + HeaderHash, ByAddress, ByHeader, ResourceSpecificationAddress, EconomicResourceAddress, ExternalURL, @@ -36,7 +36,7 @@ pub struct UpdateParams { #[serde(rename_all = "camelCase")] pub struct Response { pub id: ResourceSpecificationAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub image: Option, @@ -83,7 +83,7 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] pub name: MaybeUndefined, #[serde(default)] @@ -95,7 +95,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_satisfaction/lib/src/lib.rs b/zomes/rea_satisfaction/lib/src/lib.rs index b44cb50f4..dd0cdfdec 100644 --- a/zomes/rea_satisfaction/lib/src/lib.rs +++ b/zomes/rea_satisfaction/lib/src/lib.rs @@ -10,12 +10,12 @@ * @package Holo-REA */ use hdk_records::RecordAPIResult; -use vf_attributes_hdk::{RevisionHash, SatisfactionAddress}; +use vf_attributes_hdk::{HeaderHash, SatisfactionAddress}; use hc_zome_rea_satisfaction_storage::EntryData; use hc_zome_rea_satisfaction_rpc::*; /// Create response from input DHT primitives -pub fn construct_response(address: &SatisfactionAddress, revision_id: &RevisionHash, e: &EntryData) -> RecordAPIResult { +pub fn construct_response(address: &SatisfactionAddress, revision_id: &HeaderHash, e: &EntryData) -> RecordAPIResult { Ok(ResponseData { satisfaction: Response { id: address.to_owned().into(), diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index 89d033ca7..53253f5ff 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -63,7 +63,7 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult { // read any referencing indexes let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -71,7 +71,7 @@ pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult< // handle link fields update_index!(Local(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address)))?; - delete_record::(&revision_id) + delete_record::(&revision_id) } /// Properties accessor for zome config. 
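In the `update_index!` calls above, the `.not(&vec![...])` clause names identifier values whose index links should be removed, paired with the reciprocal index that points back at the record being deleted. Unlinking a satisfaction from the event it previously satisfied therefore reads as below (usage sketch, assuming a delete handler where `entry` and `base_address` are already in scope):

    // drop the `satisfied_by` link pointing at the old event, and the
    // reciprocal `satisfies` link held on the economic event's side
    update_index!(Local(
        satisfaction.satisfied_by.not(&vec![entry.satisfied_by]),
        economic_event.satisfies(&base_address)
    ))?;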
diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index b3b6ba184..3771ab33e 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -139,7 +139,7 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques construct_response(&base_address, &revision_id, &new_entry) } -pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult +pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult { let (base_address, entry) = read_record_entry_by_header::(&revision_id)?; @@ -160,7 +160,7 @@ pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult< )?; } - delete_record::(&revision_id) + delete_record::(&revision_id) } fn is_satisfiedby_local_commitment(event_or_commitment: &EventOrCommitmentAddress) -> RecordAPIResult { diff --git a/zomes/rea_satisfaction/rpc/src/lib.rs b/zomes/rea_satisfaction/rpc/src/lib.rs index 602a5ece8..d40347342 100644 --- a/zomes/rea_satisfaction/rpc/src/lib.rs +++ b/zomes/rea_satisfaction/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - RevisionHash, ByHeader, ByAddress, + HeaderHash, ByHeader, ByAddress, SatisfactionAddress, EventOrCommitmentAddress, EconomicEventAddress, @@ -39,7 +39,7 @@ pub struct UpdateParams { #[serde(rename_all = "camelCase")] pub struct Response { pub id: SatisfactionAddress, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub satisfied_by: EventOrCommitmentAddress, pub satisfies: IntentAddress, #[serde(skip_serializing_if = "Option::is_none")] @@ -99,7 +99,7 @@ impl<'a> CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, #[serde(default)] #[serde(skip_serializing_if = "MaybeUndefined::is_undefined")] pub satisfied_by: MaybeUndefined, // note this setup allows None to be passed but `update_with` ignores it @@ -118,7 +118,7 @@ pub struct UpdateRequest { } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } diff --git a/zomes/rea_unit/lib/src/lib.rs b/zomes/rea_unit/lib/src/lib.rs index 87510a772..64af16d12 100644 --- a/zomes/rea_unit/lib/src/lib.rs +++ b/zomes/rea_unit/lib/src/lib.rs @@ -47,12 +47,12 @@ pub fn handle_update_unit(entry_def_id: S, unit: UpdateRequest) -> RecordAPIR Ok(construct_response(&new_id, &new_revision, &new_entry)) } -pub fn handle_delete_unit(revision_id: RevisionHash) -> RecordAPIResult { - delete_anchored_record::(&revision_id) +pub fn handle_delete_unit(revision_id: HeaderHash) -> RecordAPIResult { + delete_anchored_record::(&revision_id) } fn construct_response<'a>( - id: &UnitId, revision_id: &RevisionHash, e: &EntryData + id: &UnitId, revision_id: &HeaderHash, e: &EntryData ) -> ResponseData { ResponseData { unit: Response { diff --git a/zomes/rea_unit/rpc/src/lib.rs b/zomes/rea_unit/rpc/src/lib.rs index 7b46b87de..738f3286f 100644 --- a/zomes/rea_unit/rpc/src/lib.rs +++ b/zomes/rea_unit/rpc/src/lib.rs @@ -17,7 +17,7 @@ use hdk_records::{ // Export external type interface to allow consuming zomes to easily import & define zome API pub use vf_attributes_hdk::{ - RevisionHash, + HeaderHash, UnitId, }; @@ -27,7 +27,7 @@ pub use 
vf_attributes_hdk::{ #[serde(rename_all = "camelCase")] pub struct Response { pub id: UnitId, - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub label: String, pub symbol: String, } @@ -72,13 +72,13 @@ impl UniquelyIdentifiable for CreateRequest { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] #[serde(rename_all = "camelCase")] pub struct UpdateRequest { - pub revision_id: RevisionHash, + pub revision_id: HeaderHash, pub label: MaybeUndefined, pub symbol: MaybeUndefined, } impl<'a> UpdateRequest { - pub fn get_revision_id(&'a self) -> &RevisionHash { + pub fn get_revision_id(&'a self) -> &HeaderHash { &self.revision_id } From 6df2251833d8b845b00f74cf80fe2f491cb9ddd2 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 10:20:55 +1000 Subject: [PATCH 006/181] fix generics to match new method signatures, missing ByHeader imports in RPC crates --- lib/hdk_semantic_indexes/zome_derive/src/lib.rs | 2 +- zomes/rea_agreement/lib/src/lib.rs | 2 +- zomes/rea_commitment/lib/src/lib.rs | 2 +- zomes/rea_commitment/rpc/src/lib.rs | 2 +- zomes/rea_economic_event/lib/src/lib.rs | 2 +- zomes/rea_economic_resource/lib/src/lib.rs | 8 ++++---- zomes/rea_fulfillment/lib_destination/src/lib.rs | 2 +- zomes/rea_fulfillment/lib_origin/src/lib.rs | 2 +- zomes/rea_intent/lib/src/lib.rs | 2 +- zomes/rea_intent/rpc/src/lib.rs | 2 +- zomes/rea_process/lib/src/lib.rs | 2 +- zomes/rea_process/rpc/src/lib.rs | 2 +- zomes/rea_process_specification/lib/src/lib.rs | 2 +- zomes/rea_proposal/lib/src/lib.rs | 4 ++-- zomes/rea_proposed_intent/lib/src/lib.rs | 2 +- zomes/rea_proposed_to/lib/src/lib.rs | 4 ++-- zomes/rea_resource_specification/lib/src/lib.rs | 2 +- zomes/rea_satisfaction/lib_destination/src/lib.rs | 2 +- zomes/rea_satisfaction/lib_origin/src/lib.rs | 2 +- 19 files changed, 24 insertions(+), 24 deletions(-) diff --git a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs index ad9ce2e6a..0e92b4ed8 100644 --- a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs @@ -147,7 +147,7 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { quote! { match ¶ms.#query_field_ident { Some(#query_field_ident) => { - entries_result = query_index::( + entries_result = query_index::( &stringify!(#related_record_type_str_attribute), #query_field_ident, &stringify!(#reciprocal_index_name), diff --git a/zomes/rea_agreement/lib/src/lib.rs b/zomes/rea_agreement/lib/src/lib.rs index 686096b4b..766c30b76 100644 --- a/zomes/rea_agreement/lib/src/lib.rs +++ b/zomes/rea_agreement/lib/src/lib.rs @@ -33,7 +33,7 @@ pub fn handle_create_agreement(entry_def_id: S, agreement: CreateRequest) -> pub fn handle_get_agreement(entry_def_id: S, address: AgreementAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, revision, &entry, get_link_fields(&base_address)?) 
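PATCH 006's mechanical change is visible in hunks like `handle_get_agreement` above: each `read_record_entry` call gains an explicit `DnaAddressable` identifier type so the helper knows which wrapper to decode from storage. The flattened diff text drops the angle-bracketed parameter lists, so the reconstruction below is illustrative only; `EntryData`/`EntryStorage` are placeholder names and the exact parameter order is assumed:

    pub fn handle_get_agreement<S>(entry_def_id: S, address: AgreementAddress) -> RecordAPIResult<ResponseData>
        where S: AsRef<str>,
    {
        // the identifier type (AgreementAddress) is now threaded through the read
        let (revision, base_address, entry) =
            read_record_entry::<EntryData, EntryStorage, AgreementAddress>(&entry_def_id, address.as_ref())?;
        construct_response(&base_address, revision, &entry, get_link_fields(&base_address)?)
    }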
} diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs index 46fc8b876..872e3e821 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -48,7 +48,7 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - pub fn handle_get_commitment(entry_def_id: S, address: CommitmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } diff --git a/zomes/rea_commitment/rpc/src/lib.rs b/zomes/rea_commitment/rpc/src/lib.rs index 03f766c30..ce7c6ca9a 100644 --- a/zomes/rea_commitment/rpc/src/lib.rs +++ b/zomes/rea_commitment/rpc/src/lib.rs @@ -11,7 +11,7 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::{MaybeUndefined, default_false}; use vf_measurement::QuantityValue; pub use vf_attributes_hdk::{ - HeaderHash, + HeaderHash, ByHeader, ActionId, DateTime, FixedOffset, ExternalURL, diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index c17d074d2..ac779e64d 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -113,7 +113,7 @@ impl API for EconomicEventZomePermissableDefault { } fn get_economic_event(entry_def_id: Self::S, address: EconomicEventAddress) -> RecordAPIResult { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index 3c2ba6477..14052563a 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -98,7 +98,7 @@ impl API for EconomicResourceZomePermissableDefault { fn get_economic_resource(entry_def_id: Self::S, event_entry_def_id: Self::S, process_entry_def_id: Self::S, address: EconomicResourceAddress) -> RecordAPIResult { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&event_entry_def_id, &process_entry_def_id, &address)?) 
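Note the two read paths now in play: get-handlers like the one above resolve a record through its identity, an `EntryHash`-keyed identifier followed to the latest revision, while update/delete handlers pin one exact revision by `HeaderHash`. Side by side (helper names as used in this series; generic parameters are placeholders):

    fn example_reads(entry_def_id: &str, address: EconomicResourceAddress, revision_id: HeaderHash) -> RecordAPIResult<()> {
        // latest revision, resolved via the identity anchor
        let (_revision, _id, _entry) =
            read_record_entry::<EntryData, EntryStorage, EconomicResourceAddress>(&entry_def_id, address.as_ref())?;
        // one pinned revision, addressed directly by its header
        let (_entry_hash, _pinned_entry) =
            read_record_entry_by_header::<EntryData, EntryStorage, _>(&revision_id)?;
        Ok(())
    }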
} @@ -311,7 +311,7 @@ fn get_resource_state(event_entry_def_id: S, resource: &EconomicResourceAddre return result; } - let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); + let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); match evt { Err(_) => result, // :TODO: this indicates some data integrity error Ok((_, _, entry)) => { @@ -340,14 +340,14 @@ fn get_resource_stage(event_entry_def_id: S, process_entry_def_id: S, resourc return result; } - let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); + let evt = read_record_entry::(&event_entry_def_id, event.as_ref()); match evt { Err(_) => result, // :TODO: this indicates some data integrity error Ok((_, _, entry)) => { match &entry.output_of { Some(output_of) => { // get the associated process - let maybe_process_entry = read_record_entry::(&process_entry_def_id, output_of.as_ref()); + let maybe_process_entry = read_record_entry::(&process_entry_def_id, output_of.as_ref()); // check to see if it has an associated specification match &maybe_process_entry { Ok((_,_, process_entry)) => match &process_entry.based_on { diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index 1134ba8d3..e6cbdaa66 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -42,7 +42,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) pub fn handle_get_fulfillment(entry_def_id: S, address: FulfillmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 33aa1c061..4b20eb318 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -50,7 +50,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) pub fn handle_get_fulfillment(entry_def_id: S, address: FulfillmentAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index 2c4a506da..f4d2493bb 100644 --- a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -46,7 +46,7 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record pub fn handle_get_intent(entry_def_id: S, address: IntentAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) 
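`get_resource_stage` above derives a resource's stage indirectly: walk the resource's event history, follow each event's `output_of` link to its process, and take that process's `based_on` specification. Reconstructed for readability (placeholder storage types and an assumed `ProcessSpecificationAddress` field type; error branches collapsed into `Option` for brevity):

    fn stage_for_event(event_entry_def_id: &str, process_entry_def_id: &str, event: &EconomicEventAddress) -> Option<ProcessSpecificationAddress> {
        // a failed read indicates a data integrity error; treat it as "no stage"
        let (_, _, event_entry) =
            read_record_entry::<EntryData, EntryStorage, EconomicEventAddress>(&event_entry_def_id, event.as_ref()).ok()?;
        // follow the event to the process it was an output of
        let output_of = event_entry.output_of?;
        let (_, _, process_entry) =
            read_record_entry::<ProcessData, ProcessStorage, ProcessAddress>(&process_entry_def_id, output_of.as_ref()).ok()?;
        // the process's specification, if any, is the resource's stage
        process_entry.based_on
    }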
} diff --git a/zomes/rea_intent/rpc/src/lib.rs b/zomes/rea_intent/rpc/src/lib.rs index 30a7598d1..e897d6355 100644 --- a/zomes/rea_intent/rpc/src/lib.rs +++ b/zomes/rea_intent/rpc/src/lib.rs @@ -21,7 +21,7 @@ pub use vf_attributes_hdk::{ SatisfactionAddress, LocationAddress, ProposedIntentAddress, - HeaderHash, + HeaderHash, ByHeader, }; //---------------- EXTERNAL RECORD STRUCTURE ---------------- diff --git a/zomes/rea_process/lib/src/lib.rs b/zomes/rea_process/lib/src/lib.rs index 9df372862..9e7a62e6e 100644 --- a/zomes/rea_process/lib/src/lib.rs +++ b/zomes/rea_process/lib/src/lib.rs @@ -32,7 +32,7 @@ pub fn handle_create_process(entry_def_id: S, process: CreateRequest) -> Reco pub fn handle_get_process(entry_def_id: S, address: ProcessAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry, get_link_fields(&address)?) } diff --git a/zomes/rea_process/rpc/src/lib.rs b/zomes/rea_process/rpc/src/lib.rs index 73f42bfd9..68ecee09d 100644 --- a/zomes/rea_process/rpc/src/lib.rs +++ b/zomes/rea_process/rpc/src/lib.rs @@ -8,7 +8,7 @@ use serde_maybe_undefined::{ default_false, }; pub use vf_attributes_hdk::{ - HeaderHash, + HeaderHash, ByHeader, ProcessAddress, Timestamp, ExternalURL, diff --git a/zomes/rea_process_specification/lib/src/lib.rs b/zomes/rea_process_specification/lib/src/lib.rs index dfeed4f3a..f038dd6b5 100644 --- a/zomes/rea_process_specification/lib/src/lib.rs +++ b/zomes/rea_process_specification/lib/src/lib.rs @@ -30,7 +30,7 @@ pub fn handle_create_process_specification(entry_def_id: S, process_specifica pub fn handle_get_process_specification(entry_def_id: S, address: ProcessSpecificationAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; Ok(construct_response(&base_address, &revision, &entry)) } diff --git a/zomes/rea_proposal/lib/src/lib.rs b/zomes/rea_proposal/lib/src/lib.rs index 48a364773..615b68494 100644 --- a/zomes/rea_proposal/lib/src/lib.rs +++ b/zomes/rea_proposal/lib/src/lib.rs @@ -31,7 +31,7 @@ pub fn handle_create_proposal(entry_def_id: S, proposal: CreateRequest) -> Re pub fn handle_get_proposal(entry_def_id: S, address: ProposalAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; Ok(construct_response(&base_address, &revision, &entry, get_link_fields(&base_address)?)) } @@ -44,7 +44,7 @@ pub fn handle_update_proposal(entry_def_id: S, proposal: UpdateRequest) -> Re } pub fn handle_delete_proposal(address: HeaderHash) -> RecordAPIResult { - delete_record::(&address) + delete_record::(&address) } /// Create response from input DHT primitives diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index 7dd6c759a..f1d812688 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -36,7 +36,7 @@ pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: Create pub fn handle_get_proposed_intent(entry_def_id: S, address: ProposedIntentAddress) -> RecordAPIResult where S: AsRef, { 
- let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; Ok(construct_response(&base_address, &revision, &entry)) } diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 3ba550f4f..5b34c39fa 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs @@ -37,7 +37,7 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest) pub fn handle_get_proposed_to(entry_def_id: S, address: ProposedToAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; Ok(construct_response(&base_address, &revision, &entry)) } @@ -47,7 +47,7 @@ pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult(&revision_id) + delete_record::(&revision_id) } /// Create response from input DHT primitives diff --git a/zomes/rea_resource_specification/lib/src/lib.rs b/zomes/rea_resource_specification/lib/src/lib.rs index 0698634b6..a27fce341 100644 --- a/zomes/rea_resource_specification/lib/src/lib.rs +++ b/zomes/rea_resource_specification/lib/src/lib.rs @@ -34,7 +34,7 @@ pub fn handle_create_resource_specification(entry_def_id: S, resource_specifi pub fn handle_get_resource_specification(entry_def_id: S, address: ResourceSpecificationAddress) -> RecordAPIResult where S: AsRef, { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; Ok(construct_response(&address, &revision, &entry, get_link_fields(&base_address)?)) } diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index 53253f5ff..f50ed6fd2 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -42,7 +42,7 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques pub fn handle_get_satisfaction(entry_def_id: S, address: SatisfactionAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index 3771ab33e..63758a23e 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -59,7 +59,7 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques pub fn handle_get_satisfaction(entry_def_id: S, address: SatisfactionAddress) -> RecordAPIResult where S: AsRef { - let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; + let (revision, base_address, entry) = read_record_entry::(&entry_def_id, address.as_ref())?; construct_response(&base_address, &revision, &entry) } From 58eb4a69a028f4adbcea3dcd8b91340851cc7db7 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 14:28:26 +1000 Subject: [PATCH 007/181] reimplement identity read/write helpers to directly store UUIDs in the DHT remove unneeded direct retrieval of identity EntryHash, 
now gets read from DnaAddressable --- lib/hdk_records/src/identity_helpers.rs | 70 ++++++------------------- 1 file changed, 17 insertions(+), 53 deletions(-) diff --git a/lib/hdk_records/src/identity_helpers.rs b/lib/hdk_records/src/identity_helpers.rs index 9405d5d3e..317c763e7 100644 --- a/lib/hdk_records/src/identity_helpers.rs +++ b/lib/hdk_records/src/identity_helpers.rs @@ -19,31 +19,13 @@ * @since 2019-05-16 */ use hdk::prelude::*; -use hdk::hash_path::path::Component; -use hdk_type_serialization_macros::{extern_id_to_bytes, bytes_to_extern_id, DnaAddressable}; +use hdk_type_serialization_macros::DnaAddressable; use crate::{ RecordAPIResult, DataIntegrityError, - link_helpers::get_linked_addresses, entry_helpers::get_entry_by_address, }; -/// Represent `key index` record identities using native Holochain `Path` construct -/// -/// :TODO: sharding strategy for `c2` -/// -fn identity_path_for( - entry_type_root_path: S, - base_address: &A, -) -> Path - where S: AsRef, - A: DnaAddressable, -{ - let type_root = entry_type_root_path.as_ref().as_bytes().to_vec(); - - Path::from(vec![type_root.into(), extern_id_to_bytes::(base_address).into()]) -} - /// Determine root `Path` for an entry type, can be used to anchor type-specific indexes & queries. /// pub (crate) fn entry_type_root_path( @@ -71,59 +53,41 @@ pub fn calculate_identity_address( } /// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), -/// query the underlying address for the progenitor Entry of the record. +/// query the `DnaHash` and `AnyDhtHash` of the record. /// -pub (crate) fn read_entry_identity( - identity_path_address: &EntryHash, -) -> RecordAPIResult -{ - let mut addrs = get_linked_addresses(identity_path_address, LinkTag::new(crate::identifiers::RECORD_INITIAL_ENTRY_LINK_TAG))?; - let entry_hash = addrs.pop().ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))?; - - Ok(entry_hash) -} - -/// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), -/// query the `DnaHash` and `AnyDhtHash` of the record by inspecting the associated `Path` entry. -/// -/// :WARNING: if sharding is introduced, this will cause runtime failures until changed -/// -pub fn read_entry_identity_full( +pub fn read_entry_identity( identity_path_address: &EntryHash, ) -> RecordAPIResult where A: DnaAddressable, + SerializedBytes: TryInto, { - let index_path: Path = get_entry_by_address(&identity_path_address)?; - let components: &Vec = index_path.as_ref(); - let compound_key = components.last(); - - // ensure that a path component exists - if None == compound_key { return Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), None)); } + let identifier = get_entry_by_address(identity_path_address); - // ensure final addressing path component length - let key_bytes = compound_key.unwrap().as_ref(); - match bytes_to_extern_id(key_bytes) { - Err(_) => Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), Some(key_bytes.to_vec()))), + // throw meaningful error if reference is invalid + match identifier { + Err(_) => Err(DataIntegrityError::CorruptIndexError(identity_path_address.clone(), None)), Ok(identity) => Ok(identity), } } //-------------------------------[ CREATE ]------------------------------------- -/// Creates a `Path` to initialise a unique index for a new entry, and returns -/// the `EntryHash` of the new `Path`. 
+/// Creates a pointer to initialise a universally-unique ID for a new entry, and returns +/// the `EntryHash` of the stored identifier. /// -/// This `Path` is intended to be used as an anchor to base links to/from the +/// This identifier is intended to be used as an anchor to base links to/from the /// entry onto. /// -pub fn create_entry_identity( +pub fn create_entry_identity( entry_type_root_path: S, initial_address: &A, ) -> RecordAPIResult where S: AsRef, A: DnaAddressable, + CreateInput: TryFrom, + Entry: TryFrom, + WasmError: From, { - let path = identity_path_for(entry_type_root_path, initial_address); - path.ensure()?; - Ok(path.path_entry_hash()?) + create_entry(initial_address.to_owned())?; + calculate_identity_address(entry_type_root_path, initial_address) } From 74ce274fb5bdb764daa1a0e09f4ebb8ae3f483c7 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 14:29:06 +1000 Subject: [PATCH 008/181] adjust trait bounds for identity managing methods to reflect new storage requirements --- lib/hdk_records/src/anchored_record_helpers.rs | 3 ++- lib/hdk_records/src/local_index_helpers.rs | 2 +- lib/hdk_records/src/record_helpers.rs | 13 ++++++++----- lib/hdk_semantic_indexes/zome/src/lib.rs | 18 +++++++++++------- 4 files changed, 22 insertions(+), 14 deletions(-) diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index 4bf760a85..27ee2b145 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -117,7 +117,7 @@ pub fn read_anchored_record_entry( T: std::fmt::Debug, B: DnaAddressable, A: DnaIdentifiable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { @@ -145,6 +145,7 @@ pub fn create_anchored_record( I: Identifiable, WasmError: From, Entry: TryFrom + TryFrom, + CreateInput: TryFrom, R: Clone + Identified, { // determine unique anchor index key diff --git a/lib/hdk_records/src/local_index_helpers.rs b/lib/hdk_records/src/local_index_helpers.rs index f1ee2bf24..cf92eb634 100644 --- a/lib/hdk_records/src/local_index_helpers.rs +++ b/lib/hdk_records/src/local_index_helpers.rs @@ -33,7 +33,7 @@ pub fn query_root_index<'a, T, R, O, I: AsRef>( ) -> RecordAPIResult>> where T: std::fmt::Debug, O: DnaAddressable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index a2342d729..fd9c5d8ad 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -90,14 +90,15 @@ pub (crate) fn read_record_entry_by_identity( ) -> RecordAPIResult<(HeaderHash, B, T)> where T: std::fmt::Debug, B: DnaAddressable, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom, R: std::fmt::Debug + Identified, { // read active links to current version - let entry_hash = read_entry_identity(identity_address)?; + let identifier: B = read_entry_identity(identity_address)?; // pull details of the current version, to ensure we have the most recent - let latest_header_hash = get_latest_header_hash(entry_hash)?; + let entry_hash: &EntryHash = identifier.as_ref(); + let latest_header_hash = get_latest_header_hash(entry_hash.to_owned())?; let (read_entry_hash, entry_data) = read_record_entry_by_header(&latest_header_hash)?; @@ -117,7 +118,7 @@ pub fn read_record_entry( where S: AsRef, T: std::fmt::Debug, B: DnaAddressable, 
- SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, Entry: TryFrom + TryFrom, WasmError: From, R: std::fmt::Debug + Identified, @@ -140,7 +141,8 @@ pub fn create_record( C: Into, I: Identifiable, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, + CreateInput: TryFrom, R: Identified, { // convert the type's CREATE payload into internal storage struct @@ -156,6 +158,7 @@ pub fn create_record( let identity_address = create_entry_identity(&entry_def_id, &identity)?; // link the identifier to the actual entry + // :TODO: this isn't needed for reading anymore, but might be worthwhile retaining for legibility in the DHT. Needs consideration as to DHT size bloat tradeoff. create_link(identity_address, entry_hash, LinkTag::new(crate::identifiers::RECORD_INITIAL_ENTRY_LINK_TAG))?; Ok((header_hash, identity, entry_data)) diff --git a/lib/hdk_semantic_indexes/zome/src/lib.rs b/lib/hdk_semantic_indexes/zome/src/lib.rs index 28d3301f5..e8606197f 100644 --- a/lib/hdk_semantic_indexes/zome/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome/src/lib.rs @@ -11,7 +11,7 @@ use hdk_records::{ identities::{ calculate_identity_address, create_entry_identity, - read_entry_identity_full, + read_entry_identity, }, links::{get_linked_addresses, get_linked_headers}, rpc::call_local_zome_method, @@ -45,14 +45,15 @@ pub fn read_index<'a, O, A, S, I, E>( I: AsRef, A: DnaAddressable, O: DnaAddressable, - Entry: TryFrom, + Entry: TryFrom + TryFrom, + SerializedBytes: TryInto, WasmError: From, { let index_address = calculate_identity_address(base_entry_type, base_address)?; let refd_index_addresses = get_linked_addresses(&index_address, LinkTag::new(link_tag.as_ref()))?; let (existing_link_results, read_errors): (Vec>, Vec>) = refd_index_addresses.iter() - .map(read_entry_identity_full) + .map(read_entry_identity) .partition(Result::is_ok); // :TODO: this might have some issues as it presumes integrity of the DHT; needs investigating @@ -83,7 +84,7 @@ pub fn query_index<'a, T, O, C, F, A, S, I, J, E>( O: DnaAddressable, T: serde::de::DeserializeOwned + std::fmt::Debug, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, Entry: TryFrom, WasmError: From, @@ -112,7 +113,7 @@ fn retrieve_foreign_records<'a, T, B, C, F, S>( T: serde::de::DeserializeOwned + std::fmt::Debug, B: DnaAddressable, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, { let read_single_record = retrieve_foreign_record::(zome_name_from_config, &method_name); @@ -130,11 +131,11 @@ fn retrieve_foreign_record<'a, T, B, C, F, S>( T: serde::de::DeserializeOwned + std::fmt::Debug, B: DnaAddressable, C: std::fmt::Debug, - SerializedBytes: TryInto, + SerializedBytes: TryInto + TryInto, F: Fn(C) -> Option, { move |addr| { - let address: B = read_entry_identity_full(addr)?; + let address: B = read_entry_identity(addr)?; let entry_res: T = call_local_zome_method(zome_name_from_config.to_owned(), method_name, ByAddress { address })?; Ok(entry_res) } @@ -164,6 +165,7 @@ pub fn sync_index( A: DnaAddressable, B: DnaAddressable, Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, WasmError: From, { // create any new indexes @@ -206,6 +208,7 @@ fn create_remote_index_destination( A: DnaAddressable, B: DnaAddressable, Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, WasmError: From, { // create a base entry pointer for the referenced origin record @@ -230,6 +233,7 @@ fn create_dest_identities_and_indexes<'a, A, B, S, I, E>( 
A: DnaAddressable, B: 'a + DnaAddressable, Entry: TryFrom + TryFrom, + CreateInput: TryFrom + TryFrom, WasmError: From, { let base_method = create_dest_indexes(source_entry_type, source, dest_entry_type, link_tag, link_tag_reciprocal); From 3981d6d28488169b1e1f87709e6e125f70f05769 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 17:24:08 +1000 Subject: [PATCH 009/181] define identifier EntryDefs using verbose macro as substitution for proc macro attributes in macros-by-example does not work --- lib/hdk_type_serialization_macros/src/lib.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_type_serialization_macros/src/lib.rs index ff7208a03..d135e475e 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_type_serialization_macros/src/lib.rs @@ -51,11 +51,19 @@ pub trait DnaAddressable #[macro_export] macro_rules! addressable_identifier { ($r:ident => $base:ty) => { - // externally facing type, with DnaHash of cell for context - #[hdk_entry(id="$r")] - #[derive(Clone, PartialEq, Eq, Hash)] + // externally facing type, with DnaHash of cell for universally-unique context + #[derive(Serialize, Deserialize, SerializedBytes, Debug, Clone, PartialEq, Eq, Hash)] pub struct $r(pub DnaHash, pub $base); + // define as an EntryDef so identifiers can be stored directly to the DHT as indexing anchors + entry_def!($r EntryDef { + id: stringify!($r).into(), + crdt_type: CrdtType, + required_validations: RequiredValidations::default(), + visibility: EntryVisibility::Public, + required_validation_type: RequiredValidationType::default(), + }); + // constructor impl $crate::DnaAddressable<$base> for $r { fn new(dna: DnaHash, identifier: $base) -> Self { From 8963472e7a5d451a084547fd79b91549ede02573 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 17:47:29 +1000 Subject: [PATCH 010/181] fix EntryDefs for index pointers not being defined, and use of entry_defs overriding already-registered defs in indexing zomes --- lib/hdk_semantic_indexes/zome_derive/src/lib.rs | 3 --- zomes/rea_agreement/zome/src/lib.rs | 1 + zomes/rea_commitment/zome/src/lib.rs | 1 + zomes/rea_economic_event/zome/src/lib.rs | 1 + zomes/rea_economic_resource/zome/src/lib.rs | 1 + zomes/rea_fulfillment/zome_observation/src/lib.rs | 1 + zomes/rea_intent/zome/src/lib.rs | 1 + zomes/rea_process/zome/src/lib.rs | 1 + zomes/rea_process_specification/zome/src/lib.rs | 1 + zomes/rea_proposal/zome/src/lib.rs | 1 + zomes/rea_proposed_intent/zome/src/lib.rs | 1 + zomes/rea_proposed_to/zome/src/lib.rs | 1 + zomes/rea_resource_specification/zome/src/lib.rs | 1 + zomes/rea_satisfaction/zome_observation/src/lib.rs | 1 + zomes/rea_satisfaction/zome_planning/src/lib.rs | 1 + 15 files changed, 14 insertions(+), 3 deletions(-) diff --git a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs index 0e92b4ed8..c871ff7c3 100644 --- a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs @@ -164,9 +164,6 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { use hdk::prelude::*; use hdk_semantic_indexes_zome_lib::*; - // unrelated toplevel zome boilerplate - entry_defs![PathEntry::entry_def()]; - // :TODO: obviate this with zome-specific configs #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct DnaConfigSlice { diff --git a/zomes/rea_agreement/zome/src/lib.rs 
b/zomes/rea_agreement/zome/src/lib.rs index 5d99459d0..2ff3ee437 100644 --- a/zomes/rea_agreement/zome/src/lib.rs +++ b/zomes/rea_agreement/zome/src/lib.rs @@ -16,6 +16,7 @@ use hc_zome_rea_agreement_lib::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + AgreementAddress::entry_def(), EntryDef { id: AGREEMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_commitment/zome/src/lib.rs b/zomes/rea_commitment/zome/src/lib.rs index e6059f35f..2aeca4c0e 100644 --- a/zomes/rea_commitment/zome/src/lib.rs +++ b/zomes/rea_commitment/zome/src/lib.rs @@ -43,6 +43,7 @@ fn validate(validation_data: ValidateData) -> ExternResult ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + CommitmentAddress::entry_def(), EntryDef { id: CAP_STORAGE_ENTRY_DEF_ID.into(), visibility: EntryVisibility::Private, diff --git a/zomes/rea_economic_event/zome/src/lib.rs b/zomes/rea_economic_event/zome/src/lib.rs index 519a40d9f..c4641db2e 100644 --- a/zomes/rea_economic_event/zome/src/lib.rs +++ b/zomes/rea_economic_event/zome/src/lib.rs @@ -18,6 +18,7 @@ use hc_zome_rea_process_storage_consts::PROCESS_ENTRY_TYPE; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + EconomicEventAddress::entry_def(), EntryDef { id: EVENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_economic_resource/zome/src/lib.rs b/zomes/rea_economic_resource/zome/src/lib.rs index b1fca882c..cf6b1cb31 100644 --- a/zomes/rea_economic_resource/zome/src/lib.rs +++ b/zomes/rea_economic_resource/zome/src/lib.rs @@ -18,6 +18,7 @@ use hc_zome_rea_economic_resource_storage::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + EconomicResourceAddress::entry_def(), EntryDef { id: CAP_STORAGE_ENTRY_DEF_ID.into(), visibility: EntryVisibility::Private, diff --git a/zomes/rea_fulfillment/zome_observation/src/lib.rs b/zomes/rea_fulfillment/zome_observation/src/lib.rs index 9d849050f..438ff19f9 100644 --- a/zomes/rea_fulfillment/zome_observation/src/lib.rs +++ b/zomes/rea_fulfillment/zome_observation/src/lib.rs @@ -19,6 +19,7 @@ use hc_zome_rea_fulfillment_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + FulfillmentAddress::entry_def(), EntryDef { id: FULFILLMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_intent/zome/src/lib.rs b/zomes/rea_intent/zome/src/lib.rs index afa05280e..68367f915 100644 --- a/zomes/rea_intent/zome/src/lib.rs +++ b/zomes/rea_intent/zome/src/lib.rs @@ -39,6 +39,7 @@ fn validate(validation_data: ValidateData) -> ExternResult ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + IntentAddress::entry_def(), EntryDef { id: CAP_STORAGE_ENTRY_DEF_ID.into(), visibility: EntryVisibility::Private, diff --git a/zomes/rea_process/zome/src/lib.rs b/zomes/rea_process/zome/src/lib.rs index d379f3df6..9e752b0b3 100644 --- a/zomes/rea_process/zome/src/lib.rs +++ b/zomes/rea_process/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_process_rpc::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProcessAddress::entry_def(), EntryDef { id: PROCESS_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_process_specification/zome/src/lib.rs b/zomes/rea_process_specification/zome/src/lib.rs index 
666465880..d519e153b 100644 --- a/zomes/rea_process_specification/zome/src/lib.rs +++ b/zomes/rea_process_specification/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_process_specification_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProcessSpecificationAddress::entry_def(), EntryDef { id: PROCESS_SPECIFICATION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_proposal/zome/src/lib.rs b/zomes/rea_proposal/zome/src/lib.rs index 825a77388..f355d01ab 100644 --- a/zomes/rea_proposal/zome/src/lib.rs +++ b/zomes/rea_proposal/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_proposal_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProposalAddress::entry_def(), EntryDef { id: PROPOSAL_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_proposed_intent/zome/src/lib.rs b/zomes/rea_proposed_intent/zome/src/lib.rs index b9e64a699..276388401 100644 --- a/zomes/rea_proposed_intent/zome/src/lib.rs +++ b/zomes/rea_proposed_intent/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_proposed_intent_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProposedIntentAddress::entry_def(), EntryDef { id: PROPOSED_INTENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_proposed_to/zome/src/lib.rs b/zomes/rea_proposed_to/zome/src/lib.rs index 89f2373e1..4f4ddf19e 100644 --- a/zomes/rea_proposed_to/zome/src/lib.rs +++ b/zomes/rea_proposed_to/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_proposed_to_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProposedToAddress::entry_def(), EntryDef { id: PROPOSED_TO_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_resource_specification/zome/src/lib.rs b/zomes/rea_resource_specification/zome/src/lib.rs index f5ea89ce9..1b56a63dc 100644 --- a/zomes/rea_resource_specification/zome/src/lib.rs +++ b/zomes/rea_resource_specification/zome/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_resource_specification_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ResourceSpecificationAddress::entry_def(), EntryDef { id: ECONOMIC_RESOURCE_SPECIFICATION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_satisfaction/zome_observation/src/lib.rs b/zomes/rea_satisfaction/zome_observation/src/lib.rs index cd72e57c0..bdf55a5ff 100644 --- a/zomes/rea_satisfaction/zome_observation/src/lib.rs +++ b/zomes/rea_satisfaction/zome_observation/src/lib.rs @@ -19,6 +19,7 @@ use hc_zome_rea_satisfaction_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + SatisfactionAddress::entry_def(), EntryDef { id: SATISFACTION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_satisfaction/zome_planning/src/lib.rs b/zomes/rea_satisfaction/zome_planning/src/lib.rs index e4d354f6e..747cb2f8b 100644 --- a/zomes/rea_satisfaction/zome_planning/src/lib.rs +++ b/zomes/rea_satisfaction/zome_planning/src/lib.rs @@ -17,6 +17,7 @@ use hc_zome_rea_satisfaction_storage_consts::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + SatisfactionAddress::entry_def(), EntryDef { 
id: SATISFACTION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, From 3901a70bc2c3194d86e39c74224c6adbfcebe6d0 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 19:30:36 +1000 Subject: [PATCH 011/181] reimplement temporary 'all' index logic to use non-Path-based indexing structure --- lib/hdk_records/src/identity_helpers.rs | 21 ++++++++++++++++++--- lib/hdk_records/src/lib.rs | 2 ++ lib/hdk_records/src/local_index_helpers.rs | 8 ++++++-- 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/lib/hdk_records/src/identity_helpers.rs b/lib/hdk_records/src/identity_helpers.rs index 317c763e7..da9afbe03 100644 --- a/lib/hdk_records/src/identity_helpers.rs +++ b/lib/hdk_records/src/identity_helpers.rs @@ -49,7 +49,7 @@ pub fn calculate_identity_address( Entry: TryFrom, WasmError: From, { - Ok(hash_entry(base_address.clone())?) + Ok(hash_entry(base_address.to_owned())?) } /// Given an identity `EntryHash` (ie. the result of `create_entry_identity`), @@ -78,8 +78,12 @@ pub fn read_entry_identity( /// This identifier is intended to be used as an anchor to base links to/from the /// entry onto. /// +/// Also links the identifier to a global index for all entries of the given `entry_type`. +/// :TODO: replace this linkage with date-ordered sparse index based on record creation time +/// @see query_root_index() +/// pub fn create_entry_identity( - entry_type_root_path: S, + entry_type: S, initial_address: &A, ) -> RecordAPIResult where S: AsRef, @@ -89,5 +93,16 @@ pub fn create_entry_identity( WasmError: From, { create_entry(initial_address.to_owned())?; - calculate_identity_address(entry_type_root_path, initial_address) + + let id_hash = calculate_identity_address(&entry_type, initial_address)?; + + let index_path = entry_type_root_path(&entry_type); + index_path.ensure()?; + create_link( + index_path.path_entry_hash()?, + id_hash.to_owned(), + LinkTag::new(crate::identifiers::RECORD_GLOBAL_INDEX_LINK_TAG), + )?; + + Ok(id_hash) } diff --git a/lib/hdk_records/src/lib.rs b/lib/hdk_records/src/lib.rs index 74a1e99ed..329f1dade 100644 --- a/lib/hdk_records/src/lib.rs +++ b/lib/hdk_records/src/lib.rs @@ -108,4 +108,6 @@ pub mod identifiers { // Holochain DHT storage type IDs pub const RECORD_INITIAL_ENTRY_LINK_TAG: &'static [u8] = b"initial_entry"; pub const RECORD_IDENTITY_ANCHOR_LINK_TAG: &'static [u8] = b"identity_anchor"; + // temporary: @see query_root_index() + pub const RECORD_GLOBAL_INDEX_LINK_TAG: &'static [u8] = b"all_entries"; } diff --git a/lib/hdk_records/src/local_index_helpers.rs b/lib/hdk_records/src/local_index_helpers.rs index cf92eb634..895ce5d01 100644 --- a/lib/hdk_records/src/local_index_helpers.rs +++ b/lib/hdk_records/src/local_index_helpers.rs @@ -26,7 +26,7 @@ use crate::{ /// Given a type of entry, returns a Vec of *all* records of that entry registered /// internally with the DHT. 
/// -/// :TODO: sharding strategy for 2-nth order link destinations +/// :TODO: replace with date-ordered sparse index based on record creation time /// pub fn query_root_index<'a, T, R, O, I: AsRef>( base_entry_type: &I, @@ -38,7 +38,11 @@ pub fn query_root_index<'a, T, R, O, I: AsRef>( R: std::fmt::Debug + Identified, { let index_path = entry_type_root_path(base_entry_type); - let linked_records: Vec = index_path.children()?.into(); + + let linked_records: Vec = get_links( + index_path.path_entry_hash()?, + Some(LinkTag::new(crate::identifiers::RECORD_GLOBAL_INDEX_LINK_TAG)), + )?; Ok(linked_records.iter() .map(|link| { read_record_entry_by_identity(&link.target) }) From 6addbf54ab0189506ab92943a4f41844300ff9bc Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 20:18:08 +1000 Subject: [PATCH 012/181] comment UUID helpers lib, remove unneeded method, comment method to deprecate later --- lib/hdk_type_serialization_macros/src/lib.rs | 61 +++++++++++++------- 1 file changed, 40 insertions(+), 21 deletions(-) diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_type_serialization_macros/src/lib.rs index d135e475e..da5c78c1a 100644 --- a/lib/hdk_type_serialization_macros/src/lib.rs +++ b/lib/hdk_type_serialization_macros/src/lib.rs @@ -1,8 +1,27 @@ /** - * Type aliases used to ensure explicit awareness of applicable record types in VF structs + * The primary goal of this module is to provide structs which ensure universal uniqueness + * of record identifiers in Holochain apps. This is achieved by concatenating the `DnaHash` + * of the host network space with an identifier which is locally-unique within that membrane. * - * To convert wrapped values to an `EntryHash`, use `aliased_val.as_ref()`. - * To convert a plain `EntryHash` to its wrapped form, use `raw_address.into()`. + * Such information is sufficient to build a universally-unique Holochain URI, and allows + * apps to mix references to disparate network spaces in the same data structures. + * + * To convert wrapped values to an `EntryHash` or `DnaHash`, use `aliased_val.as_ref()` in assignment + * to the appropriate type. + * + * A secondary goal is to provide an ability to create distinct types for different identifiers, + * such that identifiers cannot be accidentally mismatched to the wrong record types. + * For example, given these two definitions- + * + * addressable_identifier!(CommitmentAddress => EntryHash); + * addressable_identifier!(IntentAddress => EntryHash); + * + * 'CommitmentAddress' and 'IntentAddress' cannot be confused even though they contain data + * of the same format, and the compiler will complain if a 'CommitmentAddress' is incorrectly + * assigned to a struct field or method parameter expecting an 'IntentAddress'. This helps to + * prevent developer error when your application has a large number of different entry types. + * + * This same functionality is also provided for simple values with the `simple_alias` macro. */ use std::fmt::Debug; @@ -10,6 +29,9 @@ pub use hdk::prelude::*; pub use hdk; pub use holo_hash::*; +/// Generate a simple newtype wrapper around some raw data, to enforce distinctness of +/// different data items with the same underlying format. +/// #[macro_export] macro_rules! simple_alias { ($id:ident => $base:ty) => { @@ -36,7 +58,7 @@ macro_rules! simple_alias { } } -/// Supertrait to bind all dependent traits that implement identifier behaviours. +/// Supertrait to bind all dependent traits that implement unique identifier behaviours. 
/// pub trait DnaAddressable where Self: Clone + Eq + std::hash::Hash @@ -48,6 +70,13 @@ pub trait DnaAddressable fn new(dna: DnaHash, identifier: B) -> Self; } +/// Generate a universally-unique identifier for some DNA-local identifier +/// (an `EntryHash` or `AgentPubKey`). +/// +/// This also defines an `EntryDef` of the same name so that the identifier +/// can be directly stored to the DHT, which is required for building foreign-key +/// indexes which reference remote data. +/// #[macro_export] macro_rules! addressable_identifier { ($r:ident => $base:ty) => { @@ -103,6 +132,10 @@ pub trait DnaIdentifiable fn new(dna: DnaHash, identifier: B) -> Self; } +/// Generate a universally-unique identifier for some DNA-local string identifier. +/// The implementor must ensure that this string ID remains unique in the DNA via +/// whatever application logic is relevant to the use-case. +/// #[macro_export] macro_rules! dna_scoped_string { ($r:ident) => { @@ -138,6 +171,9 @@ macro_rules! dna_scoped_string { /// /// Use the `addressable_identifier!` macro to auto-implement type-specific identifiers compatible with this method of encoding. /// +/// :TODO: remove this method, it's currently used in conversion of IDs to cursors in response formatting and +/// should probably be replaced with the HoloHashB64 variants or similar functionality. +/// pub fn extern_id_to_bytes(id: &A) -> Vec where A: AsRef + AsRef, B: Clone, @@ -151,23 +187,6 @@ pub fn extern_id_to_bytes(id: &A) -> Vec [AnyDhtHash::from((*entry_address).clone()).get_raw_36(), dna_hash.get_raw_36()].concat() } -/// Convert raw bytes encoded into a `Path` index into its full identity pair -/// -/// @see hdk_type_serialization_macros::extern_id_to_bytes -/// -pub fn bytes_to_extern_id(key_bytes: &[u8]) -> Result - where A: DnaAddressable, -{ - if key_bytes.len() != HOLO_HASH_UNTYPED_LEN * 2 { return Err(SerializedBytesError::Deserialize("Invalid input length for bytes_to_extern_id!".to_string())) } - - // pull DnaHash from last 36 bytes; first 36 are for EntryHash/HeaderHash - // @see holo_hash::hash - Ok(A::new( - DnaHash::from_raw_36(key_bytes[HOLO_HASH_UNTYPED_LEN..].to_vec()), - EntryHash::from_raw_36(key_bytes[0..HOLO_HASH_UNTYPED_LEN].to_vec()), - )) -} - #[cfg(test)] mod tests { use super::*; From a9d65e445d82541cbcad930f3f2adcb5cc2e4d20 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 20:20:46 +1000 Subject: [PATCH 013/181] rename crate to more appropriate naming --- Cargo.toml | 2 +- README.md | 4 ++-- lib/hdk_records/Cargo.toml | 2 +- lib/hdk_records/src/anchored_record_helpers.rs | 2 +- lib/hdk_records/src/identity_helpers.rs | 2 +- lib/hdk_records/src/lib.rs | 2 +- lib/hdk_records/src/record_helpers.rs | 2 +- lib/hdk_records/src/record_interface.rs | 2 +- lib/hdk_semantic_indexes/README.md | 8 ++++---- lib/hdk_semantic_indexes/rpc/Cargo.toml | 2 +- lib/hdk_semantic_indexes/rpc/src/lib.rs | 2 +- .../Cargo.toml | 2 +- .../README.md | 2 +- .../src/lib.rs | 0 lib/vf_attributes_hdk/Cargo.toml | 2 +- lib/vf_attributes_hdk/src/lib.rs | 4 ++-- 16 files changed, 20 insertions(+), 20 deletions(-) rename lib/{hdk_type_serialization_macros => hdk_uuid_types}/Cargo.toml (81%) rename lib/{hdk_type_serialization_macros => hdk_uuid_types}/README.md (98%) rename lib/{hdk_type_serialization_macros => hdk_uuid_types}/src/lib.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index e335a5396..74c3ad1c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ members= [ "lib/hdk_semantic_indexes/rpc", "lib/hdk_semantic_indexes/zome", 
"lib/hdk_semantic_indexes/zome_derive", - "lib/hdk_type_serialization_macros", + "lib/hdk_uuid_types", "lib/vf_actions", "lib/serde_maybe_undefined", "lib/vf_measurement", diff --git a/README.md b/README.md index 74882345f..b3aa22951 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ These are fully distributed, agent-centric applications. There are no transactio - [Storage constants \(database internals\)](#storage-constants-database-internals) - [Library modules](#library-modules) - [`hdk_records`](#hdk_records) - - [`hdk_type_serialization_macros`](#hdk_type_serialization_macros) + - [`hdk_uuid_types`](#hdk_uuid_types) - [`serde_maybe_undefined`](#serde_maybe_undefined) - [Other names](#other-names) - [License](#license) @@ -215,7 +215,7 @@ The Rust crates in [**`lib/`**](lib/) provide some abstract functionality and ty Manages CRUD and indexing operations for entries, including DNA-local and remote-DNA indexing capabilities. Leverages [DNA Auth Resolver](https://github.com/holochain-open-dev/dna-auth-resolver/) to grant capabilities for cross-DNA API calls. -#### `hdk_type_serialization_macros` +#### `hdk_uuid_types` Exports an `addressable_identifier!()` macro which wraps a primitive type implementing `Into` in a struct scoping it to a `DnaHash`; as well as `dna_scoped_string!()` which does the same for `String`. diff --git a/lib/hdk_records/Cargo.toml b/lib/hdk_records/Cargo.toml index ab56361e2..b761f1920 100644 --- a/lib/hdk_records/Cargo.toml +++ b/lib/hdk_records/Cargo.toml @@ -14,7 +14,7 @@ serde_maybe_undefined = { path = "../serde_maybe_undefined" } hdk_rpc_errors = { path = "../hdk_rpc_errors" } hdk_semantic_indexes_zome_rpc = { path = "../hdk_semantic_indexes/rpc" } hc_zome_dna_auth_resolver_lib = {git = "https://github.com/holochain-open-dev/dna-auth-resolver", rev = "b1adec5", package = "hc_zome_dna_auth_resolver_lib"} -hdk_type_serialization_macros = { path = "../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../hdk_uuid_types" } [lib] crate-type = ["lib"] diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index 27ee2b145..b8d74b4ca 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -11,7 +11,7 @@ */ use hdk::prelude::*; use hdk::hash_path::path::Component; -use hdk_type_serialization_macros::{ +use hdk_uuid_types::{ DnaAddressable, DnaIdentifiable, }; diff --git a/lib/hdk_records/src/identity_helpers.rs b/lib/hdk_records/src/identity_helpers.rs index da9afbe03..d8061600d 100644 --- a/lib/hdk_records/src/identity_helpers.rs +++ b/lib/hdk_records/src/identity_helpers.rs @@ -19,7 +19,7 @@ * @since 2019-05-16 */ use hdk::prelude::*; -use hdk_type_serialization_macros::DnaAddressable; +use hdk_uuid_types::DnaAddressable; use crate::{ RecordAPIResult, DataIntegrityError, diff --git a/lib/hdk_records/src/lib.rs b/lib/hdk_records/src/lib.rs index 329f1dade..8205f518c 100644 --- a/lib/hdk_records/src/lib.rs +++ b/lib/hdk_records/src/lib.rs @@ -4,7 +4,7 @@ use thiserror::Error; use std::convert::Infallible; use hdk::prelude::*; -pub use hdk_type_serialization_macros::DnaAddressable; +pub use hdk_uuid_types::DnaAddressable; pub use hdk::prelude::{CellId, EntryHash, hash_entry}; pub use holo_hash::{DnaHash}; diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index fd9c5d8ad..08183882d 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -222,7 +222,7 @@ pub 
fn delete_record(address: &HeaderHash) -> RecordAPIResult #[cfg(test)] mod tests { use super::*; - use hdk_type_serialization_macros::{ simple_alias, addressable_identifier }; + use hdk_uuid_types::{ simple_alias, addressable_identifier }; use crate::{generate_record_entry}; simple_alias!(EntryId => EntryHash); diff --git a/lib/hdk_records/src/record_interface.rs b/lib/hdk_records/src/record_interface.rs index 6fc1ee997..67f0b70fe 100644 --- a/lib/hdk_records/src/record_interface.rs +++ b/lib/hdk_records/src/record_interface.rs @@ -9,7 +9,7 @@ */ use hdk::prelude::*; -use hdk_type_serialization_macros::DnaAddressable; +use hdk_uuid_types::DnaAddressable; use crate::{ RecordAPIResult, diff --git a/lib/hdk_semantic_indexes/README.md b/lib/hdk_semantic_indexes/README.md index db40e68ef..db40cf998 100644 --- a/lib/hdk_semantic_indexes/README.md +++ b/lib/hdk_semantic_indexes/README.md @@ -33,7 +33,7 @@ As such, there are four crates comprising this module in its completeness: ### Defining an index -You will need to declare two zome crates- one for each side of the index. In addition to these zome crates you also need to define some identifier types implementing `hdk_type_serialization_macros::DnaAddressable` and map them to a `QueryParams` struct which forms the external API. +You will need to declare two zome crates- one for each side of the index. In addition to these zome crates you also need to define some identifier types implementing `hdk_uuid_types::DnaAddressable` and map them to a `QueryParams` struct which forms the external API. In the example above, this might look as follows: @@ -41,7 +41,7 @@ In the example above, this might look as follows: use hdk_semantic_indexes_zome_derive::index_zome; //-- usually, you would define these shared identifier types in another crate -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; addressable_identifier!(PostId => EntryHash); //-- @@ -67,7 +67,7 @@ struct Writer { use hdk_semantic_indexes_zome_derive::index_zome; //-- usually, you would define these shared identifier types in another crate -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; addressable_identifier!(AuthorId => AgentPubKey); //-- @@ -229,7 +229,7 @@ No other identifiers need match- in this example, the client zome need not have ### A word on `DnaAddressable` identifiers -[`hdk_type_serialization_macros`](../hdk_type_serialization_macros) provides macros for wrapping "raw" (DNA-local) identifiers with an associated `DnaHash`, which makes them universally-unique between all cells in a running Holochain conductor. +[`hdk_uuid_types`](../hdk_uuid_types) provides macros for wrapping "raw" (DNA-local) identifiers with an associated `DnaHash`, which makes them universally-unique between all cells in a running Holochain conductor. Since **all indexes provided by this library manage many:many relationships between application cells** it is possible that links between records might reference foreign records in multiple different networks. Complicating this further, if UI applications are to be able to dynamically compose different network arrangements to create "agent-centric" views which interact with multiple communities simultaneously; then **the possibility exists for such multiple references to different networks to be created independently of the original design of each application**. 
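To make the above concrete, here is a minimal editor's sketch of constructing a `DnaAddressable` identifier so that references remain unambiguous across network spaces. It assumes the `hdk_uuid_types` macros and the HDK's `dna_info()` helper; `identify_post` is a hypothetical example function, not part of the library:

```rust
use hdk::prelude::*;
use hdk_uuid_types::{addressable_identifier, DnaAddressable};

// a distinct, DNA-scoped ID type for this record type
addressable_identifier!(PostId => EntryHash);

// hypothetical helper: qualify a DNA-local EntryHash with the hash of the
// executing cell's network, yielding a universally-unique identifier
fn identify_post(entry_hash: EntryHash) -> ExternResult<PostId> {
    Ok(PostId::new(dna_info()?.hash, entry_hash))
}
```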
diff --git a/lib/hdk_semantic_indexes/rpc/Cargo.toml b/lib/hdk_semantic_indexes/rpc/Cargo.toml index 949cac6a6..a376c7810 100644 --- a/lib/hdk_semantic_indexes/rpc/Cargo.toml +++ b/lib/hdk_semantic_indexes/rpc/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" serde = "1" holochain_serialized_bytes = "0.0.51" hdk_rpc_errors = { path = "../../hdk_rpc_errors" } -hdk_type_serialization_macros = { path = "../../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../../hdk_uuid_types" } [lib] crate-type = ["lib"] diff --git a/lib/hdk_semantic_indexes/rpc/src/lib.rs b/lib/hdk_semantic_indexes/rpc/src/lib.rs index bb9773daf..332243a45 100644 --- a/lib/hdk_semantic_indexes/rpc/src/lib.rs +++ b/lib/hdk_semantic_indexes/rpc/src/lib.rs @@ -6,7 +6,7 @@ * @since 2021-10-01 */ use holochain_serialized_bytes::prelude::*; -use hdk_type_serialization_macros::{ +use hdk_uuid_types::{ DnaAddressable, EntryHash, HeaderHash, }; diff --git a/lib/hdk_type_serialization_macros/Cargo.toml b/lib/hdk_uuid_types/Cargo.toml similarity index 81% rename from lib/hdk_type_serialization_macros/Cargo.toml rename to lib/hdk_uuid_types/Cargo.toml index 70a03a50d..67f9ab0df 100644 --- a/lib/hdk_type_serialization_macros/Cargo.toml +++ b/lib/hdk_uuid_types/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "hdk_type_serialization_macros" +name = "hdk_uuid_types" version = "0.1.0" authors = ["pospi "] edition = "2018" diff --git a/lib/hdk_type_serialization_macros/README.md b/lib/hdk_uuid_types/README.md similarity index 98% rename from lib/hdk_type_serialization_macros/README.md rename to lib/hdk_uuid_types/README.md index 1f8f42cea..17e32f206 100644 --- a/lib/hdk_type_serialization_macros/README.md +++ b/lib/hdk_uuid_types/README.md @@ -15,7 +15,7 @@ This module provides macros for wrapping "raw" (DNA-local) `EntryHash`, `HeaderH ## Usage ```rust -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; // "Newtype struct" pattern, wraps values in different types to enforce compile-time distinctness. // To access the raw wrapped value, use `.as_ref()`. 
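As an illustration of the compile-time distinctness these macros provide, a hedged sketch (`satisfy` and `example` are invented names for demonstration, not library API):

```rust
use hdk_uuid_types::*;

addressable_identifier!(CommitmentAddress => EntryHash);
addressable_identifier!(IntentAddress => EntryHash);

fn satisfy(_commitment: CommitmentAddress, _intent: IntentAddress) { /* ... */ }

fn example(dna: DnaHash, hash: EntryHash) {
    let c = CommitmentAddress::new(dna.clone(), hash.clone());
    let i = IntentAddress::new(dna, hash);
    satisfy(c, i);    // OK: each argument matches its expected ID type
    // satisfy(i, c); // compile error, despite identical underlying data
}
```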
diff --git a/lib/hdk_type_serialization_macros/src/lib.rs b/lib/hdk_uuid_types/src/lib.rs similarity index 100% rename from lib/hdk_type_serialization_macros/src/lib.rs rename to lib/hdk_uuid_types/src/lib.rs diff --git a/lib/vf_attributes_hdk/Cargo.toml b/lib/vf_attributes_hdk/Cargo.toml index b88414b63..62672b7cc 100644 --- a/lib/vf_attributes_hdk/Cargo.toml +++ b/lib/vf_attributes_hdk/Cargo.toml @@ -11,7 +11,7 @@ holochain_serialized_bytes = "0.0" holochain_zome_types = { version = "0.0", default-features = false } holo_hash = "0.0" -hdk_type_serialization_macros = { path = "../hdk_type_serialization_macros" } +hdk_uuid_types = { path = "../hdk_uuid_types" } hdk_semantic_indexes_zome_rpc = { path = "../hdk_semantic_indexes/rpc" } [lib] diff --git a/lib/vf_attributes_hdk/src/lib.rs b/lib/vf_attributes_hdk/src/lib.rs index 5904d190e..1a3dc1f24 100644 --- a/lib/vf_attributes_hdk/src/lib.rs +++ b/lib/vf_attributes_hdk/src/lib.rs @@ -1,10 +1,10 @@ -use hdk_type_serialization_macros::*; +use hdk_uuid_types::*; // re-exports for convenience pub use chrono::{ FixedOffset, Utc, DateTime }; pub use holo_hash::{ AgentPubKey, EntryHash, HeaderHash }; pub use holochain_zome_types::timestamp::Timestamp; -pub use hdk_type_serialization_macros::{DnaAddressable}; +pub use hdk_uuid_types::{DnaAddressable}; pub use hdk_semantic_indexes_zome_rpc::{ByHeader, ByAddress}; simple_alias!(ActionId => String); From db1c0d18a7dd8fb0aa94816a1522997dd717339d Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 24 Feb 2022 21:44:43 +1000 Subject: [PATCH 014/181] pin GraphQL client module to same version as schema module --- modules/graphql-client/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 224eaf856..87c617e29 100644 --- a/modules/graphql-client/package.json +++ b/modules/graphql-client/package.json @@ -1,7 +1,7 @@ { "name": "@vf-ui/graphql-client-holochain", "private": true, - "version": "0.0.1", + "version": "0.0.1-alpha.3", "description": "ValueFlows GraphQLClient configurations, providing pluggable backend datasources for different distributed, federated and client/server infrastructure.", "main": "index.ts", "scripts": { From e0f1d3fc46a072f08998ed55fbe83d8a56499c13 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 11:41:05 +1000 Subject: [PATCH 015/181] fix release tag match, not a real regex --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 065cd45fe..ae311847f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ name: Release on: push: tags: - - ^[0-9]+\.[0-9]+\.[0-9]+.* + - '[0-9]+.[0-9]+.[0-9]+**' # on: # push: From 1ef46ddd53380338f69383b0094025fdb855e6f4 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 11:56:37 +1000 Subject: [PATCH 016/181] remove unnecessary double-handling of null connection URI --- apps/holorea-graphql-explorer/src/App.tsx | 12 +++++------- modules/vf-graphql-holochain/connection.ts | 18 +++++------------- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/apps/holorea-graphql-explorer/src/App.tsx b/apps/holorea-graphql-explorer/src/App.tsx index 848968ff7..b83604efe 100644 --- a/apps/holorea-graphql-explorer/src/App.tsx +++ b/apps/holorea-graphql-explorer/src/App.tsx @@ -51,13 +51,11 @@ class App extends Component { async connect () { let dnaMappings: DNAMappings - // this is allowed 
to be undefined, but if it - // is, it will fall back to assuming a - // Holochain Launcher environment - let connectionUrl = process.env.REACT_APP_HC_CONN_URL as string + // This is allowed to be undefined, but if it is, it will fall back to assuming a + // Holochain Launcher environment. + let conductorUri = process.env.REACT_APP_HC_CONN_URL as string || '' - const { connectionPromise, socketURI } = openConnection(connectionUrl); - const conn = await connectionPromise + const conn = await openConnection(conductorUri); const appInfo = await conn.appInfo({ installed_app_id: (process.env.REACT_APP_HC_APP_ID as string) }) if (!appInfo) { throw new Error(`appInfo call failed for Holochain app '${process.env.REACT_APP_HC_APP_ID}' - ensure the name is correct and that the agent's app installation has not failed`) @@ -74,7 +72,7 @@ class App extends Component { const schema = await bindSchema({ dnaConfig: dnaMappings, - conductorUri: socketURI + conductorUri, }) // @ts-ignore not sure why this is error/red (Connor) const link = new SchemaLink({ schema }) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 7cffd816c..6d5a8aea8 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -30,11 +30,9 @@ type RecordId = [HoloHash, HoloHash] // Connection persistence and multi-conductor / multi-agent handling //---------------------------------------------------------------------------------------------------------------------- -const HOLOCHAIN_LAUNCHER_CONTEXT_ID = 'unspecifiedForHolochainLauncher' -// the only environment at this point that will -// work without a specified websocket url is the Holochain -// Launcher -let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL || HOLOCHAIN_LAUNCHER_CONTEXT_ID +// :NOTE: when calling AppWebsocket.connect for the Launcher Context +// it just expects an empty string for the socketURI. Other environments require it. +let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL || '' const CONNECTION_CACHE: { [i: string]: Promise } = {} /** @@ -53,19 +51,13 @@ export const openConnection = (socketURI: string, traceAppSignals?: AppSignalCb) console.log(`Init Holochain connection: ${socketURI}`) - // when calling AppWebsocket.connect for the Launcher Context - // it just expects an empty string for the socketURI - const uriToPassAppWebsocket = socketURI === HOLOCHAIN_LAUNCHER_CONTEXT_ID ? 
'' : socketURI - CONNECTION_CACHE[socketURI] = AppWebsocket.connect(uriToPassAppWebsocket, undefined, traceAppSignals) + CONNECTION_CACHE[socketURI] = AppWebsocket.connect(socketURI, undefined, traceAppSignals) .then((client) => { console.log(`Holochain connection to ${socketURI} OK`) return client }) - return { - connectionPromise: CONNECTION_CACHE[socketURI], - socketURI - } + return CONNECTION_CACHE[socketURI] } const getConnection = (socketURI: string) => { From 1e77e84e4957f595cf7346b75179d3b8bceca898 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 16:53:03 +1000 Subject: [PATCH 017/181] refactor autoconnect logic out of explorer app into Holochain connector module remove deprecated type alias for DNAMappings --- apps/holorea-graphql-explorer/src/App.tsx | 36 ++--------------- modules/graphql-client/index.ts | 23 +++++++++-- modules/vf-graphql-holochain/connection.ts | 47 +++++++++++++++++++--- modules/vf-graphql-holochain/index.ts | 9 +++-- modules/vf-graphql-holochain/types.ts | 4 +- 5 files changed, 71 insertions(+), 48 deletions(-) diff --git a/apps/holorea-graphql-explorer/src/App.tsx b/apps/holorea-graphql-explorer/src/App.tsx index b83604efe..7d026baff 100644 --- a/apps/holorea-graphql-explorer/src/App.tsx +++ b/apps/holorea-graphql-explorer/src/App.tsx @@ -6,7 +6,7 @@ import GraphiQL, { Fetcher } from 'graphiql' // @ts-ignore import GraphiQLExplorer from 'graphiql-explorer' -import bindSchema, { openConnection, DNAMappings, CellId } from '@valueflows/vf-graphql-holochain' +import bindSchema, { autoConnect } from '@valueflows/vf-graphql-holochain' import 'graphiql/graphiql.css' import './App.css' @@ -28,11 +28,6 @@ interface State { explorerIsOpen: boolean, } -type ActualInstalledCell = { // :TODO: remove this when fixed in tryorama - cell_id: CellId; - role_id: string; -} - class App extends Component { _graphiql?: GraphiQL state = { @@ -49,36 +44,11 @@ class App extends Component { } async connect () { - let dnaMappings: DNAMappings - - // This is allowed to be undefined, but if it is, it will fall back to assuming a - // Holochain Launcher environment. 
- let conductorUri = process.env.REACT_APP_HC_CONN_URL as string || '' - - const conn = await openConnection(conductorUri); - const appInfo = await conn.appInfo({ installed_app_id: (process.env.REACT_APP_HC_APP_ID as string) }) - if (!appInfo) { - throw new Error(`appInfo call failed for Holochain app '${process.env.REACT_APP_HC_APP_ID}' - ensure the name is correct and that the agent's app installation has not failed`) - } - - dnaMappings = (appInfo['cell_data'] as unknown[] as ActualInstalledCell[]).reduce((mappings, { cell_id, role_id }) => { - const hrea_cell_match = role_id.match(/hrea_(\w+)_\d+/) - if (!hrea_cell_match) { return mappings } - - mappings[hrea_cell_match[1] as keyof DNAMappings] = cell_id - return mappings - }, {} as DNAMappings) - console.log('Connecting to detected Holochain cells:', dnaMappings) - - const schema = await bindSchema({ - dnaConfig: dnaMappings, - conductorUri, - }) - // @ts-ignore not sure why this is error/red (Connor) + let { dnaConfig, conductorUri } = await autoConnect() + const schema = await bindSchema({ dnaConfig, conductorUri }) const link = new SchemaLink({ schema }) this.setState({ - // @ts-ignore not sure why this is error/red (Connor) schema, link, fetcher: ((operation: any) => { diff --git a/modules/graphql-client/index.ts b/modules/graphql-client/index.ts index 3ca21890d..d5a558f9f 100644 --- a/modules/graphql-client/index.ts +++ b/modules/graphql-client/index.ts @@ -10,9 +10,16 @@ import { InMemoryCache, ApolloClient } from '@apollo/client' import { SchemaLink } from '@apollo/link-schema' -import bindSchema from '@valueflows/vf-graphql-holochain' +import bindSchema, { autoConnect, APIOptions, DNAIdMappings } from '@valueflows/vf-graphql-holochain' -async function initGraphQLClient(options) { +// Same as OpenConnectionOptions but for external client where dnaConfig may be autodetected +interface AutoConnectionOptions { + dnaConfig?: DNAIdMappings, +} + +export type ClientOptions = APIOptions & AutoConnectionOptions + +export async function initGraphQLClient(options: APIOptions) { const schema = await bindSchema(options/* modules, DNA id bindings */) return new ApolloClient({ @@ -21,4 +28,14 @@ async function initGraphQLClient(options) { }); } -export default initGraphQLClient; +async function connect(options: ClientOptions) { + // autodetect `CellId`s if no explicit `dnaConfig` is provided + if (!options.dnaConfig) { + let { dnaConfig } = await autoConnect(options.conductorUri) + options.dnaConfig = dnaConfig + } + + return await initGraphQLClient(options) +} + +export default connect; diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 6d5a8aea8..683872b57 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -26,15 +26,33 @@ import { DNAIdMappings } from './types' type RecordId = [HoloHash, HoloHash] +type ActualInstalledCell = { // :TODO: remove this when fixed in tryorama + cell_id: CellId; + role_id: string; +} + //---------------------------------------------------------------------------------------------------------------------- // Connection persistence and multi-conductor / multi-agent handling //---------------------------------------------------------------------------------------------------------------------- // :NOTE: when calling AppWebsocket.connect for the Launcher Context // it just expects an empty string for the socketURI. Other environments require it. 
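// Editor's note (sketch, mirroring the App.tsx change above): with cell
// autodetection, consumers can bootstrap the schema without wiring CellIds
// manually, e.g.
//   const { dnaConfig, conductorUri } = await autoConnect()
//   const schema = await bindSchema({ dnaConfig, conductorUri })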
-let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL || '' +let DEFAULT_CONNECTION_URI = process.env.REACT_APP_HC_CONN_URL as string || '' +let HOLOCHAIN_APP_ID = process.env.REACT_APP_HC_APP_ID as string || '' + const CONNECTION_CACHE: { [i: string]: Promise } = {} +export async function autoConnect(conductorUri?: string, appID?: string, traceAppSignals?: AppSignalCb) { + if (!conductorUri) { + conductorUri = DEFAULT_CONNECTION_URI + } + + const conn = await openConnection(conductorUri, traceAppSignals) + const dnaConfig = await sniffHolochainAppCells(conn, appID) + + return { conn, dnaConfig, conductorUri } +} + /** * Inits a connection for the given websocket URI. If no `socketURI` is provided, * a connection is attempted via the `REACT_APP_HC_CONN_URL` environment variable. @@ -45,10 +63,6 @@ const CONNECTION_CACHE: { [i: string]: Promise } = {} * been previously performed for the same `socketURI`. */ export const openConnection = (socketURI: string, traceAppSignals?: AppSignalCb) => { - if (!socketURI) { - socketURI = DEFAULT_CONNECTION_URI - } - console.log(`Init Holochain connection: ${socketURI}`) CONNECTION_CACHE[socketURI] = AppWebsocket.connect(socketURI, undefined, traceAppSignals) @@ -68,6 +82,29 @@ const getConnection = (socketURI: string) => { return CONNECTION_CACHE[socketURI] } +/** + * Introspect an active Holochain connection's app cells to determine cell IDs + * for mapping to the schema resolvers. + */ +export async function sniffHolochainAppCells(conn: AppWebsocket, appID?: string) { + const appInfo = await conn.appInfo({ installed_app_id: appID || HOLOCHAIN_APP_ID }) + if (!appInfo) { + throw new Error(`appInfo call failed for Holochain app '${appID || HOLOCHAIN_APP_ID}' - ensure the name is correct and that the app installation has succeeded`) + } + + let dnaMappings: DNAIdMappings = (appInfo['cell_data'] as unknown[] as ActualInstalledCell[]).reduce((mappings, { cell_id, role_id }) => { + const hrea_cell_match = role_id.match(/hrea_(\w+)_\d+/) + if (!hrea_cell_match) { return mappings } + + mappings[hrea_cell_match[1] as keyof DNAIdMappings] = cell_id + return mappings + }, {} as DNAIdMappings) + + console.info('Connecting to detected Holochain cells:', dnaMappings) + + return dnaMappings +} + //---------------------------------------------------------------------------------------------------------------------- // Holochain / GraphQL type translation layer diff --git a/modules/vf-graphql-holochain/index.ts b/modules/vf-graphql-holochain/index.ts index 4343b42bb..5f0584468 100644 --- a/modules/vf-graphql-holochain/index.ts +++ b/modules/vf-graphql-holochain/index.ts @@ -10,19 +10,20 @@ import { makeExecutableSchema } from '@graphql-tools/schema' -import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAMappings, CellId } from './types' +import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAIdMappings, CellId } from './types' import generateResolvers from './resolvers' -import { mapZomeFn, openConnection } from './connection' +import { mapZomeFn, autoConnect, openConnection, sniffHolochainAppCells } from './connection' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') export { // direct access to resolver callbacks generator for apps that need to bind to other GraphQL schemas generateResolvers, + // connection handling methods + autoConnect, openConnection, sniffHolochainAppCells, // direct access to Holochain zome method bindings for authoring own custom resolvers bound to non-REA DNAs - openConnection, mapZomeFn, // types 
that wrapper libraries may need to manage conductor DNA connection logic - DNAMappings, CellId, + DNAIdMappings, CellId, APIOptions, } /** diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index 61e78ab12..ea875517c 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -16,7 +16,7 @@ import { Kind } from 'graphql/language' // Configuration object to allow specifying custom conductor DNA IDs to bind to. // Default is to use a DNA with the same ID as the mapping ID (ie. agent = "agent") -export interface DNAMappings { +export interface DNAIdMappings { agent: CellId, observation: CellId, planning: CellId, @@ -26,8 +26,6 @@ export interface DNAMappings { export { CellId } -export type DNAIdMappings = DNAMappings - // Options for resolver generator export interface ResolverOptions { // Array of ValueFlows module names to include in the schema From 859d35da00625a51af0f08ce0dbb0d6d014f5177 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 16:56:59 +1000 Subject: [PATCH 018/181] remove todo --- modules/graphql-client/index.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/graphql-client/index.ts b/modules/graphql-client/index.ts index d5a558f9f..3e151405d 100644 --- a/modules/graphql-client/index.ts +++ b/modules/graphql-client/index.ts @@ -1,8 +1,6 @@ /** * GraphQL client interface for Holochain connection * - * :TODO: sniff active DNA configuration from conductor - * * @package Holo-REA GraphQL client * @since 2020-07-14 */ From f974d80898092e248fea99507f755c42e1d18d70 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 17:44:33 +1000 Subject: [PATCH 019/181] update scripts to use pnpm commands so workspace is aware of pnpm-specific module IDs (workspace:* etc) --- package.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/package.json b/package.json index 8d84b6a62..17d0a976d 100644 --- a/package.json +++ b/package.json @@ -8,22 +8,22 @@ "preinstall": "npx only-allow pnpm && scripts/preinstall.sh", "postinstall": "scripts/postinstall.sh", "shell": "nix-shell", - "start": "npm run build; npm-run-all --parallel dev:graphql:adapter dev:graphql:client dev:graphql:explorer dht", + "start": "pnpm run build; npm-run-all --parallel dev:graphql:adapter dev:graphql:client dev:graphql:explorer dht", "build": "npm-run-all --parallel build:graphql build:crates", "build:crates": "CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown && RUN_WASM_OPT=0 scripts/package-dnas.sh", "build:graphql": "npm-run-all build:graphql:adapter build:graphql:client", - "build:graphql:adapter": "cd modules/vf-graphql-holochain && npm run build", - "build:graphql:client": "cd modules/graphql-client && npm run build", - "build:apps": "npm run build:apps:explorer", - "build:apps:explorer": "npm run build:graphql:adapter && cd apps/holorea-graphql-explorer && npm run build", - "build:webhapp": "npm run build:apps:explorer && scripts/package-webhapp.sh", + "build:graphql:adapter": "cd modules/vf-graphql-holochain && pnpm run build", + "build:graphql:client": "cd modules/graphql-client && pnpm run build", + "build:apps": "pnpm run build:apps:explorer", + "build:apps:explorer": "pnpm run build:graphql:adapter && cd apps/holorea-graphql-explorer && pnpm run build", + "build:webhapp": "pnpm run build:apps:explorer && scripts/package-webhapp.sh", "build:example:custom-resource-attributes": "cd example/custom-resource-attributes && ./build-deps.sh && hc package", 
"build:example:knowledge-system-extensions": "cd example/knowledge-system-extensions && ./build-deps.sh && hc package", - "dht": "npm run dht:conductor", + "dht": "pnpm run dht:conductor", "dht:conductor": "scripts/run-dev-conductor.sh", "dev": "npm-run-all --parallel watch dev:graphql:adapter dev:graphql:client dev:graphql:explorer", - "dev:graphql:adapter": "cd modules/vf-graphql-holochain && npm run dev", - "dev:graphql:client": "cd modules/graphql-client && npm run dev", + "dev:graphql:adapter": "cd modules/vf-graphql-holochain && pnpm run dev", + "dev:graphql:client": "cd modules/graphql-client && pnpm run dev", "dev:graphql:explorer": "cd apps/holorea-graphql-explorer && npm start", "watch": "npm-watch", "test": "npm-run-all test:unit test:integration", From cbf2dd5d172ce153f90245506718f36b548faaf5 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 17:45:12 +1000 Subject: [PATCH 020/181] update CI scripts to use pnpm & remove old sim2h network server startup commands --- .circleci/config.yml | 7 +------ .github/workflows/release.yml | 4 ++-- .travis.yml | 4 ++-- modules/vf-graphql-holochain/README.md | 2 +- 4 files changed, 6 insertions(+), 11 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f91113725..66e240186 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,9 +10,4 @@ jobs: name: pnpm setup command: nix-shell --run 'npm i -g pnpm' no_output_timeout: 30s - - run: - name: sim2h server - command: nix-shell --run 'npm run dht:sim2h' - background: true - no_output_timeout: 20m - - run: nix-shell --run 'pnpm install && npm run build && npm run test:integration:test' + - run: nix-shell --run 'pnpm install && pnpm run build && pnpm run test:integration:test' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ae311847f..a03cb0f85 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,9 +70,9 @@ jobs: sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/full_suite_release_template/happ.yaml cat bundles/full_suite_release_template/happ.yaml - name: Build WASM, dnas, happs - run: nix-shell --run 'npm run build:crates' + run: nix-shell --run 'pnpm run build:crates' - name: Build explorer UI and webhapp package - run: nix-shell --run 'npm run build:webhapp' + run: nix-shell --run 'pnpm run build:webhapp' - name: upload bundles env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.travis.yml b/.travis.yml index 4d96eaae1..ea62cf24c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ language: nix -before_script: nix-shell --run 'npm i -g pnpm' && nix-shell --run 'npm run dht:sim2h &' -script: nix-shell --run 'pnpm install --network-concurrency 1 && npm run build && npm run test:integration:test' +before_script: nix-shell --run 'npm i -g pnpm'' +script: nix-shell --run 'pnpm install --network-concurrency 1 && pnpm run build && pnpm run test:integration:test' git: quiet: true depth: false diff --git a/modules/vf-graphql-holochain/README.md b/modules/vf-graphql-holochain/README.md index 3d09ff596..44e842403 100644 --- a/modules/vf-graphql-holochain/README.md +++ b/modules/vf-graphql-holochain/README.md @@ -95,7 +95,7 @@ Other files implement the query bindings between the linked hREA app DNAs and Gr - You will need to be given access to the [VF NPM org](https://www.npmjs.com/org/valueflows) in order to update the module on the registry. 
You can request access in https://gitter.im/valueflows/welcome - Bump the version in `package.json` & commit to the repository -- Run `npm run build` from this directory or `npm run build:graphql-adapter` from the root of the hREA repository +- Run `pnpm run build` from this directory or `pnpm run build:graphql-adapter` from the root of the hREA repository - Change to `./build` under this directory, where the new generated files are - Run `npm publish --access public` from the `./build` directory - Tag the current release in git and push the tag to `origin` From 86ed2082bdb045dcfc32f8a41daabc51af35495d Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 17:50:43 +1000 Subject: [PATCH 021/181] pnpm installed locally in GH CI --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a03cb0f85..d4ec296b2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,9 +70,9 @@ jobs: sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/full_suite_release_template/happ.yaml cat bundles/full_suite_release_template/happ.yaml - name: Build WASM, dnas, happs - run: nix-shell --run 'pnpm run build:crates' + run: nix-shell --run 'npx pnpm run build:crates' - name: Build explorer UI and webhapp package - run: nix-shell --run 'pnpm run build:webhapp' + run: nix-shell --run 'npx pnpm run build:webhapp' - name: upload bundles env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From dfb3521858c40027549349b97ca5832726233f5d Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 20:03:34 +1000 Subject: [PATCH 022/181] unignore Cargo.lock to see if it makes GH actions use same deps --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index e85d26ea2..ab42bb90b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,7 @@ .hc .hc_live* .DS_Store -Cargo.lock +#Cargo.lock bundle.json .cargo/ From 25bf74aeda079569c62303070868e71e0ec5d7c9 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 25 Feb 2022 20:03:41 +1000 Subject: [PATCH 023/181] add Cargo lockfile --- Cargo.lock | 1985 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1985 insertions(+) create mode 100644 Cargo.lock diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000..4ec215e57 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1985 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "bit-set" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +dependencies = [ + "libc", + "num-integer", + "num-traits", + "serde", + "time", + "winapi", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "convert_case" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56e810098d091a972466b55679bde45c8a5c1b034b8dad3e4ac5486296bd097" +dependencies = [ + "clap", + "strum", + "strum_macros", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "darling" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version", + "syn", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "gcollections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f551fdf23ef80329f754919669147a71c67b6cfe3569cd93b6fabdd62044377" +dependencies = [ + "bit-set", + "num-integer", + "num-traits", + "trilean", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" + +[[package]] +name = "hc_zome_agent_registration" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +dependencies = [ + "hc_zome_agent_registration_lib", + "hc_zome_agent_registration_rpc", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_agent_registration_hrea" +version = "0.1.0" +dependencies = [ + "hc_zome_agent_registration", +] + +[[package]] +name = "hc_zome_agent_registration_lib" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +dependencies = [ + "hc_zome_agent_registration_storage", + "hdk", +] + +[[package]] +name = "hc_zome_agent_registration_rpc" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +dependencies = [ + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_agent_registration_storage" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +dependencies = [ + "hc_zome_agent_registration_storage_consts", + "hdk", +] + +[[package]] +name = "hc_zome_agent_registration_storage_consts" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" + +[[package]] +name = "hc_zome_dna_auth_resolver" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hc_zome_dna_auth_resolver_rpc", + "hc_zome_dna_auth_resolver_storage", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_hrea" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_lib" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hc_zome_dna_auth_resolver_rpc", + "hc_zome_dna_auth_resolver_storage", + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_rpc" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "holo_hash", 
+ "holochain_serialized_bytes", + "holochain_zome_types", + "serde", +] + +[[package]] +name = "hc_zome_dna_auth_resolver_storage" +version = "0.1.0" +source = "git+https://github.com/holochain-open-dev/dna-auth-resolver?rev=b1adec5#b1adec536ef623a0cc9ef491d9faaabc55c41028" +dependencies = [ + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "hc_zome_rea_actions" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", + "vf_actions", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_lib", + "hc_zome_rea_agreement_rpc", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_agreement_index_agreement" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_agreement_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hc_zome_rea_agreement_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_agreement_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_rpc", + "hc_zome_rea_agreement_storage_consts", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_agreement_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_commitment" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_lib", + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_commitment_storage", + "hc_zome_rea_commitment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_commitment_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "paste", + "serde", +] + +[[package]] +name = "hc_zome_rea_commitment_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_commitment_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_commitment_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_commitment_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_commitment_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_event" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_lib", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_zome_api", + "hc_zome_rea_process_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_economic_event_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_agreement_storage_consts", + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_fulfillment_storage_consts", + "hc_zome_rea_process_storage_consts", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + 
"hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_event_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_event_zome_api", + "hc_zome_rea_economic_resource_lib", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_storage_consts", + "hdk", + "hdk_records", + "hdk_relay_pagination", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_economic_event_rpc" +version = "0.1.0" +dependencies = [ + "hdk_relay_pagination", + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_event_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_event_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_event_zome_api" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_resource_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_resource_lib", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_zome_api", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_economic_resource_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_resource_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_economic_resource_zome_api", + "hc_zome_rea_process_storage", + "hc_zome_rea_process_storage_consts", + "hc_zome_rea_resource_specification_storage_consts", + "hdk", + "hdk_records", + "hdk_relay_pagination", + "hdk_semantic_indexes_client_lib", + "paste", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_rpc" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_economic_resource_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_event_storage", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage_consts", + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_economic_resource_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_economic_resource_zome_api" +version = "0.1.0" +dependencies = [ + 
"hc_zome_rea_economic_event_rpc", + "hc_zome_rea_economic_resource_rpc", + "hc_zome_rea_economic_resource_storage", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_fulfillment_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib_destination" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_fulfillment_lib_origin" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage", + "hc_zome_rea_fulfillment_storage_consts", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_fulfillment_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib_destination", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_lib_origin", + "hc_zome_rea_fulfillment_rpc", + "hc_zome_rea_fulfillment_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_fulfillment_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_fulfillment_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_fulfillment_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_fulfillment_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_intent" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_lib", + "hc_zome_rea_intent_rpc", + "hc_zome_rea_intent_storage", + "hc_zome_rea_intent_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_intent_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_intent_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hc_zome_rea_intent_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_intent_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_intent_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_intent_rpc", + "hdk", + "hdk_records", + "serde", + "vf_actions", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = 
"hc_zome_rea_intent_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_process" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_intent_storage_consts", + "hc_zome_rea_process_lib", + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage_consts", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_storage_consts", + "hc_zome_rea_economic_event_storage_consts", + "hc_zome_rea_intent_storage_consts", + "hc_zome_rea_process_lib", + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage_consts", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_rpc", + "hc_zome_rea_process_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_process_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_lib", + "hc_zome_rea_process_specification_rpc", + "hc_zome_rea_process_specification_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_process_specification_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_rpc", + "hc_zome_rea_process_specification_storage", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_specification_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_specification_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_process_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_process_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_process_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_lib", + "hc_zome_rea_proposal_rpc", + "hc_zome_rea_proposal_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposal_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposal_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hc_zome_rea_proposal_storage", + "hc_zome_rea_proposal_storage_consts", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposal_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposal_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposal_rpc", + "hdk", + "hdk_records", + "serde", 
+ "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposal_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposed_intent" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_lib", + "hc_zome_rea_proposed_intent_rpc", + "hc_zome_rea_proposed_intent_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hc_zome_rea_proposed_intent_storage", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_intent_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_intent_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_proposed_to" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_lib", + "hc_zome_rea_proposed_to_rpc", + "hc_zome_rea_proposed_to_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_to_index_proposal" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_proposed_to_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hc_zome_rea_proposed_to_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_proposed_to_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_to_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_proposed_to_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_proposed_to_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_resource_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_lib", + "hc_zome_rea_resource_specification_rpc", + "hc_zome_rea_resource_specification_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_resource_specification_index_specification" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hc_zome_rea_resource_specification_storage", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_resource_specification_rpc", + "hdk", + "hdk_records", + "serde", + 
"vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_resource_specification_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_satisfaction_index_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_index_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_semantic_indexes_zome_derive", + "hdk_semantic_indexes_zome_lib", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib_destination" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_satisfaction_lib_origin" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_commitment_rpc", + "hc_zome_rea_satisfaction_lib", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "hdk_records", + "hdk_semantic_indexes_client_lib", + "paste", +] + +[[package]] +name = "hc_zome_rea_satisfaction_observation" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib_destination", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_planning" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_lib_origin", + "hc_zome_rea_satisfaction_rpc", + "hc_zome_rea_satisfaction_storage_consts", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_satisfaction_rpc" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_maybe_undefined", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_satisfaction_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_satisfaction_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", + "vf_measurement", +] + +[[package]] +name = "hc_zome_rea_satisfaction_storage_consts" +version = "0.1.0" + +[[package]] +name = "hc_zome_rea_unit" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_lib", + "hc_zome_rea_unit_rpc", + "hdk", + "serde", +] + +[[package]] +name = "hc_zome_rea_unit_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_rpc", + "hc_zome_rea_unit_storage", + "hc_zome_rea_unit_storage_consts", + "hdk", + "hdk_records", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_rpc" +version = "0.1.0" +dependencies = [ + "hdk_records", + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_storage" +version = "0.1.0" +dependencies = [ + "hc_zome_rea_unit_rpc", + "hdk", + "hdk_records", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "hc_zome_rea_unit_storage_consts" +version = "0.1.0" + +[[package]] +name = "hdk" +version = "0.0.122" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a636e1e243760e96dc3d6a4a8ad7fa12dc515a58433d00fd8afaa55952ff57" +dependencies = [ + "hdk_derive", + "holo_hash", + "holochain_wasmer_guest", + "holochain_zome_types", + "paste", + "serde", + 
"serde_bytes", + "thiserror", + "tracing", + "tracing-core", +] + +[[package]] +name = "hdk_derive" +version = "0.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab4b10bdbc307e866deea7ee796064a54369f129866b28480210af4bfe48b16" +dependencies = [ + "holochain_zome_types", + "paste", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "hdk_records" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver_lib", + "hdk", + "hdk_rpc_errors", + "hdk_semantic_indexes_zome_rpc", + "hdk_uuid_types", + "holo_hash", + "serde", + "serde_maybe_undefined", + "thiserror", +] + +[[package]] +name = "hdk_relay_pagination" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", +] + +[[package]] +name = "hdk_rpc_errors" +version = "0.1.0" +dependencies = [ + "hdk", + "holo_hash", + "serde", + "thiserror", +] + +[[package]] +name = "hdk_semantic_indexes_client_lib" +version = "0.1.0" +dependencies = [ + "hdk", + "hdk_records", + "hdk_semantic_indexes_zome_rpc", + "paste", + "serde", +] + +[[package]] +name = "hdk_semantic_indexes_zome_derive" +version = "0.1.0" +dependencies = [ + "convert_case 0.1.0", + "darling", + "quote", + "syn", +] + +[[package]] +name = "hdk_semantic_indexes_zome_lib" +version = "0.1.0" +dependencies = [ + "hc_zome_dna_auth_resolver_lib", + "hdk", + "hdk_records", + "hdk_rpc_errors", + "hdk_semantic_indexes_zome_rpc", + "serde", + "serde_maybe_undefined", +] + +[[package]] +name = "hdk_semantic_indexes_zome_rpc" +version = "0.1.0" +dependencies = [ + "hdk_rpc_errors", + "hdk_uuid_types", + "holochain_serialized_bytes", + "serde", +] + +[[package]] +name = "hdk_uuid_types" +version = "0.1.0" +dependencies = [ + "hdk", + "holo_hash", + "serde", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "holo_hash" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e16f46eacc5a28eec7f8c35119829fb0c9e9588a03ff0728e0c519cc6872a19" +dependencies = [ + "holochain_serialized_bytes", + "kitsune_p2p_dht_arc", + "serde", + "serde_bytes", + "thiserror", +] + +[[package]] +name = "holochain_serialized_bytes" +version = "0.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9805b3e01e7b5c144782a0823db4dc895fec18a9ccd45a492ce7c7bf157a9e38" +dependencies = [ + "holochain_serialized_bytes_derive", + "rmp-serde", + "serde", + "serde-transcode", + "serde_bytes", + "serde_json", + "thiserror", +] + +[[package]] +name = "holochain_serialized_bytes_derive" +version = "0.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1077232d0c427d64feb9e138fa22800e447eafb1810682d6c13beb95333cb32c" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "holochain_wasmer_common" +version = "0.0.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6de9bda7e1b991ce453ef55601405e43d7ef0cafb0108ed0b4755a1398dae05" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "serde_bytes", + "thiserror", +] + +[[package]] +name = "holochain_wasmer_guest" +version = "0.0.77" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "becdd2a6c662ac81a1c1aeae04eb39a8c6d987d79415fc9f6fff609bb106a90e" +dependencies = [ + "holochain_serialized_bytes", + "holochain_wasmer_common", + "parking_lot", + "serde", + "tracing", +] + +[[package]] +name = "holochain_zome_types" +version = "0.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94919c2cfdaf9950eb9ebcf3847f83fd2af8fecf5b5eac80c1c6e9ab83eb433d" +dependencies = [ + "chrono", + "holo_hash", + "holochain_serialized_bytes", + "holochain_wasmer_common", + "kitsune_p2p_timestamp", + "paste", + "serde", + "serde_bytes", + "subtle", + "thiserror", + "tracing", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "intervallum" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ccecd834666f695ecec3ff0d5fc32e32c91abea91a28fd0aceb4b35a82cee1" +dependencies = [ + "bit-set", + "gcollections", + "num-integer", + "num-traits", + "trilean", +] + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "kitsune_p2p_dht_arc" +version = "0.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7459dbef2eef419984efb6bef5d1ff2ab1836ca7ca8f506b84b5982580b1bc9" +dependencies = [ + "derive_more", + "gcollections", + "intervallum", + "num-traits", + "serde", +] + +[[package]] +name = "kitsune_p2p_timestamp" +version = "0.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbac5369262b970c3a966c29502f17689da917678c9d60bdbaa9695d1b1c57fb" +dependencies = [ + "chrono", + "serde", + "thiserror", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e509672465a0504304aa87f9f176f2b2b716ed8fb105ebe5c02dc6dce96a94" + +[[package]] +name = "lock_api" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] 
+ +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rmp" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f55e5fa1446c4d5dd1f5daeed2a4fe193071771a2636274d0d7a3b082aa7ad6" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "rmp-serde" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723ecff9ad04f4ad92fe1c8ca6c20d2196d9286e9c60727c4cb5511629260e9d" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "semver" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0486718e92ec9a68fbed73bb5ef687d71103b142595b406835649bebd33f72c7" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-transcode" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590c0e25c2a5bb6e85bf5c1bce768ceb86b316e7a01bdf07d2cb4ec2271990e2" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_bytes" +version = "0.11.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_maybe_undefined" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bd81eb48f4c437cadc685403cad539345bf703d78e63707418431cecd4522b" + +[[package]] +name = "strum_macros" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87c85aa3f8ea653bfd3ddf25f7ee357ee4d204731f6aa9ad04002306f6e2774c" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" +dependencies = [ + "libc", + "wasi", + "winapi", +] + +[[package]] +name = "tracing" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d8d93354fe2a8e50d5953f5ae2e47a3fc2ef03292e7ea46e3cc38f549525fb9" +dependencies = [ + "cfg-if", + 
"pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" +dependencies = [ + "lazy_static", + "valuable", +] + +[[package]] +name = "trilean" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683ba5022fe6dbd7133cad150478ccf51bdb6d861515181e5fc6b4323d4fa424" + +[[package]] +name = "unicode-segmentation" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" + +[[package]] +name = "unicode-width" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "vf_actions" +version = "0.1.0" +dependencies = [ + "hdk", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "vf_attributes_hdk" +version = "0.1.0" +dependencies = [ + "chrono", + "hdk_semantic_indexes_zome_rpc", + "hdk_uuid_types", + "holo_hash", + "holochain_serialized_bytes", + "holochain_zome_types", + "serde", +] + +[[package]] +name = "vf_measurement" +version = "0.1.0" +dependencies = [ + "holochain_serialized_bytes", + "serde", + "vf_attributes_hdk", +] + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" From 6ae65d25ccc15b6366456b32111594c4e1712953 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 25 Feb 2022 04:05:52 -0800 Subject: [PATCH 024/181] fixes #199 see the issue context there is a new option for mapZomeFn to instruct it to optionally skip the special encode decode steps --- 
modules/vf-graphql-holochain/connection.ts | 15 ++++++++------- modules/vf-graphql-holochain/queries/agent.ts | 15 ++++++--------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 683872b57..4dee776d5 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -122,7 +122,7 @@ const idMatchRegex = /^[A-Za-z0-9_+\-/]{53}={0,2}:[A-Za-z0-9_+\-/]{53}={0,2}$/ const stringIdRegex = /^\w+?:[A-Za-z0-9_+\-/]{53}={0,2}$/ // @see https://github.com/holochain-open-dev/core-types/blob/main/src/utils.ts -function deserializeHash(hash: string): Uint8Array { +export function deserializeHash(hash: string): Uint8Array { return Base64.toUint8Array(hash.slice(1)) } @@ -143,7 +143,7 @@ function deserializeStringId(field: string): Array { } // @see https://github.com/holochain-open-dev/core-types/blob/main/src/utils.ts -function serializeHash(hash: Uint8Array): string { +export function serializeHash(hash: Uint8Array): string { return `u${Base64.fromUint8Array(hash, true)}` } @@ -248,7 +248,7 @@ export type BoundZomeFn = (args: any) => any; /** * Higher-order function to generate async functions for calling zome RPC methods */ -const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_name: string): BoundZomeFn => async (args) => { +const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_name: string, skipEncodeDecode?: boolean): BoundZomeFn => async (args) => { const { callZome } = await getConnection(socketURI) const res = await callZome({ cap_secret: null, // :TODO: @@ -256,9 +256,10 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_ zome_name, fn_name, provenance: cell_id[1], - payload: encodeFields(args), + payload: skipEncodeDecode ? 
args : encodeFields(args), }) - decodeFields(res) + console.log(res) + if (!skipEncodeDecode) decodeFields(res) return res } @@ -271,5 +272,5 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_ * * @return bound async zome function which can be called directly */ -export const mapZomeFn = (mappings: DNAIdMappings, socketURI: string, instance: string, zome: string, fn: string) => - zomeFunction(socketURI, (mappings && mappings[instance]), zome, fn) +export const mapZomeFn = (mappings: DNAIdMappings, socketURI: string, instance: string, zome: string, fn: string, skipEncodeDecode?: boolean) => + zomeFunction(socketURI, (mappings && mappings[instance]), zome, fn, skipEncodeDecode) diff --git a/modules/vf-graphql-holochain/queries/agent.ts b/modules/vf-graphql-holochain/queries/agent.ts index d0f5fe95f..06686c403 100644 --- a/modules/vf-graphql-holochain/queries/agent.ts +++ b/modules/vf-graphql-holochain/queries/agent.ts @@ -8,22 +8,18 @@ */ import { DNAIdMappings, injectTypename } from '../types' -import { mapZomeFn } from '../connection' +import { mapZomeFn, serializeHash, deserializeHash } from '../connection' import { Agent } from '@valueflows/vf-graphql' -// :TODO: remove this, backend should use HoloHashB64 eventually -const { Base64 } = require('js-base64') -function serializeHash (hash) { - return `u${Base64.fromUint8Array(hash, true)}` -} - export default (dnaConfig: DNAIdMappings, conductorUri: string) => { const readMyAgent = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_my_agent_pubkey') const readAllAgents = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_registered_agents') - const agentExists = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'is_registered') + // special 'true' at the end is for skipEncodeDecode, because of the way this zome handles serialization and inputs + // which is different from others + const agentExists = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'is_registered', true) // read mapped DNA hash in order to construct VF-native IDs from DNA-local HC IDs const mappedDNA = dnaConfig['agent'] ? serializeHash(dnaConfig['agent'][0]) : null @@ -49,7 +45,8 @@ export default (dnaConfig: DNAIdMappings, conductorUri: string) => { }, agent: injectTypename('Person', async (root, { id }): Promise => { - const isAgent = await agentExists({ pubKey: id }) + const rawAgentPubKey = deserializeHash(id.split(':')[0]) + const isAgent = await agentExists({ pubKey: rawAgentPubKey }) if (!isAgent) { throw new Error('No agent exists with that ID') From 2cacb2955bdd68503c6c744631fbb2e9848307ae Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 25 Feb 2022 04:08:03 -0800 Subject: [PATCH 025/181] remove console.log --- modules/vf-graphql-holochain/connection.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 4dee776d5..fd635a97d 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -258,7 +258,6 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_ provenance: cell_id[1], payload: skipEncodeDecode ? 
args : encodeFields(args), }) - console.log(res) if (!skipEncodeDecode) decodeFields(res) return res } From 4e1255d4c49934f74f66c64a993191393b688440 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Feb 2022 11:09:23 +1000 Subject: [PATCH 026/181] correctly parse datetime format without fractional seconds if returned from backend --- modules/vf-graphql-holochain/connection.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 683872b57..fa3b658c6 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -156,6 +156,7 @@ function seralizeStringId(id: Array): string { } const LONG_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ss.SSSZ' +const SHORT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ' const isoDateRegex = /^\d{4}-\d\d-\d\d(T\d\d:\d\d:\d\d(\.\d\d\d)?)?([+-]\d\d:\d\d)?$/ /** @@ -188,6 +189,9 @@ const decodeFields = (result: any): void => { // recursively check for Date strings and convert to JS date objects upon receiving if (value && value.match && value.match(isoDateRegex)) { subject[prop] = parse(value, LONG_DATETIME_FORMAT) + if (subject[prop] === null) { + subject[prop] = parse(value, SHORT_DATETIME_FORMAT) + } } }) From 52e7741024b72eb111c7f48e98781111c2114c46 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Sun, 27 Feb 2022 14:45:16 -0800 Subject: [PATCH 027/181] fix the missing entry def for units --- Cargo.lock | 1 + zomes/rea_unit/zome/Cargo.toml | 1 + zomes/rea_unit/zome/src/lib.rs | 2 ++ 3 files changed, 4 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 4ec215e57..199cf4782 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1229,6 +1229,7 @@ dependencies = [ "hc_zome_rea_unit_rpc", "hdk", "serde", + "vf_attributes_hdk", ] [[package]] diff --git a/zomes/rea_unit/zome/Cargo.toml b/zomes/rea_unit/zome/Cargo.toml index 12ab79818..4ef709d4c 100644 --- a/zomes/rea_unit/zome/Cargo.toml +++ b/zomes/rea_unit/zome/Cargo.toml @@ -11,6 +11,7 @@ hdk = "0.0.122" hc_zome_rea_unit_rpc = { path = "../rpc" } hc_zome_rea_unit_lib = { path = "../lib" } +vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } [lib] path = "src/lib.rs" diff --git a/zomes/rea_unit/zome/src/lib.rs b/zomes/rea_unit/zome/src/lib.rs index b72c10d25..4e064d870 100644 --- a/zomes/rea_unit/zome/src/lib.rs +++ b/zomes/rea_unit/zome/src/lib.rs @@ -11,11 +11,13 @@ use hdk::prelude::*; use hc_zome_rea_unit_rpc::*; use hc_zome_rea_unit_lib::*; +use vf_attributes_hdk::UnitInternalAddress; #[hdk_extern] fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + UnitInternalAddress::entry_def(), EntryDef { id: UNIT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, From a1edfd327c8b436716093e827c4eeab1b3bcc8d8 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Sun, 27 Feb 2022 17:33:41 -0800 Subject: [PATCH 028/181] fixes #206 --- modules/vf-graphql-holochain/resolvers/measure.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/vf-graphql-holochain/resolvers/measure.ts b/modules/vf-graphql-holochain/resolvers/measure.ts index db8553cc7..60966dde7 100644 --- a/modules/vf-graphql-holochain/resolvers/measure.ts +++ b/modules/vf-graphql-holochain/resolvers/measure.ts @@ -9,6 +9,7 @@ import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' import { mapZomeFn } from '../connection' import { + Maybe, Measure, Unit, } from '@valueflows/vf-graphql' @@ -17,7 +18,10 @@ export default (enabledVFModules: string[] 
= DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit') return { - hasUnit: async (record: Measure): Promise<Unit> => { + hasUnit: async (record: Measure): Promise<Maybe<Unit>> => { + if (!record.hasUnit) { + return null + } return (await readUnit({ id: record.hasUnit })).unit }, } From 1ebb329090bab09d177c948144a273b4ece0167f Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 1 Mar 2022 10:15:16 -0800 Subject: [PATCH 029/181] fixes #208 - update multiple times The original code was adapted from h-be/Acorn or hdk_crud and didn't suit this context: in Acorn, HeaderHash is used as the "id", whereas in holo-rea EntryHash is used as the "id". hdk_crud creates a flat update tree, where all updates reference the original header; holo-rea is supposed to create a branching Update tree, which this now allows for. However, this implementation is still very naive, and assumes that the Update tree mostly just nests children in one sequence, more like an array than a tree. This won't hold true in practice, as we're dealing in offline-friendly distributed systems. Solving this properly relates to issue #196, conflict resolution. --- lib/hdk_records/src/record_helpers.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index 08183882d..372293dbd 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -45,7 +45,7 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult match details.entry_dht_status { metadata::EntryDhtStatus::Live => match details.updates.len() { 0 => { - // no updates yet, latest header hash is the first one + // https://docs.rs/hdk/latest/hdk/prelude/struct.EntryDetails.html#structfield.headers Ok(get_header_hash(details.headers.first().unwrap().to_owned())) }, _ => { @@ -53,7 +53,8 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult Err(DataIntegrityError::EntryNotFound), From 33fc3881bb4038f3c6904d09d9962678d61a039e Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 1 Mar 2022 10:33:35 -0800 Subject: [PATCH 030/181] Update record_helpers.rs --- lib/hdk_records/src/record_helpers.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index 372293dbd..bc25c4639 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -40,6 +40,10 @@ fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { /// /// Useful in coordinating updates between different entry types. /// +/// NOTE: this is a very naive recursive algorithm that basically assumes full network +/// connectivity between everyone at all times, and Updates form a Linked List, rather +/// than a multi-branching tree. This should be updated during other 'conflict resolution' related +/// changes outlined in issue https://github.com/holo-rea/holo-rea/issues/196 pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult<HeaderHash> { match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })?
{ Some(Details::Entry(details)) => match details.entry_dht_status { From b6ce52856c5c1606f6af5fb33667575f4d76af01 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:31:25 +1000 Subject: [PATCH 031/181] update deps for latest HDK --- .../zomes/beef_economic_event/code/Cargo.toml | 2 +- .../zomes/beef_economic_resource/code/Cargo.toml | 2 +- .../zomes/beef_resource_specification/code/Cargo.toml | 2 +- zomes/agent_registration/Cargo.toml | 2 +- zomes/rea_action/zome/Cargo.toml | 2 +- zomes/rea_agreement/zome/Cargo.toml | 2 +- zomes/rea_agreement/zome_idx_agreement/Cargo.toml | 2 +- zomes/rea_commitment/zome/Cargo.toml | 2 +- zomes/rea_commitment/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_economic_event/zome/Cargo.toml | 2 +- zomes/rea_economic_event/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_economic_resource/zome/Cargo.toml | 2 +- zomes/rea_economic_resource/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_planning/Cargo.toml | 2 +- zomes/rea_intent/zome/Cargo.toml | 2 +- zomes/rea_intent/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_process/zome/Cargo.toml | 2 +- zomes/rea_process/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_process_specification/zome/Cargo.toml | 2 +- zomes/rea_proposal/zome/Cargo.toml | 2 +- zomes/rea_proposal/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_proposed_intent/zome/Cargo.toml | 2 +- zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_proposed_to/zome/Cargo.toml | 2 +- zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_resource_specification/zome/Cargo.toml | 2 +- .../zome_idx_specification/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_observation/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_planning/Cargo.toml | 2 +- zomes/rea_unit/zome/Cargo.toml | 2 +- 35 files changed, 35 insertions(+), 35 deletions(-) diff --git a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml index e9655016c..815133523 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_economic_event_defs = { path = "../../../../../lib/rea_economic_event/defs" } hc_zome_rea_economic_event_lib = { path = "../../../../../lib/rea_economic_event/lib" } diff --git a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml index f53a1164a..61152d19b 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_economic_resource_storage_consts = { path = "../../../../../lib/rea_economic_resource/storage_consts" } hc_zome_rea_economic_resource_defs = { path = "../../../../../lib/rea_economic_resource/defs" } diff --git 
a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml index c889e821e..6e4531374 100644 --- a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml +++ b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_resource_specification_defs = { path = "../../../../../lib/rea_resource_specification/defs" } hc_zome_rea_resource_specification_rpc = { path = "../../../../../lib/rea_resource_specification/rpc" } diff --git a/zomes/agent_registration/Cargo.toml b/zomes/agent_registration/Cargo.toml index b81e7eda1..5958146d5 100644 --- a/zomes/agent_registration/Cargo.toml +++ b/zomes/agent_registration/Cargo.toml @@ -5,7 +5,7 @@ authors = ["pospi "] edition = "2018" [dependencies] -hc_zome_agent_registration = {git = "https://github.com/holochain-open-dev/agent-registration", rev = "a37cc26", package = "hc_zome_agent_registration"} +hc_zome_agent_registration = {git = "https://github.com/holochain-open-dev/agent-registration", branch = "hdk-123-validation", package = "hc_zome_agent_registration"} [lib] path = "src/lib.rs" diff --git a/zomes/rea_action/zome/Cargo.toml b/zomes/rea_action/zome/Cargo.toml index a33c1f40d..be74e589e 100644 --- a/zomes/rea_action/zome/Cargo.toml +++ b/zomes/rea_action/zome/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [dependencies] serde = "1" -hdk = "0.0.122" +hdk = "0.0.123" vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } vf_actions = { path = "../../../lib/vf_actions" } diff --git a/zomes/rea_agreement/zome/Cargo.toml b/zomes/rea_agreement/zome/Cargo.toml index acc4361df..d0924253a 100644 --- a/zomes/rea_agreement/zome/Cargo.toml +++ b/zomes/rea_agreement/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_agreement_rpc = { path = "../rpc" } hc_zome_rea_agreement_lib = { path = "../lib" } diff --git a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml index 2e230d0c9..283a1e81f 100644 --- a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml +++ b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_commitment/zome/Cargo.toml b/zomes/rea_commitment/zome/Cargo.toml index c2eaf21ad..b45fe5136 100644 --- a/zomes/rea_commitment/zome/Cargo.toml +++ b/zomes/rea_commitment/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_commitment_rpc = { path = "../rpc" } hc_zome_rea_commitment_lib = { path = "../lib" } diff --git a/zomes/rea_commitment/zome_idx_planning/Cargo.toml b/zomes/rea_commitment/zome_idx_planning/Cargo.toml index 48d10ea3a..defb79367 100644 --- a/zomes/rea_commitment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_commitment/zome_idx_planning/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" paste = "1.0" serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib 
= { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_event/zome/Cargo.toml b/zomes/rea_economic_event/zome/Cargo.toml index bba699a18..35858e1fa 100644 --- a/zomes/rea_economic_event/zome/Cargo.toml +++ b/zomes/rea_economic_event/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_economic_event_zome_api = { path = "../zome_api" } hc_zome_rea_economic_event_lib = { path = "../lib" } diff --git a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml index d697278fd..6144cb5e5 100644 --- a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_resource/zome/Cargo.toml b/zomes/rea_economic_resource/zome/Cargo.toml index 0e2b26742..631873ea8 100644 --- a/zomes/rea_economic_resource/zome/Cargo.toml +++ b/zomes/rea_economic_resource/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_economic_resource_zome_api = { path = "../zome_api" } hc_zome_rea_economic_resource_lib = { path = "../lib" } diff --git a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml index 53792681c..b1f3c6802 100644 --- a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml index 237edab7a..0a109e1a6 100644 --- a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml index fa5efb789..09c1d8296 100644 --- a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_observation/Cargo.toml b/zomes/rea_fulfillment/zome_observation/Cargo.toml index cf0e75f43..d815e4164 100644 --- 
a/zomes/rea_fulfillment/zome_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_fulfillment_lib_destination = { path = "../lib_destination" } hc_zome_rea_fulfillment_rpc = { path = "../rpc" } diff --git a/zomes/rea_fulfillment/zome_planning/Cargo.toml b/zomes/rea_fulfillment/zome_planning/Cargo.toml index fe9ca36a5..ac19e28b9 100644 --- a/zomes/rea_fulfillment/zome_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_fulfillment_rpc = { path = "../rpc" } hc_zome_rea_fulfillment_lib_origin = { path = "../lib_origin" } diff --git a/zomes/rea_intent/zome/Cargo.toml b/zomes/rea_intent/zome/Cargo.toml index 5b1aa674e..30ad6c114 100644 --- a/zomes/rea_intent/zome/Cargo.toml +++ b/zomes/rea_intent/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_intent_rpc = { path = "../rpc" } hc_zome_rea_intent_lib = { path = "../lib" } diff --git a/zomes/rea_intent/zome_idx_planning/Cargo.toml b/zomes/rea_intent/zome_idx_planning/Cargo.toml index 2a007dcbc..8f5f873fc 100644 --- a/zomes/rea_intent/zome_idx_planning/Cargo.toml +++ b/zomes/rea_intent/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_process/zome/Cargo.toml b/zomes/rea_process/zome/Cargo.toml index 9f6f0b083..b8ba3660b 100644 --- a/zomes/rea_process/zome/Cargo.toml +++ b/zomes/rea_process/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_records = { path = "../../../lib/hdk_records" } vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } diff --git a/zomes/rea_process/zome_idx_observation/Cargo.toml b/zomes/rea_process/zome_idx_observation/Cargo.toml index 13fbb134d..45f2f667f 100644 --- a/zomes/rea_process/zome_idx_observation/Cargo.toml +++ b/zomes/rea_process/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_process_specification/zome/Cargo.toml b/zomes/rea_process_specification/zome/Cargo.toml index 45b403314..6d5609211 100644 --- a/zomes/rea_process_specification/zome/Cargo.toml +++ b/zomes/rea_process_specification/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_process_specification_rpc = { path = "../rpc" } hc_zome_rea_process_specification_lib = { path = "../lib" } diff --git a/zomes/rea_proposal/zome/Cargo.toml b/zomes/rea_proposal/zome/Cargo.toml index fd2faabb2..e3ce4715b 100644 --- a/zomes/rea_proposal/zome/Cargo.toml +++ b/zomes/rea_proposal/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" 
+hdk = "0.0.123" hc_zome_rea_proposal_rpc = { path = "../rpc" } hc_zome_rea_proposal_lib = { path = "../lib" } diff --git a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml index efe496f5e..9493ae858 100644 --- a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_proposed_intent/zome/Cargo.toml b/zomes/rea_proposed_intent/zome/Cargo.toml index 6931f1bb0..743b6178b 100644 --- a/zomes/rea_proposed_intent/zome/Cargo.toml +++ b/zomes/rea_proposed_intent/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_proposed_intent_rpc = { path = "../rpc" } hc_zome_rea_proposed_intent_lib = { path = "../lib" } diff --git a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml index 26a52c2a5..ad8a8aa6b 100644 --- a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_proposed_to/zome/Cargo.toml b/zomes/rea_proposed_to/zome/Cargo.toml index 8e7e85b55..0e2bbe42a 100644 --- a/zomes/rea_proposed_to/zome/Cargo.toml +++ b/zomes/rea_proposed_to/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_proposed_to_rpc = { path = "../rpc" } hc_zome_rea_proposed_to_lib = { path = "../lib" } diff --git a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml index 9bda9b5d2..e53dcc02a 100644 --- a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_resource_specification/zome/Cargo.toml b/zomes/rea_resource_specification/zome/Cargo.toml index 64ceb5c0e..299c98b98 100644 --- a/zomes/rea_resource_specification/zome/Cargo.toml +++ b/zomes/rea_resource_specification/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_resource_specification_rpc = { path = "../rpc" } hc_zome_rea_resource_specification_lib = { path = "../lib" } diff --git a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml index 1c36b0260..9625e9f3b 100644 --- a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml +++ b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" 
[dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml index c767550f9..42c02b9d1 100644 --- a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml +++ b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml index 99b509501..1561f23bd 100644 --- a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml +++ b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_observation/Cargo.toml b/zomes/rea_satisfaction/zome_observation/Cargo.toml index 61683a5a4..478015611 100644 --- a/zomes/rea_satisfaction/zome_observation/Cargo.toml +++ b/zomes/rea_satisfaction/zome_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_satisfaction_lib_destination = { path = "../lib_destination" } hc_zome_rea_satisfaction_rpc = { path = "../rpc" } diff --git a/zomes/rea_satisfaction/zome_planning/Cargo.toml b/zomes/rea_satisfaction/zome_planning/Cargo.toml index 55f532cc3..681a8921c 100644 --- a/zomes/rea_satisfaction/zome_planning/Cargo.toml +++ b/zomes/rea_satisfaction/zome_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_satisfaction_rpc = { path = "../rpc" } hc_zome_rea_satisfaction_lib_origin = { path = "../lib_origin" } diff --git a/zomes/rea_unit/zome/Cargo.toml b/zomes/rea_unit/zome/Cargo.toml index 4ef709d4c..38e9619e6 100644 --- a/zomes/rea_unit/zome/Cargo.toml +++ b/zomes/rea_unit/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_rea_unit_rpc = { path = "../rpc" } hc_zome_rea_unit_lib = { path = "../lib" } From b0ae5a835a39834c2099a220e35f8fd337742a4a Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:31:41 +1000 Subject: [PATCH 032/181] update lockfile --- Cargo.lock | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 199cf4782..cf8915bf3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -182,7 +182,7 @@ checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" [[package]] name = "hc_zome_agent_registration" version = "0.1.0" -source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +source = 
"git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" dependencies = [ "hc_zome_agent_registration_lib", "hc_zome_agent_registration_rpc", @@ -200,7 +200,7 @@ dependencies = [ [[package]] name = "hc_zome_agent_registration_lib" version = "0.1.0" -source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" dependencies = [ "hc_zome_agent_registration_storage", "hdk", @@ -209,7 +209,7 @@ dependencies = [ [[package]] name = "hc_zome_agent_registration_rpc" version = "0.1.0" -source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" dependencies = [ "holo_hash", "serde", @@ -218,7 +218,7 @@ dependencies = [ [[package]] name = "hc_zome_agent_registration_storage" version = "0.1.0" -source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" dependencies = [ "hc_zome_agent_registration_storage_consts", "hdk", @@ -227,7 +227,7 @@ dependencies = [ [[package]] name = "hc_zome_agent_registration_storage_consts" version = "0.1.0" -source = "git+https://github.com/holochain-open-dev/agent-registration?rev=a37cc26#a37cc26ebabf49bb8ce3ec48b1e8548f975fe986" +source = "git+https://github.com/holochain-open-dev/agent-registration?branch=hdk-123-validation#c29b1e93d65261d1305ddd5521c456b7a4d0fe31" [[package]] name = "hc_zome_dna_auth_resolver" @@ -1271,9 +1271,9 @@ version = "0.1.0" [[package]] name = "hdk" -version = "0.0.122" +version = "0.0.123" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a636e1e243760e96dc3d6a4a8ad7fa12dc515a58433d00fd8afaa55952ff57" +checksum = "ef6a85565285d19223b72265868abc2413e530328d6ae0f4013a0455d203feab" dependencies = [ "hdk_derive", "holo_hash", @@ -1289,9 +1289,9 @@ dependencies = [ [[package]] name = "hdk_derive" -version = "0.0.24" +version = "0.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab4b10bdbc307e866deea7ee796064a54369f129866b28480210af4bfe48b16" +checksum = "9f30170490dc1b0468512cdcb72ada813d7cbf44e126fa6ab39a0b79165dd224" dependencies = [ "holochain_zome_types", "paste", @@ -1406,9 +1406,9 @@ dependencies = [ [[package]] name = "holo_hash" -version = "0.0.19" +version = "0.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e16f46eacc5a28eec7f8c35119829fb0c9e9588a03ff0728e0c519cc6872a19" +checksum = "1996c797ac295d79f884d263781c6ef3e0c63183af7d9f4e1d666c75e0d889ce" dependencies = [ "holochain_serialized_bytes", "kitsune_p2p_dht_arc", @@ -1469,9 +1469,9 @@ dependencies = [ [[package]] name = "holochain_zome_types" -version = "0.0.24" +version = "0.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94919c2cfdaf9950eb9ebcf3847f83fd2af8fecf5b5eac80c1c6e9ab83eb433d" +checksum = "2d51f55ac58054a6c87d046cda5c5e71c15cfcd9f11e9889817119c8218e3a7d" dependencies = [ "chrono", "holo_hash", @@ -1545,11 +1545,12 @@ dependencies = [ [[package]] name = 
"kitsune_p2p_timestamp" -version = "0.0.5" +version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbac5369262b970c3a966c29502f17689da917678c9d60bdbaa9695d1b1c57fb" +checksum = "2a7427a44837a60ccf063898340da6f8ef8c0e41812189b63d7775d8822863e7" dependencies = [ "chrono", + "derive_more", "serde", "thiserror", ] @@ -1562,9 +1563,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.118" +version = "0.2.119" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06e509672465a0504304aa87f9f176f2b2b716ed8fb105ebe5c02dc6dce96a94" +checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" [[package]] name = "lock_api" @@ -1651,9 +1652,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c" dependencies = [ "bitflags", ] @@ -1702,9 +1703,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "semver" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0486718e92ec9a68fbed73bb5ef687d71103b142595b406835649bebd33f72c7" +checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d" [[package]] name = "serde" @@ -1859,9 +1860,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d8d93354fe2a8e50d5953f5ae2e47a3fc2ef03292e7ea46e3cc38f549525fb9" +checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" dependencies = [ "cfg-if", "pin-project-lite", From f1effbecd210d9d80e8cdfa6fc7db53dfb90badf Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:31:53 +1000 Subject: [PATCH 033/181] update nix env for latest Holochain --- default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/default.nix b/default.nix index 2b53242f2..5c0baee48 100644 --- a/default.nix +++ b/default.nix @@ -1,9 +1,9 @@ let - holonixRev = "2f7b8047d6314f64fca34394a52d465c18b2f4d5"; + holonixRev = "52158409f9b76b442e592e8f06632b0e57a6c365"; holonixPath = builtins.fetchTarball "https://github.com/holochain/holonix/archive/${holonixRev}.tar.gz"; holonix = import (holonixPath) { - holochainVersionId = "v0_0_123"; + holochainVersionId = "v0_0_127"; }; nixpkgs = holonix.pkgs; in nixpkgs.mkShell { From 08476bdb32e9bdeef8ad0fdd29c7a76661285519 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:32:09 +1000 Subject: [PATCH 034/181] update hdk revs in (outdated) module templates --- _templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t | 2 +- _templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t | 2 +- _templates/init-zome/new-index-zome/zome_Cargo.toml.t | 2 +- _templates/init-zome/new-zome/zome_Cargo.toml.t | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t index e17beec55..896d5d906 100644 --- a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = 
"0.0.122" +hdk = "0.0.123" hc_zome_<%= h.changeCase.snake(zome_name) %>_storage = { path = "../storage" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t index e94d95493..3567661b0 100644 --- a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t @@ -9,7 +9,7 @@ edition = "2018" [dependencies] # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_records = { path = "../../../lib/hdk_records" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t index a2f11b184..36576a954 100644 --- a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hdk_records = { path = "../../../../../lib/hdk_records" } vf_attributes_hdk = { path = "../../../../../lib/vf_attributes_hdk" } diff --git a/_templates/init-zome/new-zome/zome_Cargo.toml.t b/_templates/init-zome/new-zome/zome_Cargo.toml.t index ddad4de57..e2bea001a 100644 --- a/_templates/init-zome/new-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.122" +hdk = "0.0.123" hc_zome_<%= h.changeCase.snake(zome_name) %>_defs = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/defs" } hc_zome_<%= h.changeCase.snake(zome_name) %>_rpc = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/rpc" } From 20aad9e930f6fc7f64256b95071fd4e474bc7458 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:32:38 +1000 Subject: [PATCH 035/181] update for SignedHeaderHashed API change --- lib/hdk_records/src/record_helpers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index bc25c4639..4585e887d 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -31,7 +31,7 @@ use crate::{ /// Helper to retrieve the HeaderHash for an Element /// fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { - shh.header_hashed().as_hash().to_owned() + shh.as_hash().to_owned() } //--------------------------------[ READ ]-------------------------------------- From ec8f2446ee9bd3af52e162f80a7d57fc50464a60 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:32:49 +1000 Subject: [PATCH 036/181] fix Unit fields not being public --- lib/vf_measurement/src/lib.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/vf_measurement/src/lib.rs b/lib/vf_measurement/src/lib.rs index 8d966f5fd..10d76f158 100644 --- a/lib/vf_measurement/src/lib.rs +++ b/lib/vf_measurement/src/lib.rs @@ -9,9 +9,9 @@ use vf_attributes_hdk::UnitId; #[derive(Debug, Clone)] pub struct Unit { - id: UnitId, - name: Option, - symbol: Option, + pub id: UnitId, + pub name: Option, + pub symbol: Option, } #[derive(Clone, PartialEq, Serialize, Deserialize, SerializedBytes, Debug)] From b429b2ff5557b66992ff9412f7e64830db459b92 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:33:21 +1000 Subject: [PATCH 037/181] 
update existing entry field validation to use new validations API --- zomes/rea_commitment/zome/src/lib.rs | 19 ++++++++++++------- zomes/rea_economic_event/zome_api/src/lib.rs | 19 ++++++++++++------- .../rea_economic_resource/zome_api/src/lib.rs | 19 ++++++++++++------- zomes/rea_intent/zome/src/lib.rs | 19 ++++++++++++------- 4 files changed, 48 insertions(+), 28 deletions(-) diff --git a/zomes/rea_commitment/zome/src/lib.rs b/zomes/rea_commitment/zome/src/lib.rs index 2aeca4c0e..6727ef1bc 100644 --- a/zomes/rea_commitment/zome/src/lib.rs +++ b/zomes/rea_commitment/zome/src/lib.rs @@ -19,14 +19,19 @@ use hc_zome_rea_commitment_storage::*; use hc_zome_rea_commitment_storage_consts::*; #[hdk_extern] -fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; +fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } +} +fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(event_storage) => { let record = event_storage.entry(); diff --git a/zomes/rea_economic_event/zome_api/src/lib.rs b/zomes/rea_economic_event/zome_api/src/lib.rs index 8f3d9d696..c26c0af1a 100644 --- a/zomes/rea_economic_event/zome_api/src/lib.rs +++ b/zomes/rea_economic_event/zome_api/src/lib.rs @@ -67,14 +67,19 @@ macro_rules! declare_economic_event_zome_api { macro_rules! declare_economic_event_zome_validation_defaults { ( /*$zome_api:ty*/ ) => { #[hdk_extern] - fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; + fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } + } + fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(event_storage) => { let record = event_storage.entry(); diff --git a/zomes/rea_economic_resource/zome_api/src/lib.rs b/zomes/rea_economic_resource/zome_api/src/lib.rs index c43a0162f..8b7876e47 100644 --- a/zomes/rea_economic_resource/zome_api/src/lib.rs +++ b/zomes/rea_economic_resource/zome_api/src/lib.rs @@ -82,14 +82,19 @@ macro_rules! declare_economic_resource_zome_api { macro_rules! 
declare_economic_resource_zome_validation_defaults { ( /*$zome_api:ty*/ ) => { #[hdk_extern] - fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; + fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. } => Ok(ValidateCallbackResult::Valid), + } + } + fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(resource_storage) => { let record = resource_storage.entry(); diff --git a/zomes/rea_intent/zome/src/lib.rs b/zomes/rea_intent/zome/src/lib.rs index 68367f915..f47d3a758 100644 --- a/zomes/rea_intent/zome/src/lib.rs +++ b/zomes/rea_intent/zome/src/lib.rs @@ -15,14 +15,19 @@ use hc_zome_rea_intent_storage::*; use hc_zome_rea_intent_storage_consts::*; #[hdk_extern] -fn validate(validation_data: ValidateData) -> ExternResult { - let element = validation_data.element; - let entry = element.into_inner().1; - let entry = match entry { - ElementEntry::Present(e) => e, - _ => return Ok(ValidateCallbackResult::Valid), - }; +fn validate(op: Op) -> ExternResult { + match op { + Op::StoreElement { .. } => Ok(ValidateCallbackResult::Valid), + Op::StoreEntry { entry, .. } => validate_entry(entry), + Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDeleteLink { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterUpdate { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterDelete { .. } => Ok(ValidateCallbackResult::Valid), + Op::RegisterAgentActivity { .. 
} => Ok(ValidateCallbackResult::Valid), + } +} +fn validate_entry(entry: Entry) -> ExternResult { match EntryStorage::try_from(&entry) { Ok(event_storage) => { let record = event_storage.entry(); From 6b8cebd6c8099caaa9db5bde2361bfbfd2b04a47 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:42:08 +1000 Subject: [PATCH 038/181] fix method binding for all_agents query, though it doesn't yet seem to be working anyway --- modules/vf-graphql-holochain/queries/agent.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vf-graphql-holochain/queries/agent.ts b/modules/vf-graphql-holochain/queries/agent.ts index 06686c403..8e45ba31d 100644 --- a/modules/vf-graphql-holochain/queries/agent.ts +++ b/modules/vf-graphql-holochain/queries/agent.ts @@ -16,7 +16,7 @@ import { export default (dnaConfig: DNAIdMappings, conductorUri: string) => { const readMyAgent = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_my_agent_pubkey') - const readAllAgents = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_registered_agents') + const readAllAgents = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'get_registered') // special 'true' at the end is for skipEncodeDecode, because of the way this zome handles serialization and inputs // which is different from others const agentExists = mapZomeFn(dnaConfig, conductorUri, 'agent', 'agent_registration', 'is_registered', true) From 8a7b287a8827e6fb9681dc01e6f4cc0b08e80755 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 13:43:03 +1000 Subject: [PATCH 039/181] add todo --- modules/vf-graphql-holochain/queries/agent.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/vf-graphql-holochain/queries/agent.ts b/modules/vf-graphql-holochain/queries/agent.ts index 8e45ba31d..883265d74 100644 --- a/modules/vf-graphql-holochain/queries/agent.ts +++ b/modules/vf-graphql-holochain/queries/agent.ts @@ -35,6 +35,8 @@ export default (dnaConfig: DNAIdMappings, conductorUri: string) => { } }), + // :TODO: this and the associated functionality in 'get_registered' needs to be revisited + // or potentially integrated from other projects affording similar functionality. 
agents: async (root, args): Promise => { return (await readAllAgents(null)).map(agentAddress => ({ // :TODO: wire to Personas hApp From 6c2aa53ee93bc1ace39554987e99d5eea992a070 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 3 Mar 2022 14:26:30 +1000 Subject: [PATCH 040/181] fix EconomicResource missing name field --- zomes/rea_economic_event/rpc/src/lib.rs | 3 +++ zomes/rea_economic_resource/storage/src/lib.rs | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/zomes/rea_economic_event/rpc/src/lib.rs b/zomes/rea_economic_event/rpc/src/lib.rs index 9f2e6000a..dbefe3480 100644 --- a/zomes/rea_economic_event/rpc/src/lib.rs +++ b/zomes/rea_economic_event/rpc/src/lib.rs @@ -271,6 +271,9 @@ impl<'a> CreateRequest { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] #[serde(rename_all = "camelCase")] pub struct ResourceCreateRequest { + #[serde(default)] + pub name: MaybeUndefined, + #[serde(default)] pub conforms_to: MaybeUndefined, #[serde(default)] pub tracking_identifier: MaybeUndefined, diff --git a/zomes/rea_economic_resource/storage/src/lib.rs b/zomes/rea_economic_resource/storage/src/lib.rs index 444ccb24a..05fd761b9 100644 --- a/zomes/rea_economic_resource/storage/src/lib.rs +++ b/zomes/rea_economic_resource/storage/src/lib.rs @@ -54,6 +54,7 @@ pub struct EconomicResourceZomeConfig { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] pub struct EntryData { + pub name: Option, pub conforms_to: Option, pub classified_as: Option>, pub tracking_identifier: Option, @@ -89,6 +90,7 @@ impl From for EntryData { let r = t.resource; let e = t.event; EntryData { + name: r.name.to_option(), conforms_to: conforming.clone(), classified_as: if e.resource_classified_as == MaybeUndefined::Undefined { None } else { e.resource_classified_as.to_owned().to_option() }, tracking_identifier: if r.tracking_identifier == MaybeUndefined::Undefined { None } else { r.tracking_identifier.to_owned().to_option() }, @@ -157,6 +159,7 @@ fn get_default_unit_for_specification(specification_id: ResourceSpecificationAdd impl Updateable for EntryData { fn update_with(&self, e: UpdateRequest) -> EntryData { EntryData { + name: self.name.to_owned(), conforms_to: self.conforms_to.to_owned(), classified_as: if e.classified_as == MaybeUndefined::Undefined { self.classified_as.to_owned() } else { e.classified_as.to_owned().to_option() }, tracking_identifier: self.tracking_identifier.to_owned(), @@ -183,6 +186,7 @@ impl Updateable for EntryData { fn update_with(&self, e: EventCreateRequest) -> EntryData { EntryData { + name: self.name.to_owned(), conforms_to: self.conforms_to.to_owned(), classified_as: { if let MaybeUndefined::Some(classified_as) = e.resource_classified_as.to_owned() { From d6092c7407731dc575287a3ce0f26bd7f958d030 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 9 Mar 2022 17:49:19 +1000 Subject: [PATCH 041/181] update clean commands for latest Holonix --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 17d0a976d..f4272c558 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ "test:integration": "cd test && npm test", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", - "clean:build": "nix-shell --run hn-flush && rm happs/**/*.dna" + "clean:build": "nix-shell --run hn-node-flush && nix-shell --run hn-rust-flush && rm happs/**/*.dna" }, "watch": { "test:unit": { From 834b66f951e7fc9fcc49cda9032dbde3920ad328 Mon Sep 17 00:00:00
2001 From: pospi Date: Thu, 10 Mar 2022 11:00:06 +1000 Subject: [PATCH 042/181] make test package private to prevent workspace-wide publish from including it --- test/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/test/package.json b/test/package.json index 07f3ef523..7870240c2 100644 --- a/test/package.json +++ b/test/package.json @@ -1,6 +1,7 @@ { "name": "@valueflows/holo-rea-tests", "version": "0.0.1", + "private": true, "description": "Integration tests for HoloREA DHTs", "main": "index.js", "scripts": { From 74cc4bf896b03de0ccf9d74d1ad8af1a821ce711 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:20:44 +1000 Subject: [PATCH 043/181] fix PNPM package refs to interpret build folders correctly --- modules/graphql-client/package.json | 5 ++++- modules/vf-graphql-holochain/package.json | 3 +++ pnpm-workspace.yaml | 3 ++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 87c617e29..89288f4bc 100644 --- a/modules/graphql-client/package.json +++ b/modules/graphql-client/package.json @@ -27,8 +27,11 @@ "url": "https://github.com/holo-rea/holo-rea/issues" }, "homepage": "https://github.com/holo-rea/holo-rea#readme", + "publishConfig": { + "directory": "build" + }, "dependencies": { - "@valueflows/vf-graphql-holochain": "link:../vf-graphql-holochain/build", + "@valueflows/vf-graphql-holochain": "workspace:*", "@apollo/link-schema": "^2.0.0-beta.3", "tslib": "^2.0.0" }, diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index d366a9b75..3e0189565 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -32,6 +32,9 @@ "url": "https://github.com/holo-rea/holo-rea/issues" }, "homepage": "https://github.com/holo-rea/holo-rea#readme", + "publishConfig": { + "directory": "build" + }, "dependencies": { "buffer": "^6.0.3", "@graphql-tools/schema": "^8.3.1", diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index a98ebe729..e647d095e 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,5 +1,6 @@ packages: - 'apps/**' - 'example/**' - - 'modules/**' + - 'modules/graphql-client' + - 'modules/vf-graphql-holochain' - 'test/**' From 6ffdd165db833e254dac6d3498772419e3fed769 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:23:59 +1000 Subject: [PATCH 044/181] remove build folders for node modules when cleaning --- scripts/clean-modules.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/clean-modules.sh b/scripts/clean-modules.sh index a4891fc3f..4931d2708 100755 --- a/scripts/clean-modules.sh +++ b/scripts/clean-modules.sh @@ -8,6 +8,7 @@ ## rm -Rf node_modules +rm -Rf modules/**/build for DIR in $(find -type d -iname node_modules); do echo " Remove $DIR" From ce2605fbfb87eacc080baf9e0045b7ac03635ffe Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:24:55 +1000 Subject: [PATCH 045/181] update lockfile, client now refs unbuilt package but *hopefully* PNPM will resolve this --- pnpm-lock.yaml | 64 ++++---------------------------------------------- 1 file changed, 4 insertions(+), 60 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 53f814b6b..644f5d582 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -33,7 +33,7 @@ importers: typescript: 4.5.5 dependencies: '@apollo/client': 3.5.8_graphql@16.3.0+react@16.13.1 - '@valueflows/vf-graphql-holochain': link:../../modules/vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': 
link:../../modules/vf-graphql-holochain graphiql: 1.5.16_355f8d31a5628c68d7d9f31bdf4bf39a graphiql-explorer: 0.6.3_660a82361cf03a823b6e74d2a302c521 graphql: 16.3.0 @@ -55,33 +55,14 @@ importers: specifiers: '@apollo/client': ^3.5.7 '@apollo/link-schema': ^2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain/build - tslib: ^2.0.0 - tslint: 5.16.0 - tslint-config-standard: 8.0.1 - typescript: 4.5.5 - dependencies: - '@apollo/link-schema': 2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain/build - tslib: 2.3.1 - devDependencies: - '@apollo/client': 3.5.8 - tslint: 5.16.0_typescript@4.5.5 - tslint-config-standard: 8.0.1_tslint@5.16.0+typescript@4.5.5 - typescript: 4.5.5 - - modules/graphql-client/build: - specifiers: - '@apollo/client': ^3.5.7 - '@apollo/link-schema': ^2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../../vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': workspace:* tslib: ^2.0.0 tslint: 5.16.0 tslint-config-standard: 8.0.1 typescript: 4.5.5 dependencies: '@apollo/link-schema': 2.0.0-beta.3 - '@valueflows/vf-graphql-holochain': link:../../vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': link:../vf-graphql-holochain tslib: 2.3.1 devDependencies: '@apollo/client': 3.5.8 @@ -126,43 +107,6 @@ importers: tslint-config-standard: 8.0.1_tslint@5.16.0+typescript@4.5.5 typescript: 4.5.5 - modules/vf-graphql-holochain/build: - specifiers: - '@graphql-tools/schema': ^8.3.1 - '@graphql-tools/utils': ^8.6.1 - '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2 - buffer: ^6.0.3 - dataloader: ^1.4.0 - deep-for-each: ^3.0.0 - fecha: ^4.1.0 - graphql: ^16.2.0 - graphql-scalars: ^1.14.1 - is-object: ^1.0.2 - js-base64: ^3.6.0 - tslib: 1.10.0 - tslint: 5.16.0 - tslint-config-standard: 8.0.1 - typescript: 4.5.5 - dependencies: - '@graphql-tools/schema': 8.3.1_graphql@16.3.0 - '@graphql-tools/utils': 8.6.1_graphql@16.3.0 - '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 - buffer: 6.0.3 - dataloader: 1.4.0 - deep-for-each: 3.0.0 - fecha: 4.2.1 - graphql-scalars: 1.14.1_graphql@16.3.0 - is-object: 1.0.2 - js-base64: 3.7.2 - devDependencies: - graphql: 16.3.0 - tslib: 1.10.0 - tslint: 5.16.0_typescript@4.5.5 - tslint-config-standard: 8.0.1_tslint@5.16.0+typescript@4.5.5 - typescript: 4.5.5 - test: specifiers: '@holochain-playground/cli': 0.0.8 @@ -192,7 +136,7 @@ importers: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 - '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain/build + '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain deep-for-each: 3.0.0 easygraphql-tester: 6.0.1_graphql@16.3.0 eslint: 5.16.0 From 6faabb11a1b8ee87543c9a65c3bc0604cab0fdf2 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:25:19 +1000 Subject: [PATCH 046/181] add publishing instructions for node packages to developer docs --- docs/README.md | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/docs/README.md b/docs/README.md index 19b725f83..d0e7d3d22 100644 --- a/docs/README.md +++ b/docs/README.md @@ -3,7 +3,7 @@ - [Quick start](#quick-start) - - [Install Required Binaries](#install-required-binaries) + - [Install required binaries](#install-required-binaries) - [Setup the project](#setup-the-project) - [Running](#running) - [Contributing](#contributing) @@ -16,6 +16,8 @@ - [File headers](#file-headers) - [Known 
issues](#known-issues) - [Gotchas](#gotchas) +- [Publishing](#publishing) + - [Publishing Node packages](#publishing-node-packages) - [Multi-project setup](#multi-project-setup) @@ -182,6 +184,20 @@ You can configure your editor to automatically add new header comment blocks to - These errors are often encountered when confusing cross-DNA link fields for same-DNA links. Check that you are using the appropriate helpers for the link type (`_index` vs `_remote_index` helpers). +## Publishing + +### Publishing Node packages + +The JavaScript API client modules are published to NPM with PNPM. **You must use PNPM** to publish these, since packages contain PNPM-specific workspace metadata that NPM does not know how to deal with. + +- Ensure all packages requiring publication have their `version` field in `package.json` updated to reflect the next version to be published. +- Ensure a successful `pnpm run build` completes after the version updates are made. +- Run `pnpm -r publish --access public` from the root directory to publish all packages with new versions. + +TODO: instructions for publishing Rust crates + +TODO: instructions for publishing built DNA & zome artifacts to Holochain Devhub + ## Multi-project setup From 1b1f757d25c2b6366e148ef203a54a814ae4a883 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:30:04 +1000 Subject: [PATCH 047/181] mark node packages as non-private, publishConfig.directory handles the src/dist separation now --- modules/graphql-client/package.json | 1 - modules/vf-graphql-holochain/package.json | 1 - 2 files changed, 2 deletions(-) diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 89288f4bc..84fe8e727 100644 --- a/modules/graphql-client/package.json +++ b/modules/graphql-client/package.json @@ -1,6 +1,5 @@ { "name": "@vf-ui/graphql-client-holochain", - "private": true, "version": "0.0.1-alpha.3", "description": "ValueFlows GraphQLClient configurations, providing pluggable backend datasources for different distributed, federated and client/server infrastructure.", "main": "index.ts", diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index 3e0189565..bc5280d56 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -1,7 +1,6 @@ { "name": "@valueflows/vf-graphql-holochain", "version": "0.0.1-alpha.3", - "private": true, "main": "build/index.js", "types": "build/index.d.ts", "description": "GraphQL schema bindings for the Holochain implementation of ValueFlows", From 7c37c49ebc8e10b50c5ca78d90844c97745bdc6b Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:42:57 +1000 Subject: [PATCH 048/181] update publishConfigs and remove manual package file munging which PNPM should handle itself --- modules/graphql-client/finish-build.js | 11 +---------- modules/graphql-client/package.json | 7 +++++-- modules/vf-graphql-holochain/finish-build.js | 9 +-------- modules/vf-graphql-holochain/package.json | 4 +++- 4 files changed, 10 insertions(+), 21 deletions(-) diff --git a/modules/graphql-client/finish-build.js b/modules/graphql-client/finish-build.js index cbaebdfd5..082617293 100644 --- a/modules/graphql-client/finish-build.js +++ b/modules/graphql-client/finish-build.js @@ -11,17 +11,8 @@ const fs = require('fs') const path = require('path') -fs.copyFileSync(path.resolve(__dirname, '../../LICENSE'), path.resolve(__dirname, './build/LICENSE')) fs.copyFileSync(path.resolve(__dirname, './README.md'), 
path.resolve(__dirname, './build/README.md')) -fs.copyFileSync(path.resolve(__dirname, './package.json'), path.resolve(__dirname, './build/package.json')) -const packageJson = require(path.resolve(__dirname, './build/package.json')) - -delete packageJson['private'] -delete packageJson['main'] -delete packageJson['types'] +const packageJson = require(path.resolve(__dirname, './package.json')) delete packageJson.scripts['prepare'] -packageJson.dependencies['@valueflows/vf-graphql-holochain'] = packageJson.dependencies['@valueflows/vf-graphql-holochain'].replace('../', '../../') - fs.writeFileSync(path.resolve(__dirname, './build/package.json'), JSON.stringify(packageJson, undefined, " ")) - diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 84fe8e727..21279037e 100644 --- a/modules/graphql-client/package.json +++ b/modules/graphql-client/package.json @@ -2,7 +2,8 @@ "name": "@vf-ui/graphql-client-holochain", "version": "0.0.1-alpha.3", "description": "ValueFlows GraphQLClient configurations, providing pluggable backend datasources for different distributed, federated and client/server infrastructure.", - "main": "index.ts", + "main": "build/index.js", + "types": "build/index.d.ts", "scripts": { "prepare": "npm run build", "build": "tsc -p ./tsconfig.dev.json; node ./finish-build", @@ -27,7 +28,9 @@ }, "homepage": "https://github.com/holo-rea/holo-rea#readme", "publishConfig": { - "directory": "build" + "directory": "build", + "main": "index.js", + "types": "index.d.ts" }, "dependencies": { "@valueflows/vf-graphql-holochain": "workspace:*", diff --git a/modules/vf-graphql-holochain/finish-build.js b/modules/vf-graphql-holochain/finish-build.js index b7b01b030..082617293 100644 --- a/modules/vf-graphql-holochain/finish-build.js +++ b/modules/vf-graphql-holochain/finish-build.js @@ -11,15 +11,8 @@ const fs = require('fs') const path = require('path') -fs.copyFileSync(path.resolve(__dirname, '../../LICENSE'), path.resolve(__dirname, './build/LICENSE')) fs.copyFileSync(path.resolve(__dirname, './README.md'), path.resolve(__dirname, './build/README.md')) -fs.copyFileSync(path.resolve(__dirname, './package.json'), path.resolve(__dirname, './build/package.json')) -const packageJson = require(path.resolve(__dirname, './build/package.json')) - -delete packageJson['private'] -delete packageJson['main'] -delete packageJson['types'] +const packageJson = require(path.resolve(__dirname, './package.json')) delete packageJson.scripts['prepare'] - fs.writeFileSync(path.resolve(__dirname, './build/package.json'), JSON.stringify(packageJson, undefined, " ")) diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index bc5280d56..e057b6875 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -32,7 +32,9 @@ }, "homepage": "https://github.com/holo-rea/holo-rea#readme", "publishConfig": { - "directory": "build" + "directory": "build", + "main": "index.js", + "types": "index.d.ts" }, "dependencies": { "buffer": "^6.0.3", From be90c34bc7bee8d25210136d9e2133519bd00de3 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:48:09 +1000 Subject: [PATCH 049/181] swap out workspace protocol ref in built version of GraphQL client module for actual version of module this seems dumb. Why doesn't PNPM do this for us? 
--- modules/graphql-client/finish-build.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/graphql-client/finish-build.js b/modules/graphql-client/finish-build.js index 082617293..01a88e8b8 100644 --- a/modules/graphql-client/finish-build.js +++ b/modules/graphql-client/finish-build.js @@ -14,5 +14,9 @@ const path = require('path') fs.copyFileSync(path.resolve(__dirname, './README.md'), path.resolve(__dirname, './build/README.md')) const packageJson = require(path.resolve(__dirname, './package.json')) +const dependentPackageJson = require(path.resolve(__dirname, '../vf-graphql-holochain/package.json')) + delete packageJson.scripts['prepare'] +packageJson.dependencies['@valueflows/vf-graphql-holochain'] = dependentPackageJson.version + fs.writeFileSync(path.resolve(__dirname, './build/package.json'), JSON.stringify(packageJson, undefined, " ")) From 9daea44f078d5af0b8076170a5cf5e4202050ec7 Mon Sep 17 00:00:00 2001 From: pospi Date: Thu, 10 Mar 2022 11:55:05 +1000 Subject: [PATCH 050/181] update readme for GraphQL client module --- modules/graphql-client/README.md | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/modules/graphql-client/README.md b/modules/graphql-client/README.md index e42aab67f..30dda73e7 100644 --- a/modules/graphql-client/README.md +++ b/modules/graphql-client/README.md @@ -23,12 +23,6 @@ In a [Svelte](https://svelte.dev/) application, simple app initialisation logic import App from './my-happ-ui' - // define connection params - const conductorUri = process.env.REACT_APP_HC_CONN_URL || 'ws://localhost:4001' - const dnaConfig = { - // :TODO: determine appropriate `CellId`s by interrogating admin websocket - } - // init and manage GraphQL client connection let client = null let loading = true @@ -44,9 +38,17 @@ In a [Svelte](https://svelte.dev/) application, simple app initialisation logic error = null } + // Omit these options for connecting via the Holochain Launcher. + // During development, you can provide them as follows: initConnection({ - conductorUri, - dnaConfig, + // A websocket URI to connect to the Holochain Conductor on: + // conductorUri, + + // Mapping of hREA module IDs to Holochain CellIds. If omitted, + // the client will attempt to sniff them by inspecting the names + // of active app cells. Any cell with a known 'hrea_*_X' format + // will be matched.
+ // dnaConfig, }) // workaround to set the context outside of init action From b9dbacdcf51a2965e1654b07866c0f02c2c45c55 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 11 Mar 2022 10:51:02 +1000 Subject: [PATCH 051/181] downgrade GraphQL in tests, as the newer version seems to cause errors with easygraphql-tester not understanding schema types --- test/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/package.json b/test/package.json index 7870240c2..ba10672b7 100644 --- a/test/package.json +++ b/test/package.json @@ -22,7 +22,7 @@ "eslint-plugin-promise": "^4.1.1", "eslint-plugin-standard": "^4.0.0", "faucet": "^0.0.1", - "graphql": "^16.2.0", + "graphql": "15.8.0", "is-function": "^1.0.1", "json3": "^3.3.2", "source-map-support": "^0.5.16", From 161951dfd3d1290ee3330c9a196e7da10db578ff Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 11 Mar 2022 11:11:06 +1000 Subject: [PATCH 052/181] fix casing of parameter names in EconomicResource tests --- test/economic-resource/resource_links.js | 6 +++--- test/economic-resource/resource_logic.js | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/test/economic-resource/resource_links.js b/test/economic-resource/resource_links.js index 61ed4cbc7..a3cdd22fe 100644 --- a/test/economic-resource/resource_links.js +++ b/test/economic-resource/resource_links.js @@ -33,7 +33,7 @@ runner.registerScenario('EconomicResource composition / containment functionalit note: 'container resource', conformsTo: resourceSpecificationId, } - const cResp1 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent, new_inventoried_resource: inputResource }) + const cResp1 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent, newInventoriedResource: inputResource }) await s.consistency() const event1 = cResp1.economicEvent const resource1 = cResp1.economicResource @@ -52,7 +52,7 @@ runner.registerScenario('EconomicResource composition / containment functionalit conformsTo: resourceSpecificationId, note: 'internal resource', } - const cResp2 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent2, new_inventoried_resource: inputResource2 }) + const cResp2 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent2, newInventoriedResource: inputResource2 }) await s.consistency() t.ok(cResp2.economicResource, 'internal resource created successfully') const resource2 = cResp2.economicResource @@ -80,7 +80,7 @@ runner.registerScenario('EconomicResource composition / containment functionalit conformsTo: resourceSpecificationId, note: 'another internal resource', } - const cResp3 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent3, new_inventoried_resource: inputResource3 }) + const cResp3 = await alice.call('economic_event', 'create_economic_event', { event: inputEvent3, newInventoriedResource: inputResource3 }) await s.consistency() t.ok(cResp3.economicResource, 'additional internal resource created successfully') const resource3 = cResp3.economicResource diff --git a/test/economic-resource/resource_logic.js b/test/economic-resource/resource_logic.js index 798e1757b..36d6bdb8f 100644 --- a/test/economic-resource/resource_logic.js +++ b/test/economic-resource/resource_logic.js @@ -77,7 +77,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', note: 'test resource observed in inventory', conformsTo: resourceSpecificationId, } - const cResp1 = await observation.call('economic_event',
'create_economic_event', { event: inputEvent, new_inventoried_resource: inputResource }) + const cResp1 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent, newInventoriedResource: inputResource }) await s.consistency() const inputEventDest = { @@ -91,7 +91,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', const inputResourceDest = { note: 'destination resource for move target', } - const dResp = await observation.call('economic_event', 'create_economic_event', { event: inputEventDest, new_inventoried_resource: inputResourceDest }) + const dResp = await observation.call('economic_event', 'create_economic_event', { event: inputEventDest, newInventoriedResource: inputResourceDest }) await s.consistency() t.ok(dResp.economicEvent, 'destination inventory created successfully') const destResourceId = dResp.economicResource.id @@ -225,7 +225,7 @@ runner.registerScenario('EconomicResource & EconomicEvent record interactions', note: 'receiver test resource', conformsTo: resourceSpecificationId, } - const cResp2 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent2, new_inventoried_resource: inputResource2 }) + const cResp2 = await observation.call('economic_event', 'create_economic_event', { event: inputEvent2, newInventoriedResource: inputResource2 }) await s.consistency() const event2 = cResp2.economicEvent const resource2 = cResp2.economicResource From c5707f51e8373c4e0c3fd549a81d6cd9df2a8ac1 Mon Sep 17 00:00:00 2001 From: Jillian Burrows Date: Mon, 14 Mar 2022 12:32:01 -0700 Subject: [PATCH 053/181] Add pnpm to nix-shell environment. --- default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/default.nix b/default.nix index 5c0baee48..3dcdbf638 100644 --- a/default.nix +++ b/default.nix @@ -12,5 +12,6 @@ in nixpkgs.mkShell { # :TODO: binaryen, wasm-opt? 
# Additional packages go here nodejs-16_x + nodePackages.pnpm ]; } From 07382a7c59e911006b4b21e82544e922b070b12b Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 15 Mar 2022 20:45:19 +1000 Subject: [PATCH 054/181] update lockfile --- pnpm-lock.yaml | 107 +++++++++++++++++++++++++++++++++++-------------- 1 file changed, 78 insertions(+), 29 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 644f5d582..ba5406fc0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -122,7 +122,7 @@ importers: eslint-plugin-promise: ^4.1.1 eslint-plugin-standard: ^4.0.0 faucet: ^0.0.1 - graphql: ^16.2.0 + graphql: 15.8.0 is-function: ^1.0.1 js-base64: ^3.6.0 json3: ^3.3.2 @@ -135,10 +135,10 @@ importers: devDependencies: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 + '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@15.8.0 '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain deep-for-each: 3.0.0 - easygraphql-tester: 6.0.1_graphql@16.3.0 + easygraphql-tester: 6.0.1_graphql@15.8.0 eslint: 5.16.0 eslint-config-standard: 12.0.0_68c275d3ee18e545b4013e907c923945 eslint-plugin-import: 2.25.4_eslint@5.16.0 @@ -146,7 +146,7 @@ importers: eslint-plugin-promise: 4.3.1 eslint-plugin-standard: 4.1.0_eslint@5.16.0 faucet: 0.0.1 - graphql: 16.3.0 + graphql: 15.8.0 is-function: 1.0.2 json3: 3.3.3 source-map-support: 0.5.21 @@ -1766,14 +1766,24 @@ packages: tslib: 2.3.1 dev: false - /@graphql-tools/merge/6.2.17_graphql@16.3.0: + /@graphql-tools/merge/6.2.17_graphql@15.8.0: resolution: {integrity: sha512-G5YrOew39fZf16VIrc49q3c8dBqQDD0ax5LYPiNja00xsXDi0T9zsEWVt06ApjtSdSF6HDddlu5S12QjeN8Tow==} peerDependencies: graphql: ^14.0.0 || ^15.0.0 dependencies: - '@graphql-tools/schema': 8.3.1_graphql@16.3.0 - '@graphql-tools/utils': 8.0.2_graphql@16.3.0 - graphql: 16.3.0 + '@graphql-tools/schema': 8.3.1_graphql@15.8.0 + '@graphql-tools/utils': 8.0.2_graphql@15.8.0 + graphql: 15.8.0 + tslib: 2.3.1 + dev: true + + /@graphql-tools/merge/8.2.1_graphql@15.8.0: + resolution: {integrity: sha512-Q240kcUszhXiAYudjuJgNuLgy9CryDP3wp83NOZQezfA6h3ByYKU7xI6DiKrdjyVaGpYN3ppUmdj0uf5GaXzMA==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-tools/utils': 8.6.1_graphql@15.8.0 + graphql: 15.8.0 tslib: 2.3.1 dev: true @@ -1785,6 +1795,19 @@ packages: '@graphql-tools/utils': 8.6.1_graphql@16.3.0 graphql: 16.3.0 tslib: 2.3.1 + dev: false + + /@graphql-tools/schema/8.3.1_graphql@15.8.0: + resolution: {integrity: sha512-3R0AJFe715p4GwF067G5i0KCr/XIdvSfDLvTLEiTDQ8V/hwbOHEKHKWlEBHGRQwkG5lwFQlW1aOn7VnlPERnWQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-tools/merge': 8.2.1_graphql@15.8.0 + '@graphql-tools/utils': 8.6.1_graphql@15.8.0 + graphql: 15.8.0 + tslib: 2.3.1 + value-or-promise: 1.0.11 + dev: true /@graphql-tools/schema/8.3.1_graphql@16.3.0: resolution: {integrity: sha512-3R0AJFe715p4GwF067G5i0KCr/XIdvSfDLvTLEiTDQ8V/hwbOHEKHKWlEBHGRQwkG5lwFQlW1aOn7VnlPERnWQ==} @@ -1796,6 +1819,7 @@ packages: graphql: 16.3.0 tslib: 2.3.1 value-or-promise: 1.0.11 + dev: false /@graphql-tools/url-loader/7.7.1_82e2361edcf6fd636da034b389a21d41: resolution: {integrity: sha512-K/5amdeHtKYI976HVd/AXdSNvLL7vx5QVjMlwN0OHeYyxSgC+UOH+KkS7cshYgL13SekGu0Mxbg9ABfgQ34ECA==} @@ -1829,12 +1853,21 @@ packages: - utf-8-validate dev: false - /@graphql-tools/utils/8.0.2_graphql@16.3.0: + /@graphql-tools/utils/8.0.2_graphql@15.8.0: resolution: {integrity: 
sha512-gzkavMOgbhnwkHJYg32Adv6f+LxjbQmmbdD5Hty0+CWxvaiuJq+nU6tzb/7VSU4cwhbNLx/lGu2jbCPEW1McZQ==} peerDependencies: graphql: ^14.0.0 || ^15.0.0 dependencies: - graphql: 16.3.0 + graphql: 15.8.0 + tslib: 2.3.1 + dev: true + + /@graphql-tools/utils/8.6.1_graphql@15.8.0: + resolution: {integrity: sha512-uxcfHCocp4ENoIiovPxUWZEHOnbXqj3ekWc0rm7fUhW93a1xheARNHcNKhwMTR+UKXVJbTFQdGI1Rl5XdyvDBg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + graphql: 15.8.0 tslib: 2.3.1 dev: true @@ -1845,6 +1878,7 @@ packages: dependencies: graphql: 16.3.0 tslib: 2.3.1 + dev: false /@graphql-tools/wrap/8.3.3_graphql@16.3.0: resolution: {integrity: sha512-TpXN1S4Cv+oMA1Zsg9Nu4N9yrFxLuJkX+CTtSRrrdfETGHIxqfyDkm5slPDCckxP+RILA00g8ny2jzsYyNvX1w==} @@ -2838,6 +2872,15 @@ packages: eslint-visitor-keys: 3.2.0 dev: true + /@valueflows/vf-graphql/0.9.0-alpha.2_graphql@15.8.0: + resolution: {integrity: sha512-vwO+6srw8/uiQ/VjSNOJVWoDJnlUpLyrq/EWw7Q6OjR2mKmIsHdE8RDeOvJKs1fGuDiTfxM4Nw07jb2pjymbvg==} + peerDependencies: + graphql: '>=14' + dependencies: + '@graphql-tools/merge': 8.2.1_graphql@15.8.0 + graphql: 15.8.0 + dev: true + /@valueflows/vf-graphql/0.9.0-alpha.2_graphql@16.3.0: resolution: {integrity: sha512-vwO+6srw8/uiQ/VjSNOJVWoDJnlUpLyrq/EWw7Q6OjR2mKmIsHdE8RDeOvJKs1fGuDiTfxM4Nw07jb2pjymbvg==} peerDependencies: @@ -2845,6 +2888,7 @@ packages: dependencies: '@graphql-tools/merge': 8.2.1_graphql@16.3.0 graphql: 16.3.0 + dev: false /@webassemblyjs/ast/1.11.1: resolution: {integrity: sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==} @@ -3225,26 +3269,26 @@ packages: picomatch: 2.3.1 dev: true - /apollo-link/1.2.14_graphql@16.3.0: + /apollo-link/1.2.14_graphql@15.8.0: resolution: {integrity: sha512-p67CMEFP7kOG1JZ0ZkYZwRDa369w5PIjtMjvrQd/HnIV8FRsHRqLqK+oAZQnFa1DDdZtOtHTi+aMIW6EatC2jg==} peerDependencies: graphql: ^0.11.3 || ^0.12.3 || ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - apollo-utilities: 1.3.4_graphql@16.3.0 - graphql: 16.3.0 + apollo-utilities: 1.3.4_graphql@15.8.0 + graphql: 15.8.0 ts-invariant: 0.4.4 tslib: 1.14.1 zen-observable-ts: 0.8.21 dev: true - /apollo-utilities/1.3.4_graphql@16.3.0: + /apollo-utilities/1.3.4_graphql@15.8.0: resolution: {integrity: sha512-pk2hiWrCXMAy2fRPwEyhvka+mqwzeP60Jr1tRYi5xru+3ko94HI9o6lK0CT33/w4RDlxWchmdhDCrvdr+pHCig==} peerDependencies: graphql: ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: '@wry/equality': 0.1.11 fast-json-stable-stringify: 2.1.0 - graphql: 16.3.0 + graphql: 15.8.0 ts-invariant: 0.4.4 tslib: 1.14.1 dev: true @@ -5115,33 +5159,33 @@ packages: resolution: {integrity: sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=} dev: true - /easygraphql-mock/0.1.17_graphql@16.3.0: + /easygraphql-mock/0.1.17_graphql@15.8.0: resolution: {integrity: sha512-J+OLxUfV0dw5LjMlargd9iAjtur7ifgrC0djZQgDnxXLj0g5K0JQX48cY9S95Tw4MXZP24y79w5MJtJxPcDwgQ==} dependencies: chance: 1.1.8 - easygraphql-parser: 0.0.15_graphql@16.3.0 + easygraphql-parser: 0.0.15_graphql@15.8.0 transitivePeerDependencies: - graphql dev: true - /easygraphql-parser/0.0.15_graphql@16.3.0: + /easygraphql-parser/0.0.15_graphql@15.8.0: resolution: {integrity: sha512-0fEXFnFlIjgyo1rxBmEsOa1wYZwIEm5Qk3qLR1bY4d7iMsPNIPKy4M6eohyQHYXww0v3RYWrHNoks0QnktJ9bw==} peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - '@graphql-tools/merge': 6.2.17_graphql@16.3.0 - graphql: 16.3.0 + '@graphql-tools/merge': 6.2.17_graphql@15.8.0 + graphql: 15.8.0 dev: true - /easygraphql-tester/6.0.1_graphql@16.3.0: + 
/easygraphql-tester/6.0.1_graphql@15.8.0: resolution: {integrity: sha512-Xn7wi5g8cep1QCy5wq5e7ZpfEPVUD0SnR+lwCogWXLmNnGhaw6jW9Gzmt00NSa4qyB5LHo1h6dcF+u8zYFwGrA==} peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - easygraphql-mock: 0.1.17_graphql@16.3.0 - easygraphql-parser: 0.0.15_graphql@16.3.0 - graphql: 16.3.0 - graphql-tools: 4.0.8_graphql@16.3.0 + easygraphql-mock: 0.1.17_graphql@15.8.0 + easygraphql-parser: 0.0.15_graphql@15.8.0 + graphql: 15.8.0 + graphql-tools: 4.0.8_graphql@15.8.0 lodash.isobject: 3.0.2 dev: true @@ -6734,16 +6778,16 @@ packages: tslib: 2.3.1 dev: false - /graphql-tools/4.0.8_graphql@16.3.0: + /graphql-tools/4.0.8_graphql@15.8.0: resolution: {integrity: sha512-MW+ioleBrwhRjalKjYaLQbr+920pHBgy9vM/n47sswtns8+96sRn5M/G+J1eu7IMeKWiN/9p6tmwCHU7552VJg==} deprecated: This package has been deprecated and now it only exports makeExecutableSchema.\nAnd it will no longer receive updates.\nWe recommend you to migrate to scoped packages such as @graphql-tools/schema, @graphql-tools/utils and etc.\nCheck out https://www.graphql-tools.com to learn what package you should use instead peerDependencies: graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 dependencies: - apollo-link: 1.2.14_graphql@16.3.0 - apollo-utilities: 1.3.4_graphql@16.3.0 + apollo-link: 1.2.14_graphql@15.8.0 + apollo-utilities: 1.3.4_graphql@15.8.0 deprecated-decorator: 0.1.6 - graphql: 16.3.0 + graphql: 15.8.0 iterall: 1.3.0 uuid: 3.4.0 dev: true @@ -6757,6 +6801,11 @@ packages: graphql: 16.3.0 dev: false + /graphql/15.8.0: + resolution: {integrity: sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw==} + engines: {node: '>= 10.x'} + dev: true + /graphql/16.3.0: resolution: {integrity: sha512-xm+ANmA16BzCT5pLjuXySbQVFwH3oJctUVdy81w1sV0vBU0KgDdBGtxQOUd5zqOBk/JayAFeG8Dlmeq74rjm/A==} engines: {node: ^12.22.0 || ^14.16.0 || >=16.0.0} From 66621872b5b9e8574329330906e595b6ce5afbcb Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 15 Mar 2022 21:26:52 +1000 Subject: [PATCH 055/181] update HDK --- _templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t | 2 +- _templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t | 2 +- _templates/init-zome/new-index-zome/zome_Cargo.toml.t | 2 +- _templates/init-zome/new-zome/zome_Cargo.toml.t | 2 +- .../zomes/beef_economic_event/code/Cargo.toml | 2 +- .../zomes/beef_economic_resource/code/Cargo.toml | 2 +- .../zomes/beef_resource_specification/code/Cargo.toml | 2 +- zomes/rea_action/zome/Cargo.toml | 2 +- zomes/rea_agreement/zome/Cargo.toml | 2 +- zomes/rea_agreement/zome_idx_agreement/Cargo.toml | 2 +- zomes/rea_commitment/zome/Cargo.toml | 2 +- zomes/rea_commitment/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_economic_event/zome/Cargo.toml | 2 +- zomes/rea_economic_event/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_economic_resource/zome/Cargo.toml | 2 +- zomes/rea_economic_resource/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_observation/Cargo.toml | 2 +- zomes/rea_fulfillment/zome_planning/Cargo.toml | 2 +- zomes/rea_intent/zome/Cargo.toml | 2 +- zomes/rea_intent/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_process/zome/Cargo.toml | 2 +- zomes/rea_process/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_process_specification/zome/Cargo.toml | 2 +- zomes/rea_proposal/zome/Cargo.toml | 2 +- zomes/rea_proposal/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_proposed_intent/zome/Cargo.toml | 2 
+- zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_proposed_to/zome/Cargo.toml | 2 +- zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml | 2 +- zomes/rea_resource_specification/zome/Cargo.toml | 2 +- .../zome_idx_specification/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_idx_observation/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_idx_planning/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_observation/Cargo.toml | 2 +- zomes/rea_satisfaction/zome_planning/Cargo.toml | 2 +- zomes/rea_unit/zome/Cargo.toml | 2 +- 38 files changed, 38 insertions(+), 38 deletions(-) diff --git a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t index 896d5d906..af5619797 100644 --- a/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/defs_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_<%= h.changeCase.snake(zome_name) %>_storage = { path = "../storage" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t index 3567661b0..e701e087d 100644 --- a/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t +++ b/_templates/init-zome/mixin-zome-libs/lib_Cargo.toml.t @@ -9,7 +9,7 @@ edition = "2018" [dependencies] # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_records = { path = "../../../lib/hdk_records" } hc_zome_<%= h.changeCase.snake(zome_name) %>_storage_consts = { path = "../storage_consts" } diff --git a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t index 36576a954..ed173043f 100644 --- a/_templates/init-zome/new-index-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-index-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_records = { path = "../../../../../lib/hdk_records" } vf_attributes_hdk = { path = "../../../../../lib/vf_attributes_hdk" } diff --git a/_templates/init-zome/new-zome/zome_Cargo.toml.t b/_templates/init-zome/new-zome/zome_Cargo.toml.t index e2bea001a..7c85ee105 100644 --- a/_templates/init-zome/new-zome/zome_Cargo.toml.t +++ b/_templates/init-zome/new-zome/zome_Cargo.toml.t @@ -10,7 +10,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_<%= h.changeCase.snake(zome_name) %>_defs = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/defs" } hc_zome_<%= h.changeCase.snake(zome_name) %>_rpc = { path = "../../../../../lib/<%= h.changeCase.snake(zome_name) %>/rpc" } diff --git a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml index 815133523..b1b5a3181 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_event/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_economic_event_defs = { path = "../../../../../lib/rea_economic_event/defs" } hc_zome_rea_economic_event_lib = { path = "../../../../../lib/rea_economic_event/lib" } diff --git 
a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml index 61152d19b..b0ae572d0 100644 --- a/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml +++ b/example/custom-resource-attributes/zomes/beef_economic_resource/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_economic_resource_storage_consts = { path = "../../../../../lib/rea_economic_resource/storage_consts" } hc_zome_rea_economic_resource_defs = { path = "../../../../../lib/rea_economic_resource/defs" } diff --git a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml index 6e4531374..b6b9b62fd 100644 --- a/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml +++ b/example/knowledge-system-extensions/zomes/beef_resource_specification/code/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_resource_specification_defs = { path = "../../../../../lib/rea_resource_specification/defs" } hc_zome_rea_resource_specification_rpc = { path = "../../../../../lib/rea_resource_specification/rpc" } diff --git a/zomes/rea_action/zome/Cargo.toml b/zomes/rea_action/zome/Cargo.toml index be74e589e..ff0e95883 100644 --- a/zomes/rea_action/zome/Cargo.toml +++ b/zomes/rea_action/zome/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [dependencies] serde = "1" -hdk = "0.0.123" +hdk = "0.0.124" vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } vf_actions = { path = "../../../lib/vf_actions" } diff --git a/zomes/rea_agreement/zome/Cargo.toml b/zomes/rea_agreement/zome/Cargo.toml index d0924253a..34573be64 100644 --- a/zomes/rea_agreement/zome/Cargo.toml +++ b/zomes/rea_agreement/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_agreement_rpc = { path = "../rpc" } hc_zome_rea_agreement_lib = { path = "../lib" } diff --git a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml index 283a1e81f..393c09880 100644 --- a/zomes/rea_agreement/zome_idx_agreement/Cargo.toml +++ b/zomes/rea_agreement/zome_idx_agreement/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_commitment/zome/Cargo.toml b/zomes/rea_commitment/zome/Cargo.toml index b45fe5136..5ae5dcd68 100644 --- a/zomes/rea_commitment/zome/Cargo.toml +++ b/zomes/rea_commitment/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_commitment_rpc = { path = "../rpc" } hc_zome_rea_commitment_lib = { path = "../lib" } diff --git a/zomes/rea_commitment/zome_idx_planning/Cargo.toml b/zomes/rea_commitment/zome_idx_planning/Cargo.toml index defb79367..c0f8876e9 100644 --- a/zomes/rea_commitment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_commitment/zome_idx_planning/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" paste = "1.0" serde = "1" 
# :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_event/zome/Cargo.toml b/zomes/rea_economic_event/zome/Cargo.toml index 35858e1fa..cb4052261 100644 --- a/zomes/rea_economic_event/zome/Cargo.toml +++ b/zomes/rea_economic_event/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_economic_event_zome_api = { path = "../zome_api" } hc_zome_rea_economic_event_lib = { path = "../lib" } diff --git a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml index 6144cb5e5..ab9bf2b5c 100644 --- a/zomes/rea_economic_event/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_event/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_economic_resource/zome/Cargo.toml b/zomes/rea_economic_resource/zome/Cargo.toml index 631873ea8..f69559473 100644 --- a/zomes/rea_economic_resource/zome/Cargo.toml +++ b/zomes/rea_economic_resource/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_economic_resource_zome_api = { path = "../zome_api" } hc_zome_rea_economic_resource_lib = { path = "../lib" } diff --git a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml index b1f3c6802..2673ba401 100644 --- a/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml +++ b/zomes/rea_economic_resource/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml index 0a109e1a6..80081aecd 100644 --- a/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml index 09c1d8296..6480c3c03 100644 --- a/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_fulfillment/zome_observation/Cargo.toml 
b/zomes/rea_fulfillment/zome_observation/Cargo.toml index d815e4164..507acbe10 100644 --- a/zomes/rea_fulfillment/zome_observation/Cargo.toml +++ b/zomes/rea_fulfillment/zome_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_fulfillment_lib_destination = { path = "../lib_destination" } hc_zome_rea_fulfillment_rpc = { path = "../rpc" } diff --git a/zomes/rea_fulfillment/zome_planning/Cargo.toml b/zomes/rea_fulfillment/zome_planning/Cargo.toml index ac19e28b9..92543abba 100644 --- a/zomes/rea_fulfillment/zome_planning/Cargo.toml +++ b/zomes/rea_fulfillment/zome_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_fulfillment_rpc = { path = "../rpc" } hc_zome_rea_fulfillment_lib_origin = { path = "../lib_origin" } diff --git a/zomes/rea_intent/zome/Cargo.toml b/zomes/rea_intent/zome/Cargo.toml index 30ad6c114..9d26dbe74 100644 --- a/zomes/rea_intent/zome/Cargo.toml +++ b/zomes/rea_intent/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_intent_rpc = { path = "../rpc" } hc_zome_rea_intent_lib = { path = "../lib" } diff --git a/zomes/rea_intent/zome_idx_planning/Cargo.toml b/zomes/rea_intent/zome_idx_planning/Cargo.toml index 8f5f873fc..52969cd12 100644 --- a/zomes/rea_intent/zome_idx_planning/Cargo.toml +++ b/zomes/rea_intent/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_process/zome/Cargo.toml b/zomes/rea_process/zome/Cargo.toml index b8ba3660b..bae5424b4 100644 --- a/zomes/rea_process/zome/Cargo.toml +++ b/zomes/rea_process/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_records = { path = "../../../lib/hdk_records" } vf_attributes_hdk = { path = "../../../lib/vf_attributes_hdk" } diff --git a/zomes/rea_process/zome_idx_observation/Cargo.toml b/zomes/rea_process/zome_idx_observation/Cargo.toml index 45f2f667f..e85f273fa 100644 --- a/zomes/rea_process/zome_idx_observation/Cargo.toml +++ b/zomes/rea_process/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_process_specification/zome/Cargo.toml b/zomes/rea_process_specification/zome/Cargo.toml index 6d5609211..721a2c215 100644 --- a/zomes/rea_process_specification/zome/Cargo.toml +++ b/zomes/rea_process_specification/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_process_specification_rpc = { path = "../rpc" } hc_zome_rea_process_specification_lib = { path = "../lib" } diff --git a/zomes/rea_proposal/zome/Cargo.toml b/zomes/rea_proposal/zome/Cargo.toml index e3ce4715b..047b84b5a 100644 --- a/zomes/rea_proposal/zome/Cargo.toml +++ b/zomes/rea_proposal/zome/Cargo.toml @@ -7,7 
+7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_proposal_rpc = { path = "../rpc" } hc_zome_rea_proposal_lib = { path = "../lib" } diff --git a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml index 9493ae858..553f3ed06 100644 --- a/zomes/rea_proposal/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposal/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_proposed_intent/zome/Cargo.toml b/zomes/rea_proposed_intent/zome/Cargo.toml index 743b6178b..fe2d73eb5 100644 --- a/zomes/rea_proposed_intent/zome/Cargo.toml +++ b/zomes/rea_proposed_intent/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_proposed_intent_rpc = { path = "../rpc" } hc_zome_rea_proposed_intent_lib = { path = "../lib" } diff --git a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml index ad8a8aa6b..3625845cd 100644 --- a/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposed_intent/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_proposed_to/zome/Cargo.toml b/zomes/rea_proposed_to/zome/Cargo.toml index 0e2bbe42a..1ab23f1c6 100644 --- a/zomes/rea_proposed_to/zome/Cargo.toml +++ b/zomes/rea_proposed_to/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_proposed_to_rpc = { path = "../rpc" } hc_zome_rea_proposed_to_lib = { path = "../lib" } diff --git a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml index e53dcc02a..86f0307cc 100644 --- a/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml +++ b/zomes/rea_proposed_to/zome_idx_proposal/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_resource_specification/zome/Cargo.toml b/zomes/rea_resource_specification/zome/Cargo.toml index 299c98b98..e4b6d9368 100644 --- a/zomes/rea_resource_specification/zome/Cargo.toml +++ b/zomes/rea_resource_specification/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_resource_specification_rpc = { path = "../rpc" } hc_zome_rea_resource_specification_lib = { path = "../lib" } diff --git a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml index 9625e9f3b..57934e730 100644 --- a/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml +++ 
b/zomes/rea_resource_specification/zome_idx_specification/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml index 42c02b9d1..5a4f6dcd0 100644 --- a/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml +++ b/zomes/rea_satisfaction/zome_idx_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml index 1561f23bd..70ca3e229 100644 --- a/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml +++ b/zomes/rea_satisfaction/zome_idx_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hdk_semantic_indexes_zome_lib = { path = "../../../lib/hdk_semantic_indexes/zome" } hdk_semantic_indexes_zome_derive = { path = "../../../lib/hdk_semantic_indexes/zome_derive" } diff --git a/zomes/rea_satisfaction/zome_observation/Cargo.toml b/zomes/rea_satisfaction/zome_observation/Cargo.toml index 478015611..98c2d1e06 100644 --- a/zomes/rea_satisfaction/zome_observation/Cargo.toml +++ b/zomes/rea_satisfaction/zome_observation/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_satisfaction_lib_destination = { path = "../lib_destination" } hc_zome_rea_satisfaction_rpc = { path = "../rpc" } diff --git a/zomes/rea_satisfaction/zome_planning/Cargo.toml b/zomes/rea_satisfaction/zome_planning/Cargo.toml index 681a8921c..0d77b9a6c 100644 --- a/zomes/rea_satisfaction/zome_planning/Cargo.toml +++ b/zomes/rea_satisfaction/zome_planning/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_satisfaction_rpc = { path = "../rpc" } hc_zome_rea_satisfaction_lib_origin = { path = "../lib_origin" } diff --git a/zomes/rea_unit/zome/Cargo.toml b/zomes/rea_unit/zome/Cargo.toml index 38e9619e6..3ea110a86 100644 --- a/zomes/rea_unit/zome/Cargo.toml +++ b/zomes/rea_unit/zome/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] serde = "1" # :DUPE: hdk-rust-revid -hdk = "0.0.123" +hdk = "0.0.124" hc_zome_rea_unit_rpc = { path = "../rpc" } hc_zome_rea_unit_lib = { path = "../lib" } From 4759451a22534a3a29190a70a717c3cec1dcdfb4 Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 15 Mar 2022 21:37:54 +1000 Subject: [PATCH 056/181] update lockfile --- Cargo.lock | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cf8915bf3..24f112c0d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1271,9 +1271,9 @@ version = "0.1.0" [[package]] name = "hdk" -version = "0.0.123" +version = "0.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef6a85565285d19223b72265868abc2413e530328d6ae0f4013a0455d203feab" +checksum = 
"b3ac26c39ff5c824046f6c3192dfcf91eb19111569ae67547f4c86e810bba2cd" dependencies = [ "hdk_derive", "holo_hash", @@ -1289,9 +1289,9 @@ dependencies = [ [[package]] name = "hdk_derive" -version = "0.0.25" +version = "0.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f30170490dc1b0468512cdcb72ada813d7cbf44e126fa6ab39a0b79165dd224" +checksum = "ff276a067b2a6cc6c8e11f40b72151ffdab235f4e9746ab0d91f99a56bbbd50d" dependencies = [ "holochain_zome_types", "paste", @@ -1406,9 +1406,9 @@ dependencies = [ [[package]] name = "holo_hash" -version = "0.0.20" +version = "0.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1996c797ac295d79f884d263781c6ef3e0c63183af7d9f4e1d666c75e0d889ce" +checksum = "0d0ccc68b3c7b63b82fc41ec03d4e921ab144102c6af763f8ce729709ca54ad2" dependencies = [ "holochain_serialized_bytes", "kitsune_p2p_dht_arc", @@ -1469,9 +1469,9 @@ dependencies = [ [[package]] name = "holochain_zome_types" -version = "0.0.25" +version = "0.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d51f55ac58054a6c87d046cda5c5e71c15cfcd9f11e9889817119c8218e3a7d" +checksum = "3941d7e413be0a0298dfd9b31c4d9a87b0d89aca3eadbd00b379f2d4f5478d40" dependencies = [ "chrono", "holo_hash", @@ -1532,9 +1532,9 @@ checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" [[package]] name = "kitsune_p2p_dht_arc" -version = "0.0.9" +version = "0.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7459dbef2eef419984efb6bef5d1ff2ab1836ca7ca8f506b84b5982580b1bc9" +checksum = "3203925b13eb83d95825025eb1f4a97f5a4e049e5d8e480783bc30b5a1b3cd2f" dependencies = [ "derive_more", "gcollections", @@ -1545,9 +1545,9 @@ dependencies = [ [[package]] name = "kitsune_p2p_timestamp" -version = "0.0.6" +version = "0.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a7427a44837a60ccf063898340da6f8ef8c0e41812189b63d7775d8822863e7" +checksum = "f1528a9d36c33444a8d70b3264e72df64351d41afd76ceebf0d2103ca81dad6c" dependencies = [ "chrono", "derive_more", @@ -1563,9 +1563,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.119" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" +checksum = "ad5c14e80759d0939d013e6ca49930e59fc53dd8e5009132f76240c179380c09" [[package]] name = "lock_api" @@ -1809,9 +1809,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.86" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +checksum = "ebd69e719f31e88618baa1eaa6ee2de5c9a1c004f1e9ecdb58e8352a13f20a01" dependencies = [ "proc-macro2", "quote", @@ -1860,9 +1860,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" +checksum = "4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f" dependencies = [ "cfg-if", "pin-project-lite", @@ -1872,9 +1872,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" +checksum = "2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b" dependencies = [ "proc-macro2", "quote", @@ -1883,9 +1883,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" +checksum = "aa31669fa42c09c34d94d8165dd2012e8ff3c66aca50f3bb226b68f216f2706c" dependencies = [ "lazy_static", "valuable", From 556b28982aac7c93e6556b67e2759de17d3ba9f2 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 12:51:17 +1000 Subject: [PATCH 057/181] fix link deletion to only target entries in question --- lib/hdk_records/src/link_helpers.rs | 24 ++++++++++++++++++++ lib/hdk_semantic_indexes/zome/src/lib.rs | 28 +++++++++++++++--------- 2 files changed, 42 insertions(+), 10 deletions(-) diff --git a/lib/hdk_records/src/link_helpers.rs b/lib/hdk_records/src/link_helpers.rs index d9eaf06db..0ffa2db88 100644 --- a/lib/hdk_records/src/link_helpers.rs +++ b/lib/hdk_records/src/link_helpers.rs @@ -43,6 +43,30 @@ pub fn get_linked_headers( pull_links_data(base_address, link_tag, get_link_target_header) } +/// Execute the provided `link_map` function against the set of links +/// between a `base_address` and `target_address` via the given `link_tag`. +/// +/// If you have a bidirectional link between two `EntryHash`es, you must +/// run this method twice (once to remove each direction of the paired link). +/// +pub fn walk_links_matching_entry( + base_address: &EntryHash, + target_address: &EntryHash, + link_tag: LinkTag, + link_map: F, +) -> RecordAPIResult> + where F: Fn(&Link) -> T, +{ + let links_result = get_links(base_address.to_owned(), Some(link_tag))?; + + Ok(links_result + .iter() + .filter(|l| { l.target == *target_address }) + .map(link_map) + .collect() + ) +} + //----------------------------------------------------- // :TODO: ensure ordering is latest-first diff --git a/lib/hdk_semantic_indexes/zome/src/lib.rs b/lib/hdk_semantic_indexes/zome/src/lib.rs index e8606197f..9b897394c 100644 --- a/lib/hdk_semantic_indexes/zome/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome/src/lib.rs @@ -13,7 +13,7 @@ use hdk_records::{ create_entry_identity, read_entry_identity, }, - links::{get_linked_addresses, get_linked_headers}, + links::{get_linked_addresses, walk_links_matching_entry}, rpc::call_local_zome_method, }; pub use hdk_records::{ RecordAPIResult, DataIntegrityError }; @@ -364,8 +364,6 @@ fn delete_dest_indexes<'a, A, B, S, I, E>( /// Deletes a bidirectional link between two entry addresses. Any active links between /// the given addresses using the given tags will be deleted. /// -/// :TODO: this should probably only delete the referenced IDs, at the moment it clears anything matching tags. 
-/// fn delete_index<'a, A, B, S, I, E>( source_entry_type: &I, source: &A, @@ -386,18 +384,28 @@ fn delete_index<'a, A, B, S, I, E>( let address_source = calculate_identity_address(source_entry_type, source)?; let address_dest = calculate_identity_address(dest_entry_type, dest)?; - let mut links = get_linked_headers(&address_source, tag_source)?; - links.append(& mut get_linked_headers(&address_dest, tag_dest)?); + let mut links = walk_links_matching_entry( + &address_source, + &address_dest, + tag_source, + delete_link_target_header, + )?; + links.append(& mut walk_links_matching_entry( + &address_dest, + &address_source, + tag_dest, + delete_link_target_header, + )?); - Ok(links - .iter().cloned() - .map(|l| { Ok(delete_link(l)?) }) - .collect() - ) + Ok(links) } //--------------------------[ UTILITIES / INTERNALS ]--------------------- +fn delete_link_target_header(l: &Link) -> RecordAPIResult<HeaderHash> { + Ok(delete_link(l.create_link_hash.to_owned())?) +} + /// Returns the first error encountered (if any). Best used with the `?` operator. fn throw_any_error<T>(mut errors: Vec<RecordAPIResult<T>>) -> RecordAPIResult<()> { if errors.len() == 0 { From d809c8eb3060f929ede310c636bc90ea5e6f5524 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 19:50:28 +1000 Subject: [PATCH 058/181] implement alternative, more flexible architecture for managing index updates in response to record changes indexing now supports mixed DnaHashes in identifier data for many:many relationships between Holochain cells --- lib/hdk_semantic_indexes/client/src/lib.rs | 294 ++++++++++++++++++++- 1 file changed, 293 insertions(+), 1 deletion(-) diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index fa48b7e6b..b97b8a16d 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -26,7 +26,9 @@ * @package hdk_semantic_indexes_client_lib * @since 2020-08-07 */ +use std::collections::HashMap; use hdk::prelude::*; +use holo_hash::DnaHash; use hdk_records::{ RecordAPIResult, DataIntegrityError, OtherCellResult, CrossCellError, @@ -99,6 +101,42 @@ macro_rules! create_index { ) } }; + // automatic index: + // local/remote determination is managed by DnaHash of target addresses + ( + $record_type:ident.$rel:ident($dest_record_id:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + [<read_ $dest_record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([<index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + vec![$dest_record_id.to_owned()].as_slice(), + vec![].as_slice(), + ) + } + }; + // self-referential, local-only indexes + ( + $record_type:ident($record_id:expr).$rel:ident($dest_record_id:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + |_| { None }, // specify none for destination index + &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned + vec![$dest_record_id.to_owned()].as_slice(), + vec![].as_slice(), + ) + } + }; } /// Fetch the identifiers stored for a referenced relationship @@ -289,11 +327,205 @@ ) } }; -} + // automatic index, add only + ( + $record_type:ident.$rel:ident($dest_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) + ) => { + paste!
{ + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + [<read_ $dest_record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([<index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + $dest_record_ids, + vec![].as_slice(), + ) + } + }; + // automatic index, add and remove + ( + $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + [<read_ $dest_record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([<index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + $dest_record_ids, + $remove_record_ids, + ) + } + }; + // automatic index, remove only + ( + $record_type:ident.$rel:ident.not($remove_record_ids:expr), + $dest_record_type:ident.$inv_rel:ident($record_id:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + [<read_ $dest_record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + &stringify!([<index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), + vec![].as_slice(), + $remove_record_ids, + ) + } + }; + + // self-referential or local-only indexes, add only + ( + $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + |_| { None }, // specify none for destination index + &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned + $dest_record_ids, + &vec![].as_slice(), + ) + } + }; + // self-referential or local-only indexes, remove only + ( + $record_type:ident($record_id:expr).$rel:ident.not($remove_record_ids:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + |_| { None }, // specify none for destination index + &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned + &vec![].as_slice(), + $remove_record_ids, + ) + } + }; + // self-referential or local-only indexes, add & remove + ( + $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr) + ) => { + paste! { + manage_index( + [<read_ $record_type:lower:snake _index_zome>], + &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), + $record_id, + |_| { None }, // specify none for destination index + &"", // ignored, since no index zome name is returned + &"", // ignored, since no index zome name is returned + $dest_record_ids, + $remove_record_ids, + ) + } + }; +} //-------------------------------[ CREATE ]------------------------------------- +/// Outer method for creating and updating indexes. +/// +/// :TODO: documentation +/// +/// @see create_index!
+/// +pub fn manage_index<S, A, B, C, F, G>( + origin_zome_name_from_config: F, + origin_fn_name: &S, + source: &A, + dest_zome_name_from_config: G, + dest_fn_name: &S, + remote_permission_id: &S, + dest_addresses: &[B], + remove_addresses: &[B], +) -> RecordAPIResult<Vec<OtherCellResult<RemoteEntryLinkResponse>>> + where S: AsRef<str>, + A: DnaAddressable<EntryHash>, + B: DnaAddressable<EntryHash>, + C: std::fmt::Debug, + SerializedBytes: TryInto<C, Error = SerializedBytesError>, + F: Copy + Fn(C) -> Option<String>, + G: Copy + Fn(C) -> Option<String>, +{ + // altering an index with no targets is a no-op + if dest_addresses.len() == 0 && remove_addresses.len() == 0 { + return Ok(vec![]) + } + + let sources = vec![source.clone()]; + let targets = prefilter_target_dnas(dest_addresses, remove_addresses)?; + + // Manage local index creation / removal + + let local_forward_add = targets.local_dests.0.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &sources, &vec![], + ) + }); + let local_forward_remove = targets.local_dests.1.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &vec![], &sources, + ) + }); + let local_reciprocal_update = std::iter::once(request_sync_local_index( + dest_zome_name_from_config, dest_fn_name, + source, targets.local_dests.0.as_slice(), targets.local_dests.1.as_slice(), + )); + + // Manage remote index creation / removal & append to resultset + + Ok(std::iter::empty() + .chain(local_forward_add) + .chain(local_forward_remove) + .chain(local_reciprocal_update) + .chain(targets.remote_dests.iter() + .flat_map(|(_dna, (add_dests, remove_dests))| { + let remote_forward_add = add_dests.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &sources, &vec![], + ) + }); + let remote_forward_remove = remove_dests.iter() + .map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &vec![], &sources, + ) + }); + let remote_reciprocal_update = std::iter::once(request_sync_remote_index( + remote_permission_id, + source, add_dests, remove_dests, + )); + + std::iter::empty() + .chain(remote_forward_add) + .chain(remote_forward_remove) + .chain(remote_reciprocal_update) + })) + .collect()) +} + /// Toplevel method for triggering a link creation flow between two records in /// different DNA cells. The calling cell will have an 'origin query index' created for @@ -627,3 +859,63 @@ fn request_sync_local_index( ) )?)
} + + +/// internal struct for pre-arranging lists of IDs for transmission to remote +/// DNA-relative API endpoints +#[derive(Debug)] +struct TargetsByDna<B> + where B: DnaAddressable<EntryHash>, +{ + pub local_dests: (Vec<B>, Vec<B>), + pub remote_dests: HashMap<DnaHash, (Vec<B>, Vec<B>)>, +} + +// pre-arrange input IDs for transmission to target DNAs +fn prefilter_target_dnas<'a, B>( + dest_addresses: &'a [B], + remove_addresses: &'a [B], +) -> RecordAPIResult<TargetsByDna<B>> + where B: DnaAddressable<EntryHash>, +{ + let local_dna = dna_info()?.hash; + + let results = dest_addresses.iter() + .fold(TargetsByDna { + local_dests: (vec![], vec![]), + remote_dests: HashMap::new(), + }, |mut targets: TargetsByDna<B>, val| { + let target_dna: &DnaHash = val.as_ref(); + if local_dna == target_dna.to_owned() { + targets.local_dests.0.push(val.to_owned()); + } else { + match targets.remote_dests.insert(target_dna.to_owned(), (vec![val.to_owned()], vec![])) { + Some(mut prev_val) => { + let vals = targets.remote_dests.get_mut(target_dna).unwrap(); + vals.0.append(&mut prev_val.0); + }, + None => (), + } + } + targets + }); + + Ok(remove_addresses.iter() + .fold(results, |mut targets: TargetsByDna<B>, val| { + let target_dna: &DnaHash = val.as_ref(); + if local_dna == target_dna.to_owned() { + targets.local_dests.1.push(val.to_owned()); + } else { + match targets.remote_dests.insert(target_dna.to_owned(), (vec![], vec![val.to_owned()])) { + Some(mut prev_val) => { + let vals = targets.remote_dests.get_mut(target_dna).unwrap(); + vals.0.append(&mut prev_val.0); + vals.1.append(&mut prev_val.1); + }, + None => (), + } + } + targets + }) + ) +} From 906fb115aac9e077157a4abd9a5b7d2af6cd7edd Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 20:02:46 +1000 Subject: [PATCH 059/181] fix #212 contains & containedIn resolvers for EconomicResource --- .../resolvers/economicResource.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/modules/vf-graphql-holochain/resolvers/economicResource.ts b/modules/vf-graphql-holochain/resolvers/economicResource.ts index 8448f289f..adea1106f 100644 --- a/modules/vf-graphql-holochain/resolvers/economicResource.ts +++ b/modules/vf-graphql-holochain/resolvers/economicResource.ts @@ -29,12 +29,20 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( { - containedIn: async (record: EconomicResource): Promise<EconomicResource> => { - return (await readResources({ params: { contains: record.id } })).pop()['economicResource'] + containedIn: async (record: EconomicResource): Promise<Maybe<EconomicResource>> => { + const resources = await readResources({ params: { contains: record.id } }) + if (!resources.results || !resources.results.length) { + return null + } + return resources.results.pop()['economicResource'] }, contains: async (record: EconomicResource): Promise<EconomicResource[]> => { - return (await readResources({ params: { containedIn: record.id } })).map(({ economicResource }) => economicResource) + const resources = await readResources({ params: { containedIn: record.id } }) + if (!resources.results || !resources.results.length) { + return [] + } + return resources.results.map(({ economicResource }) => economicResource) }, }, (hasKnowledge ?
{ From 2a0d98aa87453038f107aa398882fbb6e692ee9d Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 20:03:21 +1000 Subject: [PATCH 060/181] export serializeId from test helpers for use in tests which mix low-level Cell API with GraphQL queries --- test/init.js | 1 + 1 file changed, 1 insertion(+) diff --git a/test/init.js b/test/init.js index 4908fbfb4..862c82190 100644 --- a/test/init.js +++ b/test/init.js @@ -169,6 +169,7 @@ module.exports = { buildRunner, bridge: Config.bridge, buildConfig: Config.gen, + seralizeId, // :TODO: :SHONK: temporary code for mocking, eventually tests will need to populate mock data with referential integrity to pass mockAgentId: (asStr = true) => { From d29aa471a13f01729e45d574bb5bfc88afe3f07f Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 20:04:12 +1000 Subject: [PATCH 061/181] fix serialization errors in resource_links test; now passing --- test/economic-resource/resource_links.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/test/economic-resource/resource_links.js b/test/economic-resource/resource_links.js index a3cdd22fe..262008ceb 100644 --- a/test/economic-resource/resource_links.js +++ b/test/economic-resource/resource_links.js @@ -2,6 +2,7 @@ const { buildConfig, buildRunner, buildPlayer, + seralizeId, // :NOTE: needed due to mixing of direct API and GraphQL in same test mockAgentId, mockIdentifier, mockAddress, @@ -87,8 +88,9 @@ runner.registerScenario('EconomicResource composition / containment functionalit const resourceId3 = resource3.id readResp = await alice.call('economic_resource', 'get_economic_resource', { address: resourceId1 }) + readResource = readResp.economicResource - t.ok(readResource.id, 'container resource re-retrieval OK') + t.deepEqual(readResource.id, resourceId1, 'container resource re-retrieval OK') console.log(readResource) t.equal(readResource.contains && readResource.contains.length, 2, 'container resource reference appended') t.deepEqual(readResource.contains && readResource.contains[0], resourceId2, 'container resource reference B OK') @@ -116,12 +118,12 @@ runner.registerScenario('EconomicResource composition / containment functionalit await s.consistency() readResp = await graphQL(` { - container: economicResource(id: "${resourceId1}") { + container: economicResource(id: "${seralizeId(resourceId1)}") { contains { id } } - contained: economicResource(id: "${resourceId2}") { + contained: economicResource(id: "${seralizeId(resourceId2)}") { containedIn { id } @@ -129,11 +131,11 @@ runner.registerScenario('EconomicResource composition / containment functionalit }`) t.equal(readResp.data.container.contains.length, 1, 'contains ref present in GraphQL API') - t.equal(readResp.data.container.contains[0].id, resourceId2, 'contains ref OK in GraphQL API') - t.equal(readResp.data.contained.containedIn.id, resourceId1, 'containedIn ref OK in GraphQL API') + t.equal(readResp.data.container.contains[0].id, seralizeId(resourceId2), 'contains ref OK in GraphQL API') + t.equal(readResp.data.contained.containedIn.id, seralizeId(resourceId1), 'containedIn ref OK in GraphQL API') // SCENARIO: delete resource, check links are removed - // :TODO: needs some thought + // :TODO: needs some thought; resources should only be deleted via last linked EconomicEvent's deletion // const dResp = await alice.call('economic_resource', 'delete_resource', { address: resourceId3 }) // await s.consistency() // t.ok(dResp.economicResource, 'resource deleted successfully') From 
cc1f55a5118abd14e59f9ebc164a3bcf37fadc62 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 20:27:18 +1000 Subject: [PATCH 062/181] return correct paginated Relay structure for ResourceSpecification.conformingResources --- .../vf-graphql-holochain/resolvers/resourceSpecification.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts index a01cae5ed..6fa42c95e 100644 --- a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts +++ b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts @@ -10,7 +10,7 @@ import { mapZomeFn } from '../connection' import { Maybe, - EconomicResource, + EconomicResourceConnection, ResourceSpecification, Unit, } from '@valueflows/vf-graphql' @@ -24,8 +24,8 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI return Object.assign( (hasObservation ? { - conformingResources: async (record: ResourceSpecification): Promise<EconomicResource[]> => { - return (await queryResources({ params: { conformsTo: record.id } })).results.map(({ economicResource }) => economicResource ) + conformingResources: async (record: ResourceSpecification): Promise<EconomicResourceConnection> => { + return await queryResources({ params: { conformsTo: record.id } }) }, } : {}), (hasMeasurement ? { From 45f1cb1f913ad780da087f9342cf8fc6c863b229 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 21:30:51 +1000 Subject: [PATCH 063/181] quick first pass at converting index zome return data to Relay-compatible pagination result --- lib/hdk_semantic_indexes/zome/Cargo.toml | 1 + lib/hdk_semantic_indexes/zome/src/lib.rs | 1 + .../zome_derive/src/lib.rs | 42 +++++++++++++++---- 3 files changed, 37 insertions(+), 7 deletions(-) diff --git a/lib/hdk_semantic_indexes/zome/Cargo.toml b/lib/hdk_semantic_indexes/zome/Cargo.toml index 16d7bfdc0..36e5dac94 100644 --- a/lib/hdk_semantic_indexes/zome/Cargo.toml +++ b/lib/hdk_semantic_indexes/zome/Cargo.toml @@ -11,6 +11,7 @@ hdk = "0.0" hdk_semantic_indexes_zome_rpc = { path = "../rpc" } hdk_rpc_errors = { path = "../../hdk_rpc_errors" } hdk_records = { path = "../../hdk_records" } +hdk_relay_pagination = { path = "../../hdk_relay_pagination" } serde_maybe_undefined = { path = "../../serde_maybe_undefined" } hc_zome_dna_auth_resolver_lib = {git = "https://github.com/holochain-open-dev/dna-auth-resolver", rev = "b1adec5", package = "hc_zome_dna_auth_resolver_lib"} diff --git a/lib/hdk_semantic_indexes/zome/src/lib.rs b/lib/hdk_semantic_indexes/zome/src/lib.rs index 9b897394c..128fb18bc 100644 --- a/lib/hdk_semantic_indexes/zome/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome/src/lib.rs @@ -18,6 +18,7 @@ use hdk_records::{ }; pub use hdk_records::{ RecordAPIResult, DataIntegrityError }; pub use hdk_semantic_indexes_zome_rpc::*; +pub use hdk_relay_pagination::PageInfo; //--------------- ZOME CONFIGURATION ATTRIBUTES ---------------- diff --git a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs index c871ff7c3..fce2c679d 100644 --- a/lib/hdk_semantic_indexes/zome_derive/src/lib.rs +++ b/lib/hdk_semantic_indexes/zome_derive/src/lib.rs @@ -46,6 +46,7 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { // build toplevel variables for generated code let record_type = &input.ident; let record_type_str_attribute = record_type.to_string().to_case(Case::Snake); + let record_type_str_ident = format_ident!("{}",
record_type_str_attribute); let record_type_index_attribute = format_ident!("{}_index", record_type_str_attribute); let record_read_api_method_name = format_ident!("get_{}", record_type_str_attribute); @@ -194,17 +195,23 @@ pub fn index_zome(attribs: TokenStream, input: TokenStream) -> TokenStream { #index_mutators )* - // define query results structure as a flat array which separates errors into own list + // query results structure mimicking Relay's pagination format #[derive(Debug, Serialize, Deserialize)] struct QueryResults { + pub page_info: PageInfo, #[serde(default)] - pub results: Vec<Response>, - // :TODO: pagination metadata + pub edges: Vec<Edge>, #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub errors: Vec<WasmError>, } + #[derive(Debug, Serialize, Deserialize)] + struct Edge { + node: Response, + cursor: String, + } + // declare public query method with injected handler logic #[hdk_extern] fn #exposed_query_api_method_name(SearchInputs { params }: SearchInputs) -> ExternResult<QueryResults> where { let entries = entries_result?; + let formatted_edges = entries.iter() + .cloned() + .filter_map(Result::ok) + .map(|node| { + let record_cursor: Vec<u8> = node.#record_type_str_ident.id.to_owned().into(); + Edge { + node: node.#record_type_str_ident, + // :TODO: use HoloHashB64 once API stabilises + cursor: String::from_utf8(record_cursor).unwrap_or("".to_string()) + } + }); + + let mut edge_cursors = formatted_edges.clone().map(|e| { e.cursor }); + let first_cursor = edge_cursors.next().unwrap_or("0".to_string()); + Ok(QueryResults { - results: entries.iter() - .cloned() - .filter_map(Result::ok) - .collect(), + edges: formatted_edges.collect(), + page_info: PageInfo { + end_cursor: edge_cursors.last().unwrap_or(first_cursor.clone()), + start_cursor: first_cursor, + // :TODO: + has_next_page: true, + has_previous_page: true, + page_limit: None, + total_count: None, + }, errors: entries.iter() .cloned() .filter_map(Result::err) From 623861eda6bcde92b02ad8afa30529104655cd3e Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 21:31:01 +1000 Subject: [PATCH 064/181] update lockfile --- Cargo.lock | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.lock b/Cargo.lock index 24f112c0d..b8fe54848 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1361,6 +1361,7 @@ dependencies = [ "hc_zome_dna_auth_resolver_lib", "hdk", "hdk_records", + "hdk_relay_pagination", "hdk_rpc_errors", "hdk_semantic_indexes_zome_rpc", "serde", From a2a6c8f6fac578ca8090c0c71158aedd5e865e1c Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 21:31:24 +1000 Subject: [PATCH 065/181] update conformingResources test for Relay API --- test/economic-resource/conforming_resources.js | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/economic-resource/conforming_resources.js b/test/economic-resource/conforming_resources.js index 525ca7018..2fa6d62cd 100644 --- a/test/economic-resource/conforming_resources.js +++ b/test/economic-resource/conforming_resources.js @@ -81,14 +81,18 @@ runner.registerScenario('can locate EconomicResources conforming to a ResourceSp resp = await graphQL(`{ rs: resourceSpecification(id: "${rsId}") { conformingResources { - id + edges { + node { + id + } + } } } }`) - t.equal(resp.data.rs.conformingResources.length, 2, 'all resources indexed via ResourceSpecification link') - t.equal(resp.data.rs.conformingResources[0].id, resource1Id, 'resource 2 ref OK') -
t.equal(resp.data.rs.conformingResources[1].id, resource2Id, 'resource 1 ref OK') + t.equal(resp.data.rs.conformingResources.edges.length, 2, 'all resources indexed via ResourceSpecification link') + t.equal(resp.data.rs.conformingResources.edges[0].node.id, resource1Id, 'resource 2 ref OK') + t.equal(resp.data.rs.conformingResources.edges[1].node.id, resource2Id, 'resource 1 ref OK') }) runner.run() From 8175f81f7c3db38001f4a9e52ff86465f3b34651 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 21:32:06 +1000 Subject: [PATCH 066/181] only update EconomicResource container refs if changed --- zomes/rea_economic_resource/lib/src/lib.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index 14052563a..1baa683dd 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -139,9 +139,11 @@ impl API for EconomicResourceZomePermissableDefault { let (revision_id, identity_address, entry, prev_entry): (_,_, EntryData, EntryData) = update_record(&entry_def_id, &address, resource)?; // :TODO: this may eventually be moved to an EconomicEvent update, see https://lab.allmende.io/valueflows/valueflows/-/issues/637 - let now_contained = if let Some(contained) = &entry.contained_in { vec![contained.clone()] } else { vec![] }; - let prev_contained = if let Some(contained) = &prev_entry.contained_in { vec![contained.clone()] } else { vec![] }; - update_index!(Self(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice())))?; + if entry.contained_in != prev_entry.contained_in { + let now_contained = if let Some(contained) = &entry.contained_in { vec![contained.clone()] } else { vec![] }; + let prev_contained = if let Some(contained) = &prev_entry.contained_in { vec![contained.clone()] } else { vec![] }; + update_index!(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice()))?; + } // :TODO: optimise this- should pass results from `replace_direct_index` instead of retrieving from `get_link_fields` where updates construct_response(&identity_address, &revision_id, &entry, get_link_fields(&event_entry_def_id, &process_entry_def_id, &identity_address)?) 
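For orientation between the patches above: after PATCH 058 a single macro pair drives all query-index maintenance, and callers no longer pick `Local`/`Remote`/`Self` variants — the DnaHash embedded in each target identifier decides routing per target at call time, exactly as exercised by the `update_index!` call in PATCH 066. A minimal usage sketch follows (illustrative only; it assumes hREA's generated `EconomicResourceAddress` identifier type and an `economic_resource.contained_in` index configured as in the surrounding zomes):

    use hdk_records::RecordAPIResult;
    use hdk_semantic_indexes_client_lib::*;

    // hypothetical handler invoked when a resource's container assignment changes
    fn handle_container_change(
        resource: &EconomicResourceAddress,         // record being edited
        new_container: &[EconomicResourceAddress],  // 0..1 newly assigned containers
        old_container: &[EconomicResourceAddress],  // 0..1 previously assigned containers
    ) -> RecordAPIResult<()> {
        // self-referential arm: links to `new_container` are added and links to
        // `old_container` removed; each target is dispatched to a local or remote
        // index zome depending on the DnaHash carried in its identifier
        update_index!(economic_resource(resource)
            .contained_in(new_container)
            .not(old_container))?;
        Ok(())
    }
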
From 9bf106a5f41fe15b224ca7b3a69b6f92a62527ab Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 21:39:50 +1000 Subject: [PATCH 067/181] refix economicResource link resolvers per new indexing API return format --- .../vf-graphql-holochain/resolvers/economicResource.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/vf-graphql-holochain/resolvers/economicResource.ts b/modules/vf-graphql-holochain/resolvers/economicResource.ts index adea1106f..cde8698e7 100644 --- a/modules/vf-graphql-holochain/resolvers/economicResource.ts +++ b/modules/vf-graphql-holochain/resolvers/economicResource.ts @@ -31,18 +31,18 @@ export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAI { containedIn: async (record: EconomicResource): Promise> => { const resources = await readResources({ params: { contains: record.id } }) - if (!resources.results || !resources.results.length) { + if (!resources.edges || !resources.edges.length) { return null } - return resources.results.pop()['economicResource'] + return resources.edges.pop()['node'] }, contains: async (record: EconomicResource): Promise => { const resources = await readResources({ params: { containedIn: record.id } }) - if (!resources.results || !resources.results.length) { + if (!resources.edges || !resources.edges.length) { return [] } - return resources.results.map(({ economicResource }) => economicResource) + return resources.edges.map(({ node }) => node) }, }, (hasKnowledge ? { From 8fc1e0cccb67b4d8608855cfb3aff30befdd02e6 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:03:26 +1000 Subject: [PATCH 068/181] remove old indexing implementation, entirely superceded by automatic indexes --- lib/hdk_semantic_indexes/client/src/lib.rs | 482 +-------------------- 1 file changed, 8 insertions(+), 474 deletions(-) diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index b97b8a16d..5188b965a 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -30,8 +30,7 @@ use std::collections::HashMap; use hdk::prelude::*; use holo_hash::DnaHash; use hdk_records::{ - RecordAPIResult, DataIntegrityError, - OtherCellResult, CrossCellError, + RecordAPIResult, OtherCellResult, DnaAddressable, rpc::{ call_local_zome_method, @@ -46,63 +45,11 @@ use hdk_semantic_indexes_zome_rpc::{ //-------------------------------[ MACRO LAYER ]------------------------------------- /// Create indexes by defining record types, relationships and associated IDs. +/// Local / remote determination is managed by DnaHash of target addresses. /// #[macro_export] macro_rules! create_index { - // local bidirectional index - ( - Local( - $lrecord_type:ident.$lrel:ident($ldest_record_id:expr), - $ldest_record_type:ident.$linv_rel:ident($lrecord_id:expr) - ) - ) => { - paste! { - create_local_index( - [], - &stringify!([<_internal_index_ $lrecord_type:lower:snake _ $lrel:lower:snake>]), - $lrecord_id, - [], - &stringify!([<_internal_index_ $ldest_record_type:lower:snake _ $linv_rel:lower:snake>]), - $ldest_record_id, - ) - } - }; - // remote bidirectional index - ( - Remote( - $rrecord_type:ident.$rrel:ident($rdest_record_id:expr), - $rdest_record_type:ident.$rinv_rel:ident($rrecord_id:expr) - ) - ) => { - paste! 
{ - create_remote_index( - [], - &stringify!([<_internal_index_ $rrecord_type:lower:snake _ $rrel:lower:snake>]), - $rrecord_id, - &stringify!([]), - vec![$rdest_record_id.to_owned()].as_slice(), - ) - } - }; - // special case for self-referential or local-only indexes - ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_id:expr) - ) - ) => { - paste! { - create_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - |_| { None }, // specify none for destination index - &"", // ignored, since no index zome name is returned - $dest_record_id, - ) - } - }; - // automatic index: - // local/remote determination is managed by DnaHash of target addresses + // bidirectional 1:1 indexes ( $record_type:ident.$rel:ident($dest_record_id:expr), $dest_record_type:ident.$inv_rel:ident($record_id:expr) @@ -157,178 +104,12 @@ macro_rules! read_index { } /// Update indexes by defining added and removed identifiers. +/// Local / remote determination is managed by DnaHash of target addresses, and +/// you can freely mix identifiers from disparate DNAs in the same input. /// #[macro_export] macro_rules! update_index { - // local index, add only - ( - Local( - $record_type:ident.$rel:ident($dest_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - [], - &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), - $dest_record_ids, - &vec![].as_slice(), - ) - } - }; - // local index, remove only - ( - Local( - $record_type:ident.$rel:ident.not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - [], - &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), - &vec![].as_slice(), - $remove_record_ids, - ) - } - }; - // local index, add and remove - ( - Local( - $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - [], - &stringify!([<_internal_index_ $dest_record_type:lower:snake _ $inv_rel:lower:snake>]), - $dest_record_ids, - $remove_record_ids, - ) - } - }; - - // remote index, add only - ( - Remote( - $record_type:ident.$rel:ident($dest_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_remote_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - &stringify!([]), - $dest_record_ids, - &vec![].as_slice(), - ) - } - }; - // remote index, remove only - ( - Remote( - $record_type:ident.$rel:ident.not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! 
{ - update_remote_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - &stringify!([]), - &vec![].as_slice(), - $remove_record_ids, - ) - } - }; - // remote index, add and remove - ( - Remote( - $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), - $dest_record_type:ident.$inv_rel:ident($record_id:expr) - ) - ) => { - paste! { - update_remote_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - &stringify!([]), - $dest_record_ids, - $remove_record_ids, - ) - } - }; - - // self-referential or local-only indexes, add only - ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - |_| { None }, // specify none for destination index - &"", // ignored, since no index zome name is returned - $dest_record_ids, - &vec![].as_slice(), - ) - } - }; - // self-referential or local-only indexes, remove only - ( - Self( - $record_type:ident($record_id:expr).$rel:ident.not($remove_record_ids:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - |_| { None }, // specify none for destination index - &"", // ignored, since no index zome name is returned - &vec![].as_slice(), - $remove_record_ids, - ) - } - }; - // self-referential or local-only indexes, add & remove - ( - Self( - $record_type:ident($record_id:expr).$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr) - ) - ) => { - paste! { - update_local_index( - [], - &stringify!([<_internal_index_ $record_type:lower:snake _ $rel:lower:snake>]), - $record_id, - |_| { None }, // specify none for destination index - &"", // ignored, since no index zome name is returned - $dest_record_ids, - $remove_record_ids, - ) - } - }; - - // automatic index, add only + // add only ( $record_type:ident.$rel:ident($dest_record_ids:expr), $dest_record_type:ident.$inv_rel:ident($record_id:expr) @@ -346,7 +127,7 @@ macro_rules! update_index { ) } }; - // automatic index, add and remove + // add and remove ( $record_type:ident.$rel:ident($dest_record_ids:expr).not($remove_record_ids:expr), $dest_record_type:ident.$inv_rel:ident($record_id:expr) @@ -364,7 +145,7 @@ macro_rules! update_index { ) } }; - // automatic index, remove only + // remove only ( $record_type:ident.$rel:ident.not($remove_record_ids:expr), $dest_record_type:ident.$inv_rel:ident($record_id:expr) @@ -526,120 +307,6 @@ pub fn manage_index( .collect()) } -/// Toplevel method for triggering a link creation flow between two records in -/// different DNA cells. The calling cell will have an 'origin query index' created for -/// fetching the referenced remote IDs; the destination cell will have a -/// 'destination query index' created for querying the referenced records in full. -/// -/// :IMPORTANT: in handling errors from this method, one should take care to test -/// ALL `OtherCellResult`s in the returned Vector if the success of updating both -/// sides of the index is important. By default, an index failure on either side -/// may occur without the outer result failing. -/// -/// :TODO: consider a robust method for handling updates of data in remote DNAs & -/// foreign zomes where the transactionality guarantees of single-zome execution -/// are foregone. 
-/// -pub fn create_remote_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - remote_permission_id: &S, - dest_addresses: &[B], -) -> RecordAPIResult>> - where S: AsRef, - A: DnaAddressable, - B: DnaAddressable, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, -{ - let sources = vec![source.clone()]; - - // Build local index first (for reading linked record IDs from the `source`) - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &sources, &vec![], - ) - }).collect(); - - // request building of remote index in foreign cell - let resp = request_sync_remote_index( - remote_permission_id, - source, dest_addresses, &vec![], - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created) - }, - Err(e) => { - indexes_created.push(Err(e.into())) - }, - }; - - Ok(indexes_created) -} - -/// Creates a bidirectional link between a local entry and another from a foreign zome in the same DNA, -/// and returns a vector of the `HeaderHash`es of the (respectively) forward & reciprocal links created. -/// -/// :IMPORTANT: unlike remote indexes, it can be considered that DNA-local indexes are executing -/// "in good faith", since their inclusion in a DNA means they become part of the (hashed & shared) -/// computation space. The expectation is placed onto the DNA configurator to ensure that all -/// identifiers align and options correctly validate. As such, we treat local index failures more -/// severely than remote ones and WILL return a toplevel `DataIntegrityError` if *either* of the -/// paired index zomes fails to update; possibly causing a rollback in any other client logic -/// already attempted. -/// -/// :TODO: as above for remote indexes, so with local ones. -/// -pub fn create_local_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - dest_zome_name_from_config: G, - dest_fn_name: &S, - dest: &B, -) -> RecordAPIResult>> - where S: AsRef, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: FnOnce(C) -> Option, - G: FnOnce(C) -> Option, - A: DnaAddressable, - B: DnaAddressable, -{ - let dests = vec![(*dest).clone()]; - let sources = vec![source.clone()]; - - let or = request_sync_local_index(origin_zome_name_from_config, origin_fn_name, dest, &sources, &vec![]); - let dr = request_sync_local_index(dest_zome_name_from_config, dest_fn_name, source, &dests, &vec![]); - - let indexes_created = vec! [ - match dr { - Ok(drr) => drr.indexes_created - .first().ok_or(CrossCellError::Internal("cross-zome index creation failed".to_string()))? - .clone() - .map_err(|e| { DataIntegrityError::RemoteRequestError(e.to_string()) }), - Err(e) => Err(e.into()), - }, - match or { - Ok(orr) => orr.indexes_created - .first().ok_or(CrossCellError::Internal("cross-zome index creation failed".to_string()))? 
- .clone() - .map_err(|e| { DataIntegrityError::RemoteRequestError(e.to_string()) }), - Err(e) => Err(e.into()), - }, - ]; - - Ok(indexes_created) -} - //--------------------------------[ READ ]-------------------------------------- /// Reads and returns all entry identities referenced by the given index from @@ -668,139 +335,6 @@ pub fn read_local_index<'a, O, A, S, F, C>( //-------------------------------[ UPDATE ]------------------------------------- -/// Toplevel method for triggering a link update flow between two records in -/// different DNAs. Indexes on both sides of the network boundary will be updated. -/// -/// :NOTE: All remote index deletion logic should use the update/sync API, as IDs -/// must be explicitly provided in order to guard against indexes from unrelated -/// cells being wiped by this cell. -/// -pub fn update_remote_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - remote_permission_id: &S, - dest_addresses: &[B], - remove_addresses: &[B], -) -> RecordAPIResult - where S: AsRef, - A: DnaAddressable, - B: DnaAddressable, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, -{ - // handle local 'origin' index first - let sources = vec![source.clone()]; - - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &sources, &vec![], - ) - }).collect(); - - let deleted: Vec> = remove_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &vec![], &sources, - ) - }).collect(); - - // forward request to remote cell to update destination indexes - let resp = request_sync_remote_index( - remote_permission_id, - source, dest_addresses, remove_addresses, - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - let mut indexes_removed = merge_indexing_results(&deleted, |r| { r.indexes_removed.to_owned() }); - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created); - indexes_removed.append(&mut remote_results.indexes_removed); - }, - Err(e) => { - indexes_created.push(Err(e)); - }, - }; - - Ok(RemoteEntryLinkResponse { indexes_created, indexes_removed }) -} - -/// Toplevel API for triggering an update flow between two indexes, where one is -/// in the local DNA and another is managed in a remote Cell. 
-/// -pub fn update_local_index( - origin_zome_name_from_config: F, - origin_fn_name: &S, - source: &A, - dest_zome_name_from_config: G, - dest_fn_name: &S, - dest_addresses: &[B], - remove_addresses: &[B], -) -> RecordAPIResult - where S: AsRef, - C: std::fmt::Debug, - SerializedBytes: TryInto, - F: Clone + FnOnce(C) -> Option, - G: FnOnce(C) -> Option, - A: DnaAddressable, - B: DnaAddressable, -{ - let sources = vec![source.clone()]; - - // :TODO: optimise to call once per target DNA - let created: Vec> = dest_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &sources, &vec![], - ) - }).collect(); - - let deleted: Vec> = remove_addresses.iter().map(|dest| { - request_sync_local_index( - origin_zome_name_from_config.to_owned(), origin_fn_name, - dest, &vec![], &sources, - ) - }).collect(); - - let resp = request_sync_local_index( - dest_zome_name_from_config, dest_fn_name, - source, dest_addresses, remove_addresses, - ); - - let mut indexes_created = merge_indexing_results(&created, |r| { r.indexes_created.to_owned() }); - let mut indexes_removed = merge_indexing_results(&deleted, |r| { r.indexes_removed.to_owned() }); - match resp { - Ok(mut remote_results) => { - indexes_created.append(&mut remote_results.indexes_created); - indexes_removed.append(&mut remote_results.indexes_removed); - }, - Err(e) => { - indexes_created.push(Err(e)); - }, - }; - - Ok(RemoteEntryLinkResponse { indexes_created, indexes_removed }) -} - -fn merge_indexing_results( - foreign_zome_results: &[OtherCellResult], - response_accessor: impl Fn(&RemoteEntryLinkResponse) -> Vec>, -) -> Vec> -{ - foreign_zome_results.iter() - .flat_map(|r| { - match r { - Ok(resp) => response_accessor(resp), - Err(e) => vec![Err(e.to_owned())], - } - }) - .collect() -} - /// Ask another bridged cell to build a 'destination query index' to match the /// 'origin' one that we have just created locally. /// When calling zomes within the same DNA, use `None` as `to_cell`. From 2bc60309687171a87cbae7e366a6b120fa4bf61d Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:07:27 +1000 Subject: [PATCH 069/181] update all index handler methods to transport-agnostic calling format --- zomes/rea_commitment/lib/src/lib.rs | 34 ++++++++++++------- zomes/rea_commitment/storage/src/lib.rs | 2 ++ zomes/rea_economic_event/lib/src/lib.rs | 18 ++++++---- zomes/rea_economic_event/storage/src/lib.rs | 1 + zomes/rea_economic_resource/lib/src/lib.rs | 9 +++-- .../rea_economic_resource/storage/src/lib.rs | 1 + .../lib_destination/src/lib.rs | 8 ++--- zomes/rea_fulfillment/lib_origin/src/lib.rs | 8 ++--- zomes/rea_intent/lib/src/lib.rs | 21 +++++++----- zomes/rea_intent/storage/src/lib.rs | 1 + zomes/rea_proposed_intent/lib/src/lib.rs | 13 ++++--- zomes/rea_proposed_intent/storage/src/lib.rs | 3 +- zomes/rea_proposed_to/lib/src/lib.rs | 4 +-- .../lib_destination/src/lib.rs | 8 ++--- zomes/rea_satisfaction/lib_origin/src/lib.rs | 20 +++++------ 15 files changed, 93 insertions(+), 58 deletions(-) diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs index 872e3e821..6fb6b1d88 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -32,13 +32,13 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - // handle link fields if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. 
} = &commitment { - create_index!(Remote(commitment.input_of(input_of), process.committed_inputs(&base_address)))?; + create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?; }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &commitment { - create_index!(Remote(commitment.output_of(output_of), process.committed_outputs(&base_address)))?; + create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address))?; }; if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. } = &commitment { - create_index!(Remote(commitment.clause_of(clause_of), agreement.commitments(&base_address)))?; + create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address))?; }; // :TODO: pass results from link creation rather than re-reading @@ -62,32 +62,32 @@ pub fn handle_update_commitment(entry_def_id: S, commitment: UpdateRequest) - if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + update_index!( commitment .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_inputs(&base_address) - ))?; + )?; } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + update_index!( commitment .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_outputs(&base_address) - ))?; + )?; } if new_entry.clause_of != prev_entry.clause_of { let new_value = match &new_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + update_index!( commitment .clause_of(new_value.as_slice()) .not(prev_value.as_slice()), agreement.commitments(&base_address) - ))?; + )?; } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
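// :NOTE: orientation sketch only; `dna_hash()` below is a hypothetical
// accessor, not something defined in this codebase. With the `Local(...)` /
// `Remote(...)` wrappers gone, routing can be inferred from the identifier
// itself: every `DnaAddressable` ID pairs a `DnaHash` with an `EntryHash`,
// so the index client can compare the target's DNA against the executing
// cell, along the lines of:
//
//     fn is_local_target<B: DnaAddressable<EntryHash>>(addr: &B) -> ExternResult<bool> {
//         Ok(addr.dna_hash() == &dna_info()?.hash) // hypothetical `dna_hash()` accessor
//     }
//
// Same-DNA targets then resolve to local zome calls, while foreign targets
// are forwarded via the remote index sync RPC.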
@@ -100,13 +100,13 @@ pub fn handle_delete_commitment(revision_id: HeaderHash) -> RecordAPIResult Option { Some(conf.commitment.index_zome) } +/// Properties accessor for zome config +fn read_process_index_zome(conf: DnaConfigSlice) -> Option { + conf.commitment.process_index_zome +} + +/// Properties accessor for zome config +fn read_agreement_index_zome(conf: DnaConfigSlice) -> Option { + conf.commitment.agreement_index_zome +} + // @see construct_response fn get_link_fields(commitment: &CommitmentAddress) -> RecordAPIResult<( Vec, diff --git a/zomes/rea_commitment/storage/src/lib.rs b/zomes/rea_commitment/storage/src/lib.rs index 35ebc4631..aee613a76 100644 --- a/zomes/rea_commitment/storage/src/lib.rs +++ b/zomes/rea_commitment/storage/src/lib.rs @@ -47,6 +47,8 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct CommitmentZomeConfig { pub index_zome: String, + pub process_index_zome: Option, + pub agreement_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index ac779e64d..1eab40fe8 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -93,7 +93,7 @@ impl API for EconomicEventZomePermissableDefault { // Link any affected resources to this event so that we can pull all the events which affect any resource for resource_data in resources_affected.iter() { - create_index!(Local(economic_event.affects(&(resource_data.1)), economic_resource.affected_by(&event_address)))?; + create_index!(economic_event.affects(resource_data.1), economic_resource.affected_by(&event_address))?; } match resource_created { @@ -131,13 +131,13 @@ impl API for EconomicEventZomePermissableDefault { // handle link fields if let Some(process_address) = entry.input_of { - update_index!(Local(economic_event.input_of.not(&vec![process_address.to_owned()]), process.inputs(&base_address)))?; + update_index!(economic_event.input_of.not(&vec![process_address.to_owned()]), process.inputs(&base_address))?; } if let Some(process_address) = entry.output_of { - update_index!(Local(economic_event.output_of.not(&vec![process_address.to_owned()]), process.outputs(&base_address)))?; + update_index!(economic_event.output_of.not(&vec![process_address.to_owned()]), process.outputs(&base_address))?; } if let Some(agreement_address) = entry.realization_of { - let _ = update_index!(Remote(economic_event.realization_of.not(&vec![agreement_address.to_owned()]), agreement.economic_events(&base_address))); + let _ = update_index!(economic_event.realization_of.not(&vec![agreement_address.to_owned()]), agreement.economic_events(&base_address)); } // :TODO: handle cleanup of foreign key fields? (fulfillment, satisfaction) @@ -167,6 +167,10 @@ fn read_process_index_zome(conf: DnaConfigSlice) -> Option { conf.economic_event.process_index_zome } +fn read_agreement_index_zome(conf: DnaConfigSlice) -> Option { + conf.economic_event.agreement_index_zome +} + fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEventCreateRequest, resource_address: Option, ) -> RecordAPIResult<(HeaderHash, EconomicEventAddress, EntryData)> where S: AsRef @@ -182,13 +186,13 @@ fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEvent // handle link fields // :TODO: propagate errors if let EconomicEventCreateRequest { input_of: MaybeUndefined::Some(input_of), .. 
} = event { - create_index!(Local(economic_event.input_of(input_of), process.inputs(&base_address)))?; + create_index!(economic_event.input_of(input_of), process.inputs(&base_address))?; }; if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { - create_index!(Local(economic_event.output_of(output_of), process.outputs(&base_address)))?; + create_index!(economic_event.output_of(output_of), process.outputs(&base_address))?; }; if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. } = event { - create_index!(Remote(economic_event.realization_of(realization_of), agreement.realized(&base_address)))?; + create_index!(economic_event.realization_of(realization_of), agreement.realized(&base_address))?; }; Ok((revision_id, base_address, entry_resp)) diff --git a/zomes/rea_economic_event/storage/src/lib.rs b/zomes/rea_economic_event/storage/src/lib.rs index 5c4b6f14f..ec58c0e4c 100644 --- a/zomes/rea_economic_event/storage/src/lib.rs +++ b/zomes/rea_economic_event/storage/src/lib.rs @@ -48,6 +48,7 @@ pub struct EconomicEventZomeConfig { pub economic_resource_zome: Option, pub economic_resource_index_zome: Option, pub process_index_zome: Option, + pub agreement_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index 1baa683dd..c86eed78e 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -87,10 +87,10 @@ impl API for EconomicResourceZomePermissableDefault { // :NOTE: this will always run- resource without a specification ID would fail entry validation (implicit in the above) if let Some(conforms_to) = resource_spec { - let _ = create_index!(Remote(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address))); + let _ = create_index!(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address)); } if let Some(contained_in) = resource_params.get_contained_in() { - create_index!(Self(economic_resource(&base_address).contained_in(&contained_in)))?; + let _ = create_index!(economic_resource(&base_address).contained_in(&contained_in))?; }; Ok((revision_id, base_address, entry_resp)) @@ -162,6 +162,11 @@ fn read_economic_resource_index_zome(conf: DnaConfigSlice) -> Option { Some(conf.economic_resource.index_zome) } +/// Properties accessor for zome config +fn read_resource_specification_index_zome(conf: DnaConfigSlice) -> Option { + conf.economic_resource.resource_specification_index_zome +} + fn handle_update_inventory_resource( resource_entry_def_id: S, resource_addr: &HeaderHash, diff --git a/zomes/rea_economic_resource/storage/src/lib.rs b/zomes/rea_economic_resource/storage/src/lib.rs index 05fd761b9..fa67d279b 100644 --- a/zomes/rea_economic_resource/storage/src/lib.rs +++ b/zomes/rea_economic_resource/storage/src/lib.rs @@ -48,6 +48,7 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct EconomicResourceZomeConfig { pub index_zome: String, + pub resource_specification_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index e6cbdaa66..15bd22a24 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ 
b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -32,7 +32,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(Local(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address)))?; + create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address))?; // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs @@ -52,12 +52,12 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) let (revision_id, base_address, new_entry, prev_entry): (_, FulfillmentAddress, EntryData, EntryData) = update_record(&entry_def_id, &fulfillment.get_revision_id(), fulfillment.to_owned())?; if new_entry.fulfilled_by != prev_entry.fulfilled_by { - update_index!(Local( + update_index!( fulfillment .fulfilled_by(&vec![new_entry.fulfilled_by.clone()]) .not(&vec![prev_entry.fulfilled_by]), event.fulfills(&base_address) - ))?; + )?; } construct_response(&base_address, &revision_id, &new_entry) @@ -69,7 +69,7 @@ pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // handle link fields - update_index!(Local(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), event.fulfills(&base_address)))?; + update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), event.fulfills(&base_address))?; delete_record::(&revision_id) } diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 4b20eb318..18c4064ef 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -34,7 +34,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(Local(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address)))?; + create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address))?; // update in the associated foreign DNA as well let _pingback: OtherCellResult = call_zome_method( @@ -61,12 +61,12 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) // update commitment indexes in local DNA if new_entry.fulfills != prev_entry.fulfills { - update_index!(Local( + update_index!( fulfillment .fulfills(&vec![new_entry.fulfills.clone()]) .not(&vec![prev_entry.fulfills]), commitment.fulfilled_by(&base_address) - ))?; + )?; } // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) @@ -88,7 +88,7 @@ pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // update commitment indexes in local DNA - update_index!(Local(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address)))?; + update_index!(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address))?; // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) let _pingback: OtherCellResult = call_zome_method( diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index f4d2493bb..9fbc6afff 
100644 --- a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -33,10 +33,10 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record // handle link fields if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &intent { - create_index!(Remote(intent.input_of(input_of), process.intended_inputs(&base_address)))?; + create_index!(intent.input_of(input_of), process.intended_inputs(&base_address))?; }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &intent { - create_index!(Remote(intent.output_of(output_of), process.intended_outputs(&base_address)))?; + create_index!(intent.output_of(output_of), process.intended_outputs(&base_address))?; }; // return entire record structure @@ -60,22 +60,22 @@ pub fn handle_update_intent(entry_def_id: S, intent: UpdateRequest) -> Record if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + update_index!( intent .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_inputs(&base_address) - ))?; + )?; } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!(Remote( + update_index!( intent .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_outputs(&base_address) - ))?; + )?; } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) 
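// :NOTE: configuration sketch; the exact DNA properties layout below is an
// assumption, though the field names mirror the `IntentZomeConfig` additions
// in this patch. The optional `*_index_zome` settings declare where the
// counterpart index zomes live, e.g.:
//
//     intent:
//       index_zome: "intent_index"
//       process_index_zome: "process_index" # omit when process indexes live in another DNA
//
// When a setting is omitted the accessor returns `None`, which presumably
// tells the index client there is no local index zome to notify for that
// relationship.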
@@ -88,10 +88,10 @@ pub fn handle_delete_intent(revision_id: HeaderHash) -> RecordAPIResult // handle link fields if let Some(process_address) = entry.input_of { - update_index!(Remote(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address)))?; + update_index!(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address))?; } if let Some(process_address) = entry.output_of { - update_index!(Remote(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address)))?; + update_index!(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address))?; } // delete entry last, as it must be present in order for links to be removed @@ -146,6 +146,11 @@ fn read_intent_index_zome(conf: DnaConfigSlice) -> Option { Some(conf.intent.index_zome) } +/// Properties accessor for zome config +fn read_process_index_zome(conf: DnaConfigSlice) -> Option { + conf.intent.process_index_zome +} + // @see construct_response pub fn get_link_fields(intent: &IntentAddress) -> RecordAPIResult<( Vec, diff --git a/zomes/rea_intent/storage/src/lib.rs b/zomes/rea_intent/storage/src/lib.rs index ecedd6730..4e7289f20 100644 --- a/zomes/rea_intent/storage/src/lib.rs +++ b/zomes/rea_intent/storage/src/lib.rs @@ -45,6 +45,7 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct IntentZomeConfig { pub index_zome: String, + pub process_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index f1d812688..bbb979e41 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -27,8 +27,8 @@ pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: Create let (revision_id, base_address, entry_resp): (_, ProposedIntentAddress, EntryData) = create_record(&entry_def_id, proposed_intent.to_owned())?; // handle link fields - create_index!(Local(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address)))?; - create_index!(Remote(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address)))?; + create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address))?; + create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address))?; Ok(construct_response(&base_address, &revision_id, &entry_resp)) } @@ -46,7 +46,7 @@ pub fn handle_delete_proposed_intent(revision_id: &HeaderHash) -> RecordAPIResul // Notify indexing zomes in local DNA (& validate). // Allows authors of indexing modules to intervene in the deletion of a record. - update_index!(Local(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address)))?; + update_index!(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address))?; // manage record deletion let res = delete_record::(&revision_id); @@ -54,7 +54,7 @@ pub fn handle_delete_proposed_intent(revision_id: &HeaderHash) -> RecordAPIResul // Update in associated foreign DNAs as well. // :TODO: In this pattern, foreign cells can also intervene in record deletion, and cause rollback. // Is this desirable? Should the behaviour be configurable? 
- update_index!(Remote(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address)))?; + update_index!(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address))?; res } @@ -83,3 +83,8 @@ fn read_proposed_intent_index_zome(conf: DnaConfigSlice) -> Option { fn read_proposal_index_zome(conf: DnaConfigSlice) -> Option { Some(conf.proposed_intent.proposal_index_zome) } + +/// Properties accessor for zome config. +fn read_intent_index_zome(conf: DnaConfigSlice) -> Option { + conf.proposed_intent.intent_index_zome +} diff --git a/zomes/rea_proposed_intent/storage/src/lib.rs b/zomes/rea_proposed_intent/storage/src/lib.rs index 65a183861..aaa5bf0eb 100644 --- a/zomes/rea_proposed_intent/storage/src/lib.rs +++ b/zomes/rea_proposed_intent/storage/src/lib.rs @@ -26,8 +26,9 @@ pub struct DnaConfigSlice { #[derive(Clone, Serialize, Deserialize, SerializedBytes, PartialEq, Debug)] pub struct ProposedIntentZomeConfig { - pub proposal_index_zome: String, pub index_zome: String, + pub proposal_index_zome: String, + pub intent_index_zome: Option, } //---------------- RECORD INTERNALS & VALIDATION ---------------- diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 5b34c39fa..827f66648 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs @@ -27,7 +27,7 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest) let (revision_id, base_address, entry_resp): (_, ProposedToAddress, EntryData) = create_record(&entry_def_id, proposed_to.to_owned())?; // handle link fields - create_index!(Local(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address)))?; + create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address))?; // :TODO: create index for retrieving all proposals for an agent @@ -45,7 +45,7 @@ pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult(&revision_id)?; - update_index!(Local(proposed_to.proposed.not(&vec![entry.proposed]), proposal.proposed_to(&base_address)))?; + update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.proposed_to(&base_address))?; delete_record::(&revision_id) } diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index f50ed6fd2..4c480f400 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -32,7 +32,7 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - create_index!(Local(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address)))?; + create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address))?; // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs @@ -52,12 +52,12 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques let (revision_id, base_address, new_entry, prev_entry): (_, SatisfactionAddress, EntryData, EntryData) = update_record(&entry_def_id, &satisfaction.get_revision_id(), satisfaction.to_owned())?; if new_entry.satisfied_by != prev_entry.satisfied_by { - update_index!(Local( + update_index!( 
satisfaction .satisfied_by(&vec![new_entry.satisfied_by.to_owned()]) .not(&vec![prev_entry.satisfied_by]), economic_event.satisfies(&base_address) - ))?; + )?; } construct_response(&base_address, &revision_id, &new_entry) @@ -69,7 +69,7 @@ pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // handle link fields - update_index!(Local(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address)))?; + update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address))?; delete_record::(&revision_id) } diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index 63758a23e..5c29c84a8 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -36,13 +36,13 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - create_index!(Local(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address)))?; + create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address))?; // link entries which may be local or remote let event_or_commitment = satisfaction.get_satisfied_by(); if is_satisfiedby_local_commitment(event_or_commitment)? { // links to local commitment, create link index pair - create_index!(Local(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address)))?; + create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address))?; } else { // links to remote event, ping associated foreign DNA & fail if there's an error // :TODO: consider the implications of this in loosely coordinated multi-network spaces @@ -70,12 +70,12 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques // update intent indexes in local DNA if new_entry.satisfies != prev_entry.satisfies { - update_index!(Local( + update_index!( satisfaction .satisfies(&vec![new_entry.satisfies.to_owned()]) .not(&vec![prev_entry.satisfies]), intent.satisfied_by(&base_address) - ))?; + )?; } // update commitment / event indexes in local and/or remote DNA @@ -87,12 +87,12 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques if same_dna { if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? { // both values were local, update the index directly - update_index!(Local( + update_index!( satisfaction .satisfied_by(&vec![new_entry.satisfied_by.to_owned()]) .not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address) - ))?; + )?; } else { // both values were remote and in the same DNA, forward the update call_zome_method( @@ -104,7 +104,7 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques } else { if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? 
{ // previous value was local, clear the index directly - update_index!(Local(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address)))?; + update_index!(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address))?; } else { // previous value was remote, handle the remote update as a deletion call_zome_method( @@ -116,7 +116,7 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques if is_satisfiedby_local_commitment(&new_entry.satisfied_by)? { // new value was local, add the index directly - update_index!(Local(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address)))?; + update_index!(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address))?; } else { // new value was remote, handle the remote update as a creation call_zome_method( @@ -144,12 +144,12 @@ pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // update intent indexes in local DNA - update_index!(Local(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address)))?; + update_index!(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address))?; // update commitment & event indexes in local or remote DNAs let event_or_commitment = entry.satisfied_by.to_owned(); if is_satisfiedby_local_commitment(&event_or_commitment)? { - update_index!(Local(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address)))?; + update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address))?; } else { // links to remote event, ping associated foreign DNA & fail if there's an error // :TODO: consider the implications of this in loosely coordinated multi-network spaces From a999c7cfc49a23aa8562c44f330b5c427f8708b3 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:11:05 +1000 Subject: [PATCH 070/181] fix tests for EconomicEvent / EconomicResource list API --- .../economic-event/event_resource_list_api.js | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/test/economic-event/event_resource_list_api.js b/test/economic-event/event_resource_list_api.js index c95d0db8a..4da96f77b 100644 --- a/test/economic-event/event_resource_list_api.js +++ b/test/economic-event/event_resource_list_api.js @@ -118,22 +118,30 @@ runner.registerScenario('Event/Resource list APIs', async (s, t) => { resp = await alice.graphQL(`{ economicEvents { - id + edges { + node { + id + } + } } economicResources { - id + edges { + node { + id + } + } } }`) - t.equal(resp.data.economicEvents.length, 5, 'all events correctly retrievable') + t.equal(resp.data.economicEvents.edges.length, 5, 'all events correctly retrievable') t.deepEqual( - resp.data.economicEvents.sort(sortById), + resp.data.economicEvents.edges.map(e => e.node).sort(sortById), [{ id: event1Id }, { id: event2Id }, { id: event3Id }, { id: event4Id }, { id: event5Id }].sort(sortById), 'event IDs OK' ) - t.equal(resp.data.economicResources.length, 2, 'all resources correctly retrievable') + t.equal(resp.data.economicResources.edges.length, 2, 'all resources correctly retrievable') t.deepEqual( - resp.data.economicResources.sort(sortById), + resp.data.economicResources.edges.map(e => e.node).sort(sortById), [{ id: resource1Id }, { id: resource2Id }].sort(sortById), 'resource IDs OK' ) From 
63e0730b8cfcb4080097e00f565bc67d70e36600 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:19:31 +1000 Subject: [PATCH 071/181] start porting test for new Tryorama, but can't get past install anyway --- test/flows/flow_records_graphql.js | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/test/flows/flow_records_graphql.js b/test/flows/flow_records_graphql.js index 9581052cd..6af89ebe5 100644 --- a/test/flows/flow_records_graphql.js +++ b/test/flows/flow_records_graphql.js @@ -3,24 +3,19 @@ const { buildConfig, buildRunner, buildPlayer, + mockAgentId, } = require('../init') const runner = buildRunner() - -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), -}, { - vf_observation: ['planning', 'observation'], -}) +const config = buildConfig() runner.registerScenario('flow records and relationships', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { cells: [observation, planning], graphQL } = await buildPlayer(s, config, ['observation', 'planning']) - const tempProviderAgentId = 'some-agent-provider' - const tempReceiverAgentId = 'some-agent-receiver' + const tempProviderAgentId = mockAgentId() + const tempReceiverAgentId = mockAgentId() - const pResp = await alice.graphQL(` + const pResp = await graphQL(` mutation($process: ProcessCreateParams!) { createProcess(process: $process) { process { @@ -38,7 +33,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { t.ok(pResp.data.createProcess.process.id, "process created OK") const processId = pResp.data.createProcess.process.id - const cResp = await alice.graphQL(` + const cResp = await graphQL(` mutation( $eventI: EconomicEventCreateParams!, $commitmentI: CommitmentCreateParams!, @@ -148,7 +143,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { const outputCommitmentId = cResp.data.outputCommitment.commitment.id const outputEventId = cResp.data.outputEvent.economicEvent.id - let resp = await alice.graphQL(` + let resp = await graphQL(` { process(id: "${processId}") { inputs { @@ -223,7 +218,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { t.equal(resp.data.outputCommitment.outputOf.id, processId, 'output commitment process ref OK') t.equal(resp.data.outputIntent.outputOf.id, processId, 'output intent process ref OK') - const mResp = await alice.graphQL(` + const mResp = await graphQL(` mutation( $inputFulfillment: FulfillmentCreateParams!, $inputEventSatisfaction: SatisfactionCreateParams!, @@ -269,7 +264,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { const iesId = mResp.data.ies.satisfaction.id const icsId = mResp.data.ics.satisfaction.id - resp = await alice.graphQL(` + resp = await graphQL(` { inputEvent: economicEvent(id:"${inputEventId}") { fulfills { From 57681b2aa84577fd95985e1d8fc4585c0361fc16 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:21:16 +1000 Subject: [PATCH 072/181] fix EconomicResource index zome failing EntryDef checks --- .../zome_idx_observation/src/lib.rs | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs b/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs index a273ed0b7..9e7b41fea 100644 --- a/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_resource/zome_idx_observation/src/lib.rs @@ -6,7 +6,22 @@ */ use 
hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_economic_resource_rpc::*; -use hc_zome_rea_economic_event_rpc::ResourceResponseData as ResponseData; +use hc_zome_rea_economic_event_rpc::{ + ResourceResponse as Response, + ResourceResponseData as ResponseData, +}; + +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + EconomicResourceAddress::entry_def(), + ResourceSpecificationAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} #[index_zome] struct EconomicResource { From aee0c9c4305ccd4ed51ce920dc52193b615cc9dd Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:28:26 +1000 Subject: [PATCH 073/181] presume PNPM to be installed in Github CI as per new Nix config addition --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d4ec296b2..f62168638 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -62,7 +62,7 @@ jobs: - name: Prepare Nix environment run: nix-shell --command "echo Completed" - name: Install PNPM and dependencies - run: nix-shell --command "npm i pnpm && npx pnpm install --no-frozen-lockfile" + run: nix-shell --command "pnpm install --no-frozen-lockfile" - name: Set up release bundle from template run: | cp -a bundles_templates/* bundles/ @@ -70,9 +70,9 @@ jobs: sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/full_suite_release_template/happ.yaml cat bundles/full_suite_release_template/happ.yaml - name: Build WASM, dnas, happs - run: nix-shell --run 'npx pnpm run build:crates' + run: nix-shell --run 'pnpm run build:crates' - name: Build explorer UI and webhapp package - run: nix-shell --run 'npx pnpm run build:webhapp' + run: nix-shell --run 'pnpm run build:webhapp' - name: upload bundles env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 40a030f341df28c609ea384a88ff5a8305ccf994 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 16 Mar 2022 22:31:01 +1000 Subject: [PATCH 074/181] bump package versions --- modules/graphql-client/package.json | 2 +- modules/vf-graphql-holochain/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/graphql-client/package.json b/modules/graphql-client/package.json index 21279037e..cde9da1ea 100644 --- a/modules/graphql-client/package.json +++ b/modules/graphql-client/package.json @@ -1,6 +1,6 @@ { "name": "@vf-ui/graphql-client-holochain", - "version": "0.0.1-alpha.3", + "version": "0.0.1-alpha.4", "description": "ValueFlows GraphQLClient configurations, providing pluggable backend datasources for different distributed, federated and client/server infrastructure.", "main": "build/index.js", "types": "build/index.d.ts", diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index e057b6875..318bb98a1 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -1,6 +1,6 @@ { "name": "@valueflows/vf-graphql-holochain", - "version": "0.0.1-alpha.3", + "version": "0.0.1-alpha.4", "main": "build/index.js", "types": "build/index.d.ts", "description": "GraphQL schema bindings for the Holochain implementation of ValueFlows", From b42aed5404cb7e7f1ac737ce9c4104c18c640db9 
Mon Sep 17 00:00:00 2001 From: HackMD Date: Wed, 23 Mar 2022 22:40:10 +0000 Subject: [PATCH 075/181] initial push --- completions.md | 222 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 222 insertions(+) create mode 100644 completions.md diff --git a/completions.md b/completions.md new file mode 100644 index 000000000..8cc5dae3e --- /dev/null +++ b/completions.md @@ -0,0 +1,222 @@ +# holo-rea Graphql API Completions & Statuses + +From the point-of-view of someone calling through Graphql, what is the overall status of each function that exists in the graphql schema. Many functions are not yet implemented so it is important to know that upfront, which are, and which aren't. + +A filtered list of related github issues for tracking these work statuses, so that you can contribute, or report or discuss issues, can be found here: https://github.com/holo-rea/holo-rea/labels/graphql-api + +## System of Record Comparison + +All of the implementation details should be sourced from the [Valueflows RDF Turtle file](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL) (here's a [Formatted View](http://150.146.207.114/lode/extract?owlapi=true&url=https://lab.allmende.io/valueflows/valueflows/-/raw/master/release-doc-in-process/all_vf.TTL)), which is the system of record. While you are looking around, please note that the objects themselves don't have property definitions. The properties themselves define which objects the apply to in the `rdfs:domain` field. The range of values the properties can take is defined by the `rdfs:range` field This is because RDF views these things like arrows or maps, going from the domain to the range. + +The top level objects found in the spec are: + +**Key** +| symbol | meaning | +| ------------------- | --------------------- | +| :grey_exclamation: | Not used | +| - | Not found/not started | +| :hammer_and_wrench: | In progress | +| :heavy_check_mark: | Done | +| K | Knowledge Layer | +| P | Planning Layer | +| O | Observation Layer | + +**Outside Ontologies** +| RDF Object | vf-schema file | zome | comments | +| --------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------- | +| [foaf:Agent](http://xmlns.com/foaf/spec/) | :grey_exclamation: [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql) | :grey_exclamation: | | +| [org:Organization](https://www.w3.org/TR/vocab-org/) | :grey_exclamation: | :grey_exclamation: | | +| [om2:Measure](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L64) | [lib/vf_measurement](https://github.com/holo-rea/holo-rea/blob/sprout/lib/vf_measurement/src/lib.rs#L19) | | +| [om2:Unit](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | :grey_exclamation: [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L48) | :grey_exclamation: [rea_unit](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_unit) | This is a technicality. 
The general shape of it is correct; however, the ontology represents a hierarchy of units that is not correctly reflected in the backend, since it only stores a label and a symbol. The full ontology allows for more flexibility with prefixes, dimension, exponent, etc.; it has enough information to allow conversion between units. It would be hard to implement without a triple-store. |
+| [geo:SpatialThing](https://www.w3.org/2003/01/geo/) | [geolocation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/geolocation.gql#L15) | - | |
+| [time](https://www.w3.org/2006/time#) | :grey_exclamation: | :grey_exclamation: | vf-schema: The GraphQL spec only uses the `DateTime` and `Duration` scalars. |
+| [cd:created](https://www.dublincore.org/specifications/dublin-core/dcmi-terms/#created) | :grey_exclamation: | :grey_exclamation: | vf-schema: GraphQL spec only uses the `DateTime` scalar. |
+| [skos:note](https://www.w3.org/TR/skos-reference/#note) | :grey_exclamation: | :grey_exclamation: | vf-schema: Just a `String`. |
+| [dtype:numericUnion](http://www.linkedmodel.org/schema/dtype#numericUnion) | :grey_exclamation: | :grey_exclamation: | This is only needed for the [`om2:hasNumericalValue`](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L549), so it's only internal. |
+
+
+You may notice there is no specification of an Agent. This is because the Valueflows RDF spec uses the [FOAF Vocabulary](http://xmlns.com/foaf/spec/) and the [Organization Ontology](https://www.w3.org/TR/vocab-org/). The holo-rea project has [its own set of concepts right now](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql).
+
+| layer | RDF object | vf-schemas file | zome | hrea "module" or DNA | comments |
+| ------| ---------- | --------------- | ---- | -------------------- | -------- |
+| K | [Scenario Definition](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L124) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L44) | - | - | |
+| K | [Process Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L104) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L70) | [rea_process_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | |
+| K | [Resource Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L92) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L45) | [rea_resource_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_resource_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | zome: Missing `resource_classified_as`, `default_unit_of_resource`. |
+| K | [Action](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L32) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L19) | [rea_action](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_action/zome) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | vf-schema: Missing `containedEffect`, `locationEffect`. zome: Same as vf-schema. |
+| K | [Agent Relationship Role](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L74) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L126) | - | - | vf-schema: Missing `roleBehavior`. |
+| K | [Role Behavior](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L80) | - | - | - | vf-schema: This doesn't seem to be implemented yet. |
+| K | [Recipe Exchange](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L118) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L106) | - | - | |
+| K | [Recipe Flow](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L112) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L53) | - | - | |
+| K | [Recipe Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L98) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L84) | - | - | |
+| K | [Recipe Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L18) | - | - | |
+| P | [Scenario](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L16) | - | - | |
+| P | [Plan](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L133) | [plan](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/plan.gql#L16) | - | - | vf-schema: has extra fields `deletable` and `inScopeOf`; are these for internal use? |
+| P, O | [Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L196) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L155) | [rea_process](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `plannedIn`. What is `unplannedEvents`? For the inverse relationships, do we want to group all `Intent`s, `Commitment`s, and `EconomicEvent`s together in the `inputs` and `outputs`? How are `track` and `trace` being handled? dna: Has extra `before` and `after` fields. `planned_within` is present, despite no implementation (because it just points to an `entryHash`). This is often placed in with the Observation layer, or on the line between Observation and Planning. |
+| P | [Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L139) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L94) | [rea_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_intent) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | vf-schema: Missing `provider`, `receiver`, `atLocation`. Has a `satisfiedBy` inverse map to `Satisfaction`'s `satisfies`. |
+| P | [Proposed Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L151) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L49) | [rea_proposed_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_intent) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | |
+| P | [Proposal](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L145) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L16) | [rea_proposal](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposal) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | vf-schema: Missing `eligibleLocation`. Has a `publishes` inverse map to `ProposedIntent`'s `publishedIn`. zome: same. |
+| P | [Proposed To](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L157) | [proposal.agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/bridging/proposal.agent.gql#L20) | [rea_proposed_to](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_to) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | |
+| P | [Commitment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L163) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L24) | [rea_commitment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_commitment) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | vf-schema: Missing `atLocation` and `clauseOf`. Has `fulfilledBy` and `satisfies` inverse maps to `Fulfillment`'s `fulfills` and `Satisfaction`'s `satisfiedBy`. zome: has `plan` instead of `planned_within`. |
+| P | [Satisfaction](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L169) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L188) | [rea_satisfaction](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_satisfaction) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | zome: allows `satisfied_by` to be only a single `EconomicEvent` or `Commitment`. Is this correct?
| +| P | [Agreement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [agreement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agreement.gql#L19) | [rea_agreement](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_agreement) | [agreement](https://github.com/holo-rea/holo-rea/tree/sprout/happs/agreement) | | +| P | [Claim](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L18) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. | +| O | [Economic Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L190) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L83) | [rea_economic_resource](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_resource) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `currentLocation`. Has `contains`, `track`, `trace` maps as additions. | +| O | [dfc:ProductBatch](http://www.virtual-assembly.org/DataFoodConsortium/BusinessOntology) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L139) | - | - | vf-schema: Missing links to `identifies`, but that probably doesn't matter for our use case. | +| O | [Economic Event](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L202) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L19) | [rea_economic_event](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_event) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `realizationOf`, `image`, `provider`, `receiver`, `atLocation`, `toLocation`. Has `track` and `trace` going to `ProductionFlowItem`s. zome: Missing `to_location`. | +| O | [Appreciation](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L232) | [appreciation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/appreciation.gql#L17) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. | +| P, O | [Fulfillment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L214) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L166) | [rea_fulfillment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_fulfillment) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | !! Discrepancy between "layer" and "vf-schema" files. 
FIXME | +| O | [Settlement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L226) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L61) | - | - | | +| O | [Agent Relationship](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L208) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L104) | - | - | | + +There are internal system objects used to help specify the rules of logic around the actions: + +* [Resource Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L1278) +* [Contained Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L68) +* [Location Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L62) +* [Onhand Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L56) +* [Input/Output](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L44) +* [Pairs With](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L38) + +In the gql version, these are just strings (need to learn more specifics). + + + + + +## GraphQL Implementation + +### Mutations + +#### Implemented & Tested +- [x] createEconomicEvent + - [x] fixed - `newInventoriedResource` `name` property is not persisted - [issue #202](https://github.com/holo-rea/holo-rea/issues/202) +- [x] createUnit +- [x] createProcess + +#### Implemented & Not Yet Tested +- [x] createAgreement +- [x] updateAgreement +- [x] deleteAgreement +- [x] createCommitment +- [x] updateCommitment +- [x] deleteCommitment +- [x] updateEconomicEvent +- [x] deleteEconomicEvent +- [x] createFulfillment +- [x] updateFulfillment +- [x] deleteFulfillment +- [x] updateEconomicResource +- [x] createIntent +- [x] updateIntent +- [x] deleteIntent +- [x] updateProcess +- [x] deleteProcess +- [x] createProcessSpecification +- [x] updateProcessSpecification +- [x] deleteProcessSpecification +- [x] createProposal +- [x] updateProposal +- [x] deleteProposal +- [x] proposeIntent +- [x] deleteProposedIntent +- [x] proposeTo +- [x] deleteProposedTo +- [x] updateResourceSpecification +- [x] deleteResourceSpecification +- [x] createSatisfaction +- [x] updateSatisfaction +- [x] deleteSatisfaction +- [x] updateUnit +- [x] deleteUnit + +#### Partially Implemented +- [x] createResourceSpecification + - [ ] lacking `defaultUnitOfResource` - [issue #155](https://github.com/holo-rea/holo-rea/issues/155) + +#### Has Minor Bug + +#### Has Fatal Bug + +#### Not Yet Implemented +- [ ] deleteEconomicResource - [issue #67](https://github.com/holo-rea/holo-rea/issues/67) +- [ ] createProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] updateProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] deleteProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134) +- [ ] createPerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updatePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] deletePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] createOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172) +- [ ] updateOrganization - [issue 
#172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] createAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updateAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] createAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updateAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+
+
+
+### Queries
+
+#### Implemented & Tested
+- [x] action
+- [x] actions
+- [x] unit
+- [x] economicEvent
+
+#### Implemented & Not Yet Tested
+- [x] agreement
+- [x] commitment
+- [x] resourceSpecification
+- [x] processSpecification
+- [x] process
+- [x] intent
+- [x] fulfillment
+- [x] satisfaction
+- [x] proposal
+
+#### Has Partial Implementation
+- [x] myAgent
+  - [ ] TODO: define what's lacking
+- [x] agent
+  - [ ] TODO: define what's lacking
+- [x] economicResources
+  - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85)
+- [x] economicEvents
+  - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85)
+- [x] economicResource
+  - [ ] `primaryAccountable` is not implemented - [issue #133](https://github.com/holo-rea/holo-rea/issues/133)
+
+#### Has Minor Bug
+
+
+#### Has Fatal Bug
+- [ ] agents (response always wrongly gives an empty array - [issue #210](https://github.com/holo-rea/holo-rea/issues/210))
+
+#### Not Yet Implemented
+- [ ] proposals - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] satisfactions - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] fulfillments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] intents - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] commitments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] processes - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] productBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] productBatches - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) and [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] units - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] processSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] resourceSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] agreements - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] organization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] organizations - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] person - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] people - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationships - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationshipRoles - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+
+### Resolvers
+
+(https://www.apollographql.com/docs/apollo-server/data/resolvers/) +Connor todo \ No newline at end of file From 781ae1d4295cea6a38f02c51d5b17a3f7f058d98 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Thu, 24 Mar 2022 12:14:41 -0700 Subject: [PATCH 076/181] rename happs folder to dna_bundles --- .github/workflows/release.yml | 12 ++++---- .gitignore | 2 +- README.md | 2 +- _templates/init-zome/new-index-zome/index.js | 2 +- _templates/init-zome/new-zome/index.js | 2 +- bundles/full_suite/happ.yaml | 6 ++-- bundles/obs_and_agent/happ.yaml | 4 +-- completions.md | 28 +++++++++---------- {happs => dna_bundles}/agent/dna.yaml | 0 {happs => dna_bundles}/agreement/dna.yaml | 0 {happs => dna_bundles}/observation/dna.yaml | 0 {happs => dna_bundles}/planning/dna.yaml | 0 {happs => dna_bundles}/proposal/dna.yaml | 0 {happs => dna_bundles}/specification/dna.yaml | 0 docs/README.md | 2 +- docs/Workflow-automation.md | 4 +-- modules/vf-graphql-holochain/README.md | 2 +- package.json | 4 +-- scripts/clean-build.sh | 4 +-- scripts/package-dnas.sh | 4 +-- test/init.js | 12 ++++---- 21 files changed, 45 insertions(+), 45 deletions(-) rename {happs => dna_bundles}/agent/dna.yaml (100%) rename {happs => dna_bundles}/agreement/dna.yaml (100%) rename {happs => dna_bundles}/observation/dna.yaml (100%) rename {happs => dna_bundles}/planning/dna.yaml (100%) rename {happs => dna_bundles}/proposal/dna.yaml (100%) rename {happs => dna_bundles}/specification/dna.yaml (100%) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f62168638..db7596b14 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -79,9 +79,9 @@ jobs: run: | gh release upload "${GITHUB_REF#refs/tags/}" "webhapp/hrea.webhapp" --clobber gh release upload "${GITHUB_REF#refs/tags/}" "bundles/full_suite_release_template/hrea_suite.happ" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/agent/hrea_agent.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/agreement/hrea_agreement.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/observation/hrea_observation.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/planning/hrea_planning.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/proposal/hrea_proposal.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "happs/specification/hrea_specification.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/agent/hrea_agent.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/agreement/hrea_agreement.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/observation/hrea_observation.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/planning/hrea_planning.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/proposal/hrea_proposal.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/specification/hrea_specification.dna" --clobber diff --git a/.gitignore b/.gitignore index ab42bb90b..6cfce64eb 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ node_modules/ # Rust build files /target/ # Holochain build files -/happs/**/*.dna +/dna_bundles/**/*.dna /bundles/**/.hc /bundles/**/*.happ /webhapp/**/*.webhapp diff --git a/README.md b/README.md index b3aa22951..8ccd4c8f9 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,7 @@ There are a few sets of `*.yaml` configuration files used by Holochain in its bu These bundles are used by 
the project scripts to run the application locally from this repository. There are also [**`bundles_templates/`**](bundles_templates/) which are used by the Github releases process to build pre-packaged binaries for end-user installation into the [Holochain Launcher](https://github.com/holochain/launcher); in combination with the [**`webhapp/`**](webhapp/) configuration which also packages & associates a user interface. If you aren't developing hREA yourself this is a much easier way to setup the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the Holochain Launcher. -[**`happs/`**](happs/) configuration files group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" into Holochain DNAs. +[**`dna_bundles/`**](dna_bundles/) configuration files group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" into Holochain DNAs. DNAs are the highest-level units of functionality available in the system. One is available for each of the [modules in the hREA framework](https://github.com/holo-rea/ecosystem/wiki/Modules-in-the-HoloREA-framework). diff --git a/_templates/init-zome/new-index-zome/index.js b/_templates/init-zome/new-index-zome/index.js index 356e37ffd..8c9cfd018 100644 --- a/_templates/init-zome/new-index-zome/index.js +++ b/_templates/init-zome/new-index-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `happs/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. `dna_bundles/observation`)', required: true, }, { type: 'input', diff --git a/_templates/init-zome/new-zome/index.js b/_templates/init-zome/new-zome/index.js index 6b040f920..505fd6d96 100644 --- a/_templates/init-zome/new-zome/index.js +++ b/_templates/init-zome/new-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `happs/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. 
`dna_bundles/observation`)', required: true, }, { type: 'input', diff --git a/bundles/full_suite/happ.yaml b/bundles/full_suite/happ.yaml index 0fa45f978..94c962ae4 100644 --- a/bundles/full_suite/happ.yaml +++ b/bundles/full_suite/happ.yaml @@ -7,19 +7,19 @@ roles: strategy: create deferred: false dna: - bundled: "../../happs/observation/hrea_observation.dna" + bundled: "../../dna_bundles/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_planning_1 provisioning: strategy: create deferred: false dna: - bundled: "../../happs/planning/hrea_planning.dna" + bundled: "../../dna_bundles/planning/hrea_planning.dna" clone_limit: 0 - id: hrea_agreement_1 provisioning: strategy: create deferred: false dna: - bundled: "../../happs/agreement/hrea_agreement.dna" + bundled: "../../dna_bundles/agreement/hrea_agreement.dna" clone_limit: 0 diff --git a/bundles/obs_and_agent/happ.yaml b/bundles/obs_and_agent/happ.yaml index 322695fe9..d68620ff0 100644 --- a/bundles/obs_and_agent/happ.yaml +++ b/bundles/obs_and_agent/happ.yaml @@ -7,12 +7,12 @@ roles: strategy: create deferred: false dna: - bundled: "../../happs/observation/hrea_observation.dna" + bundled: "../../dna_bundles/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_agent_1 provisioning: strategy: create deferred: false dna: - bundled: "../../happs/agent/hrea_agent.dna" + bundled: "../../dna_bundles/agent/hrea_agent.dna" clone_limit: 0 diff --git a/completions.md b/completions.md index 8cc5dae3e..f1872f3cb 100644 --- a/completions.md +++ b/completions.md @@ -40,9 +40,9 @@ You may notice there is no specification of an Agent. This is because the Valuef | layer | RDF object | vf-schemas file | zome | hrea "module" or DNA | comments | | ------| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | K | [Scenario Definition](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L124) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L44) | - | - | | -| K | [Process Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L104) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L70) | [rea_process_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | | -| K | [Resource 
Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L92) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L45) | [rea_resource_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_resource_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | zome: Missing `resource_classified_as`, `default_unit_of_resource`. | -| K | [Action](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L32) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L19) | [rea_action](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_action/zome) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/happs/specification) | vf-schema: Missing `containedEffect`, `locationEffect`. zome: Same as vf-schema. | +| K | [Process Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L104) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L70) | [rea_process_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | | +| K | [Resource Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L92) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L45) | [rea_resource_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_resource_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | zome: Missing `resource_classified_as`, `default_unit_of_resource`. | +| K | [Action](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L32) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L19) | [rea_action](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_action/zome) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | vf-schema: Missing `containedEffect`, `locationEffect`. zome: Same as vf-schema. | | K | [Agent Relationship Role](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L74) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L126) | - | - | vf-schema: Missing `roleBehavior`. | | K | [Role Behavior](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L80) | - | - | - | vf-schema: This doesn't seem to be implemented yet. | | K | [Recipe Exchange](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L118) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L106) | - | - | | @@ -51,20 +51,20 @@ You may notice there is no specification of an Agent. 
This is because the Valuef | K | [Recipe Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L18) | - | - | | | P | [Scenario](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L16) | - | - | | | P | [Plan](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L133) | [plan](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/plan.gql#L16) | - | - | vf-schema: has extra fields `deletable` and `inScopeOf` are these for internal use? | -| P, O | [Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L196) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L155) | [rea_process](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `plannedIn` What is `unplannedEvents`? For the inverse relationships, do we want to group all `Intent`s, `Commitment`s, and `EconomicEvent`s together in the `inputs` and `outputs`? How is `track` and `trace` being handled? dna: Has extra `before` and `after` fields. `planned_within` is present, despite no implementation (because it just points to an `entryHash`.) This is often placed in with Observation layer, or on the line between Observation and Planning. | -| P | [Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L139) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L94) | [rea_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_intent) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | vf-schema: Missing `provider`, `reciever`, `atLocation`. Has a `satisfiedBy` inverse map to `Satisfaction`'s `satisfies`. | -| P | [Proposed Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L151) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L49) | [rea_proposed_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_intent) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | | -| P | [Proposal](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L145) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L16) | [rea_proposal](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposal) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | vf-schema: Missing `eligibleLocation`. Has a `publishes` inverse map to `ProposedIntent`'s `publishedIn`. zome: same. 
| -| P | [Proposed To](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L157) | [proposal.agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/bridging/proposal.agent.gql#L20) | [rea_proposed_to](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_to) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/happs/proposal) | | -| P | [Commitment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L163) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L24) | [rea_commitment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_commitment) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | vf-schema: Missing `atLocation` and `clauseOf`. Has `fullfilledBy` and `satisfies` inverse maps to `Fulfillment`'s`fulfill` and `Satisfation`'s `satisfiedBy`. zome: has `plan` instead of `planed_within`. | -| P | [Satisfaction](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L169) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L188) | [rea_satisfaction](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_satisfaction) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/happs/planning) | zome: allows `satisfied_by` to only be either one `EconomicEvent` or `Commitment`. Is this correct? | -| P | [Agreement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [agreement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agreement.gql#L19) | [rea_agreement](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_agreement) | [agreement](https://github.com/holo-rea/holo-rea/tree/sprout/happs/agreement) | | +| P, O | [Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L196) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L155) | [rea_process](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `plannedIn` What is `unplannedEvents`? For the inverse relationships, do we want to group all `Intent`s, `Commitment`s, and `EconomicEvent`s together in the `inputs` and `outputs`? How is `track` and `trace` being handled? dna: Has extra `before` and `after` fields. `planned_within` is present, despite no implementation (because it just points to an `entryHash`.) This is often placed in with Observation layer, or on the line between Observation and Planning. | +| P | [Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L139) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L94) | [rea_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_intent) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `provider`, `reciever`, `atLocation`. Has a `satisfiedBy` inverse map to `Satisfaction`'s `satisfies`. 
| +| P | [Proposed Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L151) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L49) | [rea_proposed_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_intent) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | | +| P | [Proposal](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L145) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L16) | [rea_proposal](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposal) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | vf-schema: Missing `eligibleLocation`. Has a `publishes` inverse map to `ProposedIntent`'s `publishedIn`. zome: same. | +| P | [Proposed To](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L157) | [proposal.agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/bridging/proposal.agent.gql#L20) | [rea_proposed_to](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_to) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | | +| P | [Commitment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L163) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L24) | [rea_commitment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_commitment) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `atLocation` and `clauseOf`. Has `fullfilledBy` and `satisfies` inverse maps to `Fulfillment`'s`fulfill` and `Satisfation`'s `satisfiedBy`. zome: has `plan` instead of `planed_within`. | +| P | [Satisfaction](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L169) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L188) | [rea_satisfaction](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_satisfaction) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | zome: allows `satisfied_by` to only be either one `EconomicEvent` or `Commitment`. Is this correct? | +| P | [Agreement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [agreement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agreement.gql#L19) | [rea_agreement](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_agreement) | [agreement](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/agreement) | | | P | [Claim](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L18) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. 
| -| O | [Economic Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L190) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L83) | [rea_economic_resource](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_resource) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `currentLocation`. Has `contains`, `track`, `trace` maps as additions. | +| O | [Economic Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L190) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L83) | [rea_economic_resource](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_resource) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `currentLocation`. Has `contains`, `track`, `trace` maps as additions. | | O | [dfc:ProductBatch](http://www.virtual-assembly.org/DataFoodConsortium/BusinessOntology) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L139) | - | - | vf-schema: Missing links to `identifies`, but that probably doesn't matter for our use case. | -| O | [Economic Event](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L202) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L19) | [rea_economic_event](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_event) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | vf-schema: Missing `realizationOf`, `image`, `provider`, `receiver`, `atLocation`, `toLocation`. Has `track` and `trace` going to `ProductionFlowItem`s. zome: Missing `to_location`. | +| O | [Economic Event](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L202) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L19) | [rea_economic_event](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_event) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `realizationOf`, `image`, `provider`, `receiver`, `atLocation`, `toLocation`. Has `track` and `trace` going to `ProductionFlowItem`s. zome: Missing `to_location`. | | O | [Appreciation](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L232) | [appreciation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/appreciation.gql#L17) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. | -| P, O | [Fulfillment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L214) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L166) | [rea_fulfillment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_fulfillment) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/happs/observation) | !! Discrepancy between "layer" and "vf-schema" files. 
FIXME | +| P, O | [Fulfillment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L214) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L166) | [rea_fulfillment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_fulfillment) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | !! Discrepancy between "layer" and "vf-schema" files. FIXME | | O | [Settlement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L226) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L61) | - | - | | | O | [Agent Relationship](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L208) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L104) | - | - | | diff --git a/happs/agent/dna.yaml b/dna_bundles/agent/dna.yaml similarity index 100% rename from happs/agent/dna.yaml rename to dna_bundles/agent/dna.yaml diff --git a/happs/agreement/dna.yaml b/dna_bundles/agreement/dna.yaml similarity index 100% rename from happs/agreement/dna.yaml rename to dna_bundles/agreement/dna.yaml diff --git a/happs/observation/dna.yaml b/dna_bundles/observation/dna.yaml similarity index 100% rename from happs/observation/dna.yaml rename to dna_bundles/observation/dna.yaml diff --git a/happs/planning/dna.yaml b/dna_bundles/planning/dna.yaml similarity index 100% rename from happs/planning/dna.yaml rename to dna_bundles/planning/dna.yaml diff --git a/happs/proposal/dna.yaml b/dna_bundles/proposal/dna.yaml similarity index 100% rename from happs/proposal/dna.yaml rename to dna_bundles/proposal/dna.yaml diff --git a/happs/specification/dna.yaml b/dna_bundles/specification/dna.yaml similarity index 100% rename from happs/specification/dna.yaml rename to dna_bundles/specification/dna.yaml diff --git a/docs/README.md b/docs/README.md index d0e7d3d22..46d508e9b 100644 --- a/docs/README.md +++ b/docs/README.md @@ -75,7 +75,7 @@ Scripts in this repository respond to the following env vars: Execution parameters: - `HOLOCHAIN_APP_PORT` sets the websocket port for the app interface when running the conductor in a development sandbox. See the `dht:conductor` script in `package.json`. -- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. It is called to finalise packaging the DNA bundles in `happs/` and to run the dev environment conductor. +- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. It is called to finalise packaging the DNA bundles in `dna_bundles/` and to run the dev environment conductor. Build parameters: diff --git a/docs/Workflow-automation.md b/docs/Workflow-automation.md index b1bef15e3..80798509f 100644 --- a/docs/Workflow-automation.md +++ b/docs/Workflow-automation.md @@ -26,13 +26,13 @@ Whenever you find yourself doing something repetitive, consider adding a Hygen t ## Creating new DNAs -1. `cd happs/` +1. `cd dna_bundles/` 2. `hc init ` scaffolds a new DNA folder named `NEW_DNA_NAME`. 3. Edit `app.json` in the newly created folder as appropriate. 4. 
Remove these generated files from the newly created directory:
    - `test/` (integration tests are all contained in the top-level `test` directory)
    - `.gitignore` (already taken care of via project-global ignore file)
-5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd happs/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script.
+5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd dna_bundles/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script.
 6. Edit `conductor-config.toml` as appropriate to include instance configuration & bridging for any new DHTs to be loaded from this DNA in the local test environment.
diff --git a/modules/vf-graphql-holochain/README.md b/modules/vf-graphql-holochain/README.md
index 44e842403..e00af6afd 100644
--- a/modules/vf-graphql-holochain/README.md
+++ b/modules/vf-graphql-holochain/README.md
@@ -48,7 +48,7 @@ The `enabledVFModules` option, if specified, [controls the subset of ValueFlows
 
 ### Multiple collaboration spaces
 
-The `dnaConfig` option allows the caller to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/happs` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to.
+The `dnaConfig` option allows the caller to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/dna_bundles` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to.
 
 By targeting multiple sets of DNAs, multiple "collaboration spaces" can be initialised for a single client application. Several GraphQL APIs can be interacted with via the standard ValueFlows specification. User interfaces should make explicit the scope of data and destination networks to perform query and mutation operations against.
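To make the `dnaConfig` contract above concrete, here is a minimal sketch of deriving per-module `CellId`s from a running conductor. It is illustrative only: the websocket URL is arbitrary, the `@holochain/client` field names have varied across releases (`role_id` was previously `cell_nick`), and the `hrea_<module>_1` role-ID pattern is an assumption based on the happ manifests in this repository, not a documented API.

```typescript
import { AppWebsocket, CellId } from '@holochain/client'

// Build a `dnaConfig`-style mapping of hREA module ID -> CellId by
// inspecting the cells of an installed hApp.
async function hreaDnaConfig(url: string, appId: string): Promise<Record<string, CellId>> {
  const conn = await AppWebsocket.connect(url) // eg. 'ws://localhost:4000' (assumed port)
  const appInfo = await conn.appInfo({ installed_app_id: appId })
  const dnaConfig: Record<string, CellId> = {}
  for (const cell of appInfo.cell_data) {
    // eg. role ID `hrea_observation_1` -> module ID `observation`
    const moduleId = cell.role_id.replace(/^hrea_/, '').replace(/_\d+$/, '')
    dnaConfig[moduleId] = cell.cell_id
  }
  return dnaConfig
}
```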
diff --git a/package.json b/package.json index f4272c558..9381edb0c 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ "test:integration": "cd test && npm test", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", - "clean:build": "nix-shell --run hn-node-flush && nix-shell --run hn-rust-flush && rm happs/**/*.dna" + "clean:build": "nix-shell --run hn-node-flush && nix-shell --run hn-rust-flush && rm dna_bundles/**/*.dna" }, "watch": { "test:unit": { @@ -43,7 +43,7 @@ "test:integration": { "patterns": [ "test", - "happs", + "dna_bundles", "modules/vf-graphql-holochain/build", "modules/graphql-client/build" ], diff --git a/scripts/clean-build.sh b/scripts/clean-build.sh index 5a0d289cb..dcc8746c0 100755 --- a/scripts/clean-build.sh +++ b/scripts/clean-build.sh @@ -7,8 +7,8 @@ # ## -rm -Rf happs/**/dist -rm -Rf happs/**/zomes/**/code/target +rm -Rf dna_bundles/**/dist +rm -Rf dna_bundles/**/zomes/**/code/target # :IMPORTANT: after updating Holochain this can be needed to avoid unmet dependency errors cargo update diff --git a/scripts/package-dnas.sh b/scripts/package-dnas.sh index 05ef4064b..d48cefa42 100755 --- a/scripts/package-dnas.sh +++ b/scripts/package-dnas.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # -# Runs the Holochain DNA bundler utility against all configurations in the `happs` dir +# Runs the Holochain DNA bundler utility against all configurations in the `dna_bundles` dir # # @package: Holo-REA # @since: 2021-02-09 @@ -18,7 +18,7 @@ if [ $RUN_WASM_OPT -ne "0" ]; then fi # compile DNAs by concatenating WASMs with properties -for DIR in happs/*; do +for DIR in dna_bundles/*; do if [[ -d "$DIR" ]]; then echo -e "\e[1mCompiling DNA in $DIR\e[0m" if "$UTIL" dna pack "$DIR" 2>/dev/null; then diff --git a/test/init.js b/test/init.js index 862c82190..43047547c 100644 --- a/test/init.js +++ b/test/init.js @@ -31,12 +31,12 @@ process.on('unhandledRejection', error => { // DNA loader, to be used with `buildTestScenario` when constructing DNAs for testing const getDNA = ((dnas) => (name) => (dnas[name]))({ - 'agent': path.resolve(__dirname, '../happs/agent/hrea_agent.dna'), - 'agreement': path.resolve(__dirname, '../happs/agreement/hrea_agreement.dna'), - 'observation': path.resolve(__dirname, '../happs/observation/hrea_observation.dna'), - 'planning': path.resolve(__dirname, '../happs/planning/hrea_planning.dna'), - 'proposal': path.resolve(__dirname, '../happs/proposal/hrea_proposal.dna'), - 'specification': path.resolve(__dirname, '../happs/specification/hrea_specification.dna'), + 'agent': path.resolve(__dirname, '../dna_bundles/agent/hrea_agent.dna'), + 'agreement': path.resolve(__dirname, '../dna_bundles/agreement/hrea_agreement.dna'), + 'observation': path.resolve(__dirname, '../dna_bundles/observation/hrea_observation.dna'), + 'planning': path.resolve(__dirname, '../dna_bundles/planning/hrea_planning.dna'), + 'proposal': path.resolve(__dirname, '../dna_bundles/proposal/hrea_proposal.dna'), + 'specification': path.resolve(__dirname, '../dna_bundles/specification/hrea_specification.dna'), }) /** From eab164b03d4cfcc57c09c1f4885fdf9a47b66290 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:12:14 +1000 Subject: [PATCH 077/181] fix missing Agreement module in Holochain Cell mappings, make all DNAs optional originally as part of #224 --- modules/vf-graphql-holochain/types.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/modules/vf-graphql-holochain/types.ts 
b/modules/vf-graphql-holochain/types.ts index ea875517c..cf695ea98 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -17,11 +17,12 @@ import { Kind } from 'graphql/language' // Configuration object to allow specifying custom conductor DNA IDs to bind to. // Default is to use a DNA with the same ID as the mapping ID (ie. agent = "agent") export interface DNAIdMappings { - agent: CellId, - observation: CellId, - planning: CellId, - proposal: CellId, - specification: CellId, + agent?: CellId, + agreement?: CellId, + observation?: CellId, + planning?: CellId, + proposal?: CellId, + specification?: CellId, } export { CellId } From 65f4210dbce302275df469e8783fb1c90fd6742c Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:25:23 +1000 Subject: [PATCH 078/181] reorg bundle folders into single toplevel directory --- bundles/{ => app}/full_suite/happ.yaml | 0 bundles/{ => app}/obs_and_agent/happ.yaml | 0 {dna_bundles => bundles/dna}/agent/dna.yaml | 0 {dna_bundles => bundles/dna}/agreement/dna.yaml | 0 {dna_bundles => bundles/dna}/observation/dna.yaml | 0 {dna_bundles => bundles/dna}/planning/dna.yaml | 0 {dna_bundles => bundles/dna}/proposal/dna.yaml | 0 {dna_bundles => bundles/dna}/specification/dna.yaml | 0 {webhapp => bundles/web-app}/web-happ.yaml | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename bundles/{ => app}/full_suite/happ.yaml (100%) rename bundles/{ => app}/obs_and_agent/happ.yaml (100%) rename {dna_bundles => bundles/dna}/agent/dna.yaml (100%) rename {dna_bundles => bundles/dna}/agreement/dna.yaml (100%) rename {dna_bundles => bundles/dna}/observation/dna.yaml (100%) rename {dna_bundles => bundles/dna}/planning/dna.yaml (100%) rename {dna_bundles => bundles/dna}/proposal/dna.yaml (100%) rename {dna_bundles => bundles/dna}/specification/dna.yaml (100%) rename {webhapp => bundles/web-app}/web-happ.yaml (100%) diff --git a/bundles/full_suite/happ.yaml b/bundles/app/full_suite/happ.yaml similarity index 100% rename from bundles/full_suite/happ.yaml rename to bundles/app/full_suite/happ.yaml diff --git a/bundles/obs_and_agent/happ.yaml b/bundles/app/obs_and_agent/happ.yaml similarity index 100% rename from bundles/obs_and_agent/happ.yaml rename to bundles/app/obs_and_agent/happ.yaml diff --git a/dna_bundles/agent/dna.yaml b/bundles/dna/agent/dna.yaml similarity index 100% rename from dna_bundles/agent/dna.yaml rename to bundles/dna/agent/dna.yaml diff --git a/dna_bundles/agreement/dna.yaml b/bundles/dna/agreement/dna.yaml similarity index 100% rename from dna_bundles/agreement/dna.yaml rename to bundles/dna/agreement/dna.yaml diff --git a/dna_bundles/observation/dna.yaml b/bundles/dna/observation/dna.yaml similarity index 100% rename from dna_bundles/observation/dna.yaml rename to bundles/dna/observation/dna.yaml diff --git a/dna_bundles/planning/dna.yaml b/bundles/dna/planning/dna.yaml similarity index 100% rename from dna_bundles/planning/dna.yaml rename to bundles/dna/planning/dna.yaml diff --git a/dna_bundles/proposal/dna.yaml b/bundles/dna/proposal/dna.yaml similarity index 100% rename from dna_bundles/proposal/dna.yaml rename to bundles/dna/proposal/dna.yaml diff --git a/dna_bundles/specification/dna.yaml b/bundles/dna/specification/dna.yaml similarity index 100% rename from dna_bundles/specification/dna.yaml rename to bundles/dna/specification/dna.yaml diff --git a/webhapp/web-happ.yaml b/bundles/web-app/web-happ.yaml similarity index 100% rename from webhapp/web-happ.yaml rename to bundles/web-app/web-happ.yaml 
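To illustrate what the now-optional `DNAIdMappings` fields above enable: a client can bind only the modules its collaboration space actually runs and simply omit the rest. A small sketch follows; the package import path is an assumption (the interface itself is defined in `modules/vf-graphql-holochain/types.ts`), and the `CellId` values would come from the conductor at runtime.

```typescript
import type { CellId } from '@holochain/client'
// assumed package name for the module at modules/vf-graphql-holochain
import type { DNAIdMappings } from '@valueflows/vf-graphql-holochain'

// An observation-and-agent-only collaboration space: since every field of
// DNAIdMappings is now optional, unused modules are left out entirely and
// no resolvers are bound for them.
function obsAndAgentConfig(observation: CellId, agent: CellId): DNAIdMappings {
  return { observation, agent }
}
```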
From 4aa142de84405c6e9af1d49c149ed66819fe6ffa Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:45:24 +1000 Subject: [PATCH 079/181] update readmes for new paths --- README.md | 10 +++++++--- docs/README.md | 2 +- docs/Workflow-automation.md | 4 ++-- modules/vf-graphql-holochain/README.md | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 8ccd4c8f9..74fc65a8d 100644 --- a/README.md +++ b/README.md @@ -134,11 +134,15 @@ For more information on usage and options, see the [GraphQL Client](modules/grap There are a few sets of `*.yaml` configuration files used by Holochain in its build processes. -[**`bundles/`**](bundles/) contains configuration files which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application. +[**`bundles/`**](bundles/) contains configuration files for: -These bundles are used by the project scripts to run the application locally from this repository. There are also [**`bundles_templates/`**](bundles_templates/) which are used by the Github releases process to build pre-packaged binaries for end-user installation into the [Holochain Launcher](https://github.com/holochain/launcher); in combination with the [**`webhapp/`**](webhapp/) configuration which also packages & associates a user interface. If you aren't developing hREA yourself this is a much easier way to setup the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the Holochain Launcher. +- `dna`s which group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" (compiled WASM files) into Holochain DNAs. +- `app`s which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application; and +- `web-app`s which bind a 'hApp bundle' with a (zipped) JavaScript single-page web application that talks to the Holochain backend. -[**`dna_bundles/`**](dna_bundles/) configuration files group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" into Holochain DNAs. +These bundles are used by the project scripts to run the application locally from this repository. There are also [**`bundles_templates/`**](bundles_templates/) which are used by the Github releases process to build pre-packaged binaries for end-user installation into the [Holochain Launcher](https://github.com/holochain/launcher); in combination with a bundled `web-app` artifact. + +If you aren't developing hREA yourself this is a much easier way to setup the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the Holochain Launcher. DNAs are the highest-level units of functionality available in the system. One is available for each of the [modules in the hREA framework](https://github.com/holo-rea/ecosystem/wiki/Modules-in-the-HoloREA-framework). diff --git a/docs/README.md b/docs/README.md index 46d508e9b..184a45357 100644 --- a/docs/README.md +++ b/docs/README.md @@ -75,7 +75,7 @@ Scripts in this repository respond to the following env vars: Execution parameters: - `HOLOCHAIN_APP_PORT` sets the websocket port for the app interface when running the conductor in a development sandbox. See the `dht:conductor` script in `package.json`. -- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. 
It is called to finalise packaging the DNA bundles in `dna_bundles/` and to run the dev environment conductor.
+- `HOLOCHAIN_DNA_UTIL_PATH` works similarly to `TRYORAMA_HOLOCHAIN_PATH`, but for the `hc` binary that ships with Holochain. It is called to finalise packaging the bundles in `bundles/` and to run the dev environment conductor.
 
 Build parameters:
 
diff --git a/docs/Workflow-automation.md b/docs/Workflow-automation.md
index 80798509f..74c0f5fe9 100644
--- a/docs/Workflow-automation.md
+++ b/docs/Workflow-automation.md
@@ -26,13 +26,13 @@ Whenever you find yourself doing something repetitive, consider adding a Hygen t
 
 ## Creating new DNAs
 
-1. `cd dna_bundles/`
+1. `cd bundles/dna/`
 2. `hc init ` scaffolds a new DNA folder named `NEW_DNA_NAME`.
 3. Edit `app.json` in the newly created folder as appropriate.
 4. Remove these generated files from the newly created directory:
    - `test/` (integration tests are all contained in the top-level `test` directory)
    - `.gitignore` (already taken care of via project-global ignore file)
-5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd dna_bundles/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script.
+5. Wire up a new `build` sub-command in the toplevel `package.json`; eg. `"build:dna_obs": "cd bundles/dna/observation && hc package"`. Do not forget to add the new build step to the base NPM `build` script.
 6. Edit `conductor-config.toml` as appropriate to include instance configuration & bridging for any new DHTs to be loaded from this DNA in the local test environment.
diff --git a/modules/vf-graphql-holochain/README.md b/modules/vf-graphql-holochain/README.md
index e00af6afd..4108dc372 100644
--- a/modules/vf-graphql-holochain/README.md
+++ b/modules/vf-graphql-holochain/README.md
@@ -48,7 +48,7 @@ The `enabledVFModules` option, if specified, [controls the subset of ValueFlows
 
 ### Multiple collaboration spaces
 
-The `dnaConfig` option allows the caller to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/dna_bundles` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to.
+The `dnaConfig` option allows the caller to specify custom DNA identifiers to bind GraphQL functions to. For each hREA module ID (see the directory names under `/bundles/dna` in this repository), a runtime `CellId` must be provided as an instance of that DNA to bind to.
 
 By targeting multiple sets of DNAs, multiple "collaboration spaces" can be initialised for a single client application. Several GraphQL APIs can be interacted with via the standard ValueFlows specification. User interfaces should make explicit the scope of data and destination networks to perform query and mutation operations against.
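Reading the "multiple collaboration spaces" paragraph above literally, one schema instance is constructed per space, each with its own `dnaConfig`. The sketch below assumes a schema factory exported by the module (named `bindSchema` here as a placeholder, since the README does not pin the export down) and an assumed import path.

```typescript
import type { DNAIdMappings } from '@valueflows/vf-graphql-holochain' // assumed import path
import bindSchema from '@valueflows/vf-graphql-holochain'             // placeholder export name

// Two independent collaboration spaces backed by different sets of cells;
// queries and mutations against one schema never touch the other network.
function buildSpaces(foodCoop: DNAIdMappings, makerspace: DNAIdMappings) {
  return {
    foodCoopSchema: bindSchema({ dnaConfig: foodCoop }),
    makerspaceSchema: bindSchema({ dnaConfig: makerspace }),
  }
}
```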
From 0552be11c450dbee742783b51982912836e8ff68 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:46:26 +1000 Subject: [PATCH 080/181] update ignore file --- .gitignore | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 6cfce64eb..27f654bd1 100644 --- a/.gitignore +++ b/.gitignore @@ -10,10 +10,9 @@ node_modules/ # Rust build files /target/ # Holochain build files -/dna_bundles/**/*.dna -/bundles/**/.hc -/bundles/**/*.happ -/webhapp/**/*.webhapp +/bundles/dna/**/*.dna +/bundles/app/**/*.happ +/webhapp/web-app/*.webhapp # Manifest Files Under Automation /bundles/*_template/* From 011f28772da4e36cf0359d276553d8eb4ad06bd5 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:57:59 +1000 Subject: [PATCH 081/181] update paths in all configs, helper scripts and test harness --- .github/workflows/release.yml | 22 ++++++++++---------- _templates/init-zome/new-index-zome/index.js | 2 +- _templates/init-zome/new-zome/index.js | 2 +- bundles/app/full_suite/happ.yaml | 6 +++--- bundles/app/obs_and_agent/happ.yaml | 4 ++-- bundles/dna/agent/dna.yaml | 2 +- bundles/dna/agreement/dna.yaml | 6 +++--- bundles/dna/observation/dna.yaml | 22 ++++++++++---------- bundles/dna/planning/dna.yaml | 16 +++++++------- bundles/dna/proposal/dna.yaml | 12 +++++------ bundles/dna/specification/dna.yaml | 12 +++++------ bundles/web-app/web-happ.yaml | 4 ++-- package.json | 4 ++-- scripts/package-dnas.sh | 4 ++-- scripts/package-webhapp.sh | 2 +- scripts/run-dev-conductor.sh | 2 +- test/init.js | 12 +++++------ 17 files changed, 67 insertions(+), 67 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index db7596b14..83179281b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -65,10 +65,10 @@ jobs: run: nix-shell --command "pnpm install --no-frozen-lockfile" - name: Set up release bundle from template run: | - cp -a bundles_templates/* bundles/ + cp -a bundles_templates/* bundles/app/ RELEASE="${GITHUB_REF#refs/tags/}" - sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/full_suite_release_template/happ.yaml - cat bundles/full_suite_release_template/happ.yaml + sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/app/full_suite_release_template/happ.yaml + cat bundles/app/full_suite_release_template/happ.yaml - name: Build WASM, dnas, happs run: nix-shell --run 'pnpm run build:crates' - name: Build explorer UI and webhapp package @@ -77,11 +77,11 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - gh release upload "${GITHUB_REF#refs/tags/}" "webhapp/hrea.webhapp" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "bundles/full_suite_release_template/hrea_suite.happ" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/agent/hrea_agent.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/agreement/hrea_agreement.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/observation/hrea_observation.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/planning/hrea_planning.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/proposal/hrea_proposal.dna" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "dna_bundles/specification/hrea_specification.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" 
"bundles/web-app/hrea.webhapp" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/app/full_suite_release_template/hrea_suite.happ" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agent/hrea_agent.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agreement/hrea_agreement.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/observation/hrea_observation.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/planning/hrea_planning.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/proposal/hrea_proposal.dna" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/specification/hrea_specification.dna" --clobber diff --git a/_templates/init-zome/new-index-zome/index.js b/_templates/init-zome/new-index-zome/index.js index 8c9cfd018..1354ed580 100644 --- a/_templates/init-zome/new-index-zome/index.js +++ b/_templates/init-zome/new-index-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `dna_bundles/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. `bundles/dna/observation`)', required: true, }, { type: 'input', diff --git a/_templates/init-zome/new-zome/index.js b/_templates/init-zome/new-zome/index.js index 505fd6d96..a7111c72c 100644 --- a/_templates/init-zome/new-zome/index.js +++ b/_templates/init-zome/new-zome/index.js @@ -5,7 +5,7 @@ module.exports = [ { type: 'input', name: 'dna_path', - message: 'Project-relative directory name of the destination DNA? (eg. `dna_bundles/observation`)', + message: 'Project-relative directory name of the destination DNA? (eg. `bundles/dna/observation`)', required: true, }, { type: 'input', diff --git a/bundles/app/full_suite/happ.yaml b/bundles/app/full_suite/happ.yaml index 94c962ae4..0d6204116 100644 --- a/bundles/app/full_suite/happ.yaml +++ b/bundles/app/full_suite/happ.yaml @@ -7,19 +7,19 @@ roles: strategy: create deferred: false dna: - bundled: "../../dna_bundles/observation/hrea_observation.dna" + bundled: "../../dna/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_planning_1 provisioning: strategy: create deferred: false dna: - bundled: "../../dna_bundles/planning/hrea_planning.dna" + bundled: "../../dna/planning/hrea_planning.dna" clone_limit: 0 - id: hrea_agreement_1 provisioning: strategy: create deferred: false dna: - bundled: "../../dna_bundles/agreement/hrea_agreement.dna" + bundled: "../../dna/agreement/hrea_agreement.dna" clone_limit: 0 diff --git a/bundles/app/obs_and_agent/happ.yaml b/bundles/app/obs_and_agent/happ.yaml index d68620ff0..3db544bc8 100644 --- a/bundles/app/obs_and_agent/happ.yaml +++ b/bundles/app/obs_and_agent/happ.yaml @@ -7,12 +7,12 @@ roles: strategy: create deferred: false dna: - bundled: "../../dna_bundles/observation/hrea_observation.dna" + bundled: "../../dna/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_agent_1 provisioning: strategy: create deferred: false dna: - bundled: "../../dna_bundles/agent/hrea_agent.dna" + bundled: "../../dna/agent/hrea_agent.dna" clone_limit: 0 diff --git a/bundles/dna/agent/dna.yaml b/bundles/dna/agent/dna.yaml index ced7b289c..f33a4f7f8 100644 --- a/bundles/dna/agent/dna.yaml +++ b/bundles/dna/agent/dna.yaml @@ -4,4 +4,4 @@ uuid: "" properties: null zomes: - name: agent_registration - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" + 
bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" diff --git a/bundles/dna/agreement/dna.yaml b/bundles/dna/agreement/dna.yaml index b698fe638..5c89e5fc3 100644 --- a/bundles/dna/agreement/dna.yaml +++ b/bundles/dna/agreement/dna.yaml @@ -13,10 +13,10 @@ zomes: # application zomes - name: agreement - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" - name: agreement_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" # utility zomes - name: remote_auth - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna/observation/dna.yaml b/bundles/dna/observation/dna.yaml index 7bbaaaea1..b83077399 100644 --- a/bundles/dna/observation/dna.yaml +++ b/bundles/dna/observation/dna.yaml @@ -52,27 +52,27 @@ properties: zomes: # application zomes - name: economic_event - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" - name: economic_resource - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" - name: process - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" - name: fulfillment - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" - name: satisfaction - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" - name: economic_event_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" - name: economic_resource_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" - name: process_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" - name: fulfillment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" - name: satisfaction_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" # utility zomes - name: remote_auth - bundled: 
"../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna/planning/dna.yaml b/bundles/dna/planning/dna.yaml index 92dfe65bf..c9229cfd2 100644 --- a/bundles/dna/planning/dna.yaml +++ b/bundles/dna/planning/dna.yaml @@ -30,19 +30,19 @@ zomes: # application zomes - name: commitment - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" - name: intent - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" - name: fulfillment - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" - name: satisfaction - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" - name: commitment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" - name: intent_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" - name: fulfillment_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" - name: satisfaction_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" diff --git a/bundles/dna/proposal/dna.yaml b/bundles/dna/proposal/dna.yaml index 1f57c32b6..0df42a5c6 100644 --- a/bundles/dna/proposal/dna.yaml +++ b/bundles/dna/proposal/dna.yaml @@ -8,14 +8,14 @@ properties: record_storage_zome: proposal zomes: - name: proposal - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" - name: proposal_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" - name: proposed_intent - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" - name: proposed_intent_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" - name: proposed_to - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" - name: proposed_to_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" + bundled: 
"../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" diff --git a/bundles/dna/specification/dna.yaml b/bundles/dna/specification/dna.yaml index 85967edbd..9f077af8b 100644 --- a/bundles/dna/specification/dna.yaml +++ b/bundles/dna/specification/dna.yaml @@ -13,16 +13,16 @@ properties: zomes: # application zomes - name: action - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" - name: process_specification - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" - name: resource_specification - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" - name: resource_specification_index - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" - name: unit - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" # utility zomes - name: remote_auth - bundled: "../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/web-app/web-happ.yaml b/bundles/web-app/web-happ.yaml index 74ad3f08c..0ae96fc30 100644 --- a/bundles/web-app/web-happ.yaml +++ b/bundles/web-app/web-happ.yaml @@ -2,6 +2,6 @@ manifest_version: "1" name: hrea ui: - bundled: "../apps/holorea-graphql-explorer/ui.zip" + bundled: "../../apps/holorea-graphql-explorer/ui.zip" happ_manifest: - bundled: "../bundles/full_suite_release_template/hrea_suite.happ" + bundled: "../app/full_suite_release_template/hrea_suite.happ" diff --git a/package.json b/package.json index 9381edb0c..27b8888ca 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "test:integration": { "patterns": [ "test", - "dna_bundles", + "bundles/dna", "modules/vf-graphql-holochain/build", "modules/graphql-client/build" ], @@ -51,7 +51,7 @@ }, "dht": { "patterns": [ - "bundles" + "bundles/app" ], "extensions": ".happ" } diff --git a/scripts/package-dnas.sh b/scripts/package-dnas.sh index d48cefa42..0efb09339 100755 --- a/scripts/package-dnas.sh +++ b/scripts/package-dnas.sh @@ -18,7 +18,7 @@ if [ $RUN_WASM_OPT -ne "0" ]; then fi # compile DNAs by concatenating WASMs with properties -for DIR in dna_bundles/*; do +for DIR in bundles/dna/*; do if [[ -d "$DIR" ]]; then echo -e "\e[1mCompiling DNA in $DIR\e[0m" if "$UTIL" dna pack "$DIR" 2>/dev/null; then @@ -30,7 +30,7 @@ for DIR in dna_bundles/*; do done # compile hApp bundles by concatenating DNAs and specifying any config -for DIR in bundles/*; do +for DIR in bundles/app/*; do if [[ -d "$DIR" ]]; then echo -e "\e[1mBundling hApp in $DIR\e[0m" if "$UTIL" app pack "$DIR" 2>/dev/null; then diff --git a/scripts/package-webhapp.sh b/scripts/package-webhapp.sh index 172b6a040..84582c074 100755 --- a/scripts/package-webhapp.sh +++ b/scripts/package-webhapp.sh @@ -10,7 +10,7 @@ UTIL="${HOLOCHAIN_DNA_UTIL_PATH:-hc}" echo -e "\e[1mPacking webhapp\e[0m" -if "$UTIL" web-app pack webhapp 
2>/dev/null; then +if "$UTIL" web-app pack bundles/web-app 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" else echo -e "\e[1;31m [FAIL]\e[0m" diff --git a/scripts/run-dev-conductor.sh b/scripts/run-dev-conductor.sh index 87b0cb0bc..360929200 100755 --- a/scripts/run-dev-conductor.sh +++ b/scripts/run-dev-conductor.sh @@ -16,5 +16,5 @@ APP="${HOLOCHAIN_APP_PORT:-4000}" "$UTIL" s clean "$UTIL" s create -n 1 -d hrea_tester network quic -"$UTIL" s call install-app-bundle ./bundles/obs_and_agent/hrea_obs_agent.happ +"$UTIL" s call install-app-bundle ./bundles/app/obs_and_agent/hrea_obs_agent.happ "$UTIL" s run --all -p $APP diff --git a/test/init.js b/test/init.js index 43047547c..75f2241fa 100644 --- a/test/init.js +++ b/test/init.js @@ -31,12 +31,12 @@ process.on('unhandledRejection', error => { // DNA loader, to be used with `buildTestScenario` when constructing DNAs for testing const getDNA = ((dnas) => (name) => (dnas[name]))({ - 'agent': path.resolve(__dirname, '../dna_bundles/agent/hrea_agent.dna'), - 'agreement': path.resolve(__dirname, '../dna_bundles/agreement/hrea_agreement.dna'), - 'observation': path.resolve(__dirname, '../dna_bundles/observation/hrea_observation.dna'), - 'planning': path.resolve(__dirname, '../dna_bundles/planning/hrea_planning.dna'), - 'proposal': path.resolve(__dirname, '../dna_bundles/proposal/hrea_proposal.dna'), - 'specification': path.resolve(__dirname, '../dna_bundles/specification/hrea_specification.dna'), + 'agent': path.resolve(__dirname, '../bundles/dna/agent/hrea_agent.dna'), + 'agreement': path.resolve(__dirname, '../bundles/dna/agreement/hrea_agreement.dna'), + 'observation': path.resolve(__dirname, '../bundles/dna/observation/hrea_observation.dna'), + 'planning': path.resolve(__dirname, '../bundles/dna/planning/hrea_planning.dna'), + 'proposal': path.resolve(__dirname, '../bundles/dna/proposal/hrea_proposal.dna'), + 'specification': path.resolve(__dirname, '../bundles/dna/specification/hrea_specification.dna'), }) /** From a40b1995cac5fc2c1ffcfb2d989a0b99bde1bb42 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 14:58:31 +1000 Subject: [PATCH 082/181] clean up build clean command --- package.json | 2 +- scripts/clean-build.sh | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 27b8888ca..2e0d43156 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ "test:integration": "cd test && npm test", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", - "clean:build": "nix-shell --run hn-node-flush && nix-shell --run hn-rust-flush && rm dna_bundles/**/*.dna" + "clean:build": "scripts/clean-build.sh" }, "watch": { "test:unit": { diff --git a/scripts/clean-build.sh b/scripts/clean-build.sh index dcc8746c0..352a6c543 100755 --- a/scripts/clean-build.sh +++ b/scripts/clean-build.sh @@ -7,8 +7,9 @@ # ## -rm -Rf dna_bundles/**/dist -rm -Rf dna_bundles/**/zomes/**/code/target +nix-shell --run hn-node-flush +nix-shell --run hn-rust-flush -# :IMPORTANT: after updating Holochain this can be needed to avoid unmet dependency errors -cargo update +rm bundles/dna/**/*.dna +rm bundles/app/**/*.happ +rm bundles/web-app/*.webhapp From f71eb2184c2de2977172cce8e99465786e7acf80 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 16:58:55 +1000 Subject: [PATCH 083/181] move DNA bundle files in prep for templating --- bundles/{dna => dna_templates}/agent/dna.yaml | 0 bundles/{dna => dna_templates}/agreement/dna.yaml | 0 
bundles/{dna => dna_templates}/observation/dna.yaml | 0 bundles/{dna => dna_templates}/planning/dna.yaml | 0 bundles/{dna => dna_templates}/proposal/dna.yaml | 0 bundles/{dna => dna_templates}/specification/dna.yaml | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename bundles/{dna => dna_templates}/agent/dna.yaml (100%) rename bundles/{dna => dna_templates}/agreement/dna.yaml (100%) rename bundles/{dna => dna_templates}/observation/dna.yaml (100%) rename bundles/{dna => dna_templates}/planning/dna.yaml (100%) rename bundles/{dna => dna_templates}/proposal/dna.yaml (100%) rename bundles/{dna => dna_templates}/specification/dna.yaml (100%) diff --git a/bundles/dna/agent/dna.yaml b/bundles/dna_templates/agent/dna.yaml similarity index 100% rename from bundles/dna/agent/dna.yaml rename to bundles/dna_templates/agent/dna.yaml diff --git a/bundles/dna/agreement/dna.yaml b/bundles/dna_templates/agreement/dna.yaml similarity index 100% rename from bundles/dna/agreement/dna.yaml rename to bundles/dna_templates/agreement/dna.yaml diff --git a/bundles/dna/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml similarity index 100% rename from bundles/dna/observation/dna.yaml rename to bundles/dna_templates/observation/dna.yaml diff --git a/bundles/dna/planning/dna.yaml b/bundles/dna_templates/planning/dna.yaml similarity index 100% rename from bundles/dna/planning/dna.yaml rename to bundles/dna_templates/planning/dna.yaml diff --git a/bundles/dna/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml similarity index 100% rename from bundles/dna/proposal/dna.yaml rename to bundles/dna_templates/proposal/dna.yaml diff --git a/bundles/dna/specification/dna.yaml b/bundles/dna_templates/specification/dna.yaml similarity index 100% rename from bundles/dna/specification/dna.yaml rename to bundles/dna_templates/specification/dna.yaml From ecb020478a813908e75f397aae63cb412ca2e027 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:24:38 +1000 Subject: [PATCH 084/181] move app bundle template, use single template & delete old dev-specific config --- bundles/app/full_suite/happ.yaml | 25 ------------------- bundles/app/obs_and_agent/happ.yaml | 18 ------------- .../app_templates/full_suite}/happ.yaml | 0 3 files changed, 43 deletions(-) delete mode 100644 bundles/app/full_suite/happ.yaml delete mode 100644 bundles/app/obs_and_agent/happ.yaml rename {bundles_templates/full_suite_release_template => bundles/app_templates/full_suite}/happ.yaml (100%) diff --git a/bundles/app/full_suite/happ.yaml b/bundles/app/full_suite/happ.yaml deleted file mode 100644 index 0d6204116..000000000 --- a/bundles/app/full_suite/happ.yaml +++ /dev/null @@ -1,25 +0,0 @@ -manifest_version: "1" -name: hrea_suite -description: Complete configuration of all modules in the hREA suite. 
-roles: - - id: hrea_observation_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../dna/observation/hrea_observation.dna" - clone_limit: 0 - - id: hrea_planning_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../dna/planning/hrea_planning.dna" - clone_limit: 0 - - id: hrea_agreement_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../dna/agreement/hrea_agreement.dna" - clone_limit: 0 diff --git a/bundles/app/obs_and_agent/happ.yaml b/bundles/app/obs_and_agent/happ.yaml deleted file mode 100644 index 3db544bc8..000000000 --- a/bundles/app/obs_and_agent/happ.yaml +++ /dev/null @@ -1,18 +0,0 @@ -manifest_version: "1" -name: hrea_obs_agent -description: Observation and agent modules (eg. anonymized public supply chain) -roles: - - id: hrea_observation_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../dna/observation/hrea_observation.dna" - clone_limit: 0 - - id: hrea_agent_1 - provisioning: - strategy: create - deferred: false - dna: - bundled: "../../dna/agent/hrea_agent.dna" - clone_limit: 0 diff --git a/bundles_templates/full_suite_release_template/happ.yaml b/bundles/app_templates/full_suite/happ.yaml similarity index 100% rename from bundles_templates/full_suite_release_template/happ.yaml rename to bundles/app_templates/full_suite/happ.yaml From 6a1081dc27e774faaf15dba0a162250f0ae27b7e Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:25:20 +1000 Subject: [PATCH 085/181] change HC app ID to full suite now the sandbox can support it --- apps/holorea-graphql-explorer/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/holorea-graphql-explorer/package.json b/apps/holorea-graphql-explorer/package.json index 6c6784894..d9df7e435 100644 --- a/apps/holorea-graphql-explorer/package.json +++ b/apps/holorea-graphql-explorer/package.json @@ -24,7 +24,7 @@ "rimraf": "^3.0.2" }, "scripts": { - "start": "REACT_APP_HC_APP_ID='hrea_obs_agent' REACT_APP_HC_CONN_URL='ws://localhost:4000' BROWSER=none react-scripts start", + "start": "REACT_APP_HC_APP_ID='hrea_suite' REACT_APP_HC_CONN_URL='ws://localhost:4000' BROWSER=none react-scripts start", "build": "rimraf ui.zip && rimraf build && react-scripts build && cd ./build && bestzip ../ui.zip * ", "test": "react-scripts test", "eject": "react-scripts eject" From 529f036c14282e77a05015ad01554c8b8ddda620 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:26:41 +1000 Subject: [PATCH 086/181] update manifest files & build scripts to allow split dev/release config using same template files --- bundles/app_templates/full_suite/happ.yaml | 12 ++++---- bundles/dna_templates/agent/dna.yaml | 2 +- bundles/dna_templates/agreement/dna.yaml | 6 ++-- bundles/dna_templates/observation/dna.yaml | 22 +++++++------- bundles/dna_templates/planning/dna.yaml | 16 +++++----- bundles/dna_templates/proposal/dna.yaml | 12 ++++---- bundles/dna_templates/specification/dna.yaml | 12 ++++---- bundles/web-app/web-happ.yaml | 2 +- package.json | 6 ++-- scripts/clean-build.sh | 4 +-- scripts/package-dnas.sh | 32 +++++++++++++++++++- scripts/run-dev-conductor.sh | 2 +- 12 files changed, 80 insertions(+), 48 deletions(-) diff --git a/bundles/app_templates/full_suite/happ.yaml b/bundles/app_templates/full_suite/happ.yaml index d05f52ffe..5e4dcc279 100644 --- a/bundles/app_templates/full_suite/happ.yaml +++ b/bundles/app_templates/full_suite/happ.yaml @@ -7,40 +7,40 @@ roles: strategy: create deferred: false dna: - url: 
"/hrea_specification.dna" + bundled: "/specification/hrea_specification.dna" clone_limit: 0 - id: hrea_observation_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_observation.dna" + bundled: "/observation/hrea_observation.dna" clone_limit: 0 - id: hrea_planning_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_planning.dna" + bundled: "/planning/hrea_planning.dna" clone_limit: 0 - id: hrea_agreement_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_agreement.dna" + bundled: "/agreement/hrea_agreement.dna" clone_limit: 0 - id: hrea_proposal_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_proposal.dna" + bundled: "/proposal/hrea_proposal.dna" clone_limit: 0 - id: hrea_agent_1 provisioning: strategy: create deferred: false dna: - url: "/hrea_agent.dna" + bundled: "/agent/hrea_agent.dna" clone_limit: 0 diff --git a/bundles/dna_templates/agent/dna.yaml b/bundles/dna_templates/agent/dna.yaml index f33a4f7f8..58d9b5007 100644 --- a/bundles/dna_templates/agent/dna.yaml +++ b/bundles/dna_templates/agent/dna.yaml @@ -4,4 +4,4 @@ uuid: "" properties: null zomes: - name: agent_registration - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_agent_registration_hrea.wasm" diff --git a/bundles/dna_templates/agreement/dna.yaml b/bundles/dna_templates/agreement/dna.yaml index 5c89e5fc3..dbba050a5 100644 --- a/bundles/dna_templates/agreement/dna.yaml +++ b/bundles/dna_templates/agreement/dna.yaml @@ -13,10 +13,10 @@ zomes: # application zomes - name: agreement - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_agreement.wasm" - name: agreement_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_agreement_index_agreement.wasm" # utility zomes - name: remote_auth - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml index b83077399..24c993902 100644 --- a/bundles/dna_templates/observation/dna.yaml +++ b/bundles/dna_templates/observation/dna.yaml @@ -52,27 +52,27 @@ properties: zomes: # application zomes - name: economic_event - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event.wasm" - name: economic_resource - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource.wasm" - name: process - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process.wasm" - name: fulfillment - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_observation.wasm" - name: satisfaction - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_observation.wasm" - name: 
economic_event_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_event_index_observation.wasm" - name: economic_resource_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_economic_resource_index_observation.wasm" - name: process_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process_index_observation.wasm" - name: fulfillment_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_observation.wasm" - name: satisfaction_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_observation.wasm" # utility zomes - name: remote_auth - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/dna_templates/planning/dna.yaml b/bundles/dna_templates/planning/dna.yaml index c9229cfd2..6568fc2a6 100644 --- a/bundles/dna_templates/planning/dna.yaml +++ b/bundles/dna_templates/planning/dna.yaml @@ -30,19 +30,19 @@ zomes: # application zomes - name: commitment - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_commitment.wasm" - name: intent - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_intent.wasm" - name: fulfillment - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_planning.wasm" - name: satisfaction - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_planning.wasm" - name: commitment_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_commitment_index_planning.wasm" - name: intent_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_intent_index_planning.wasm" - name: fulfillment_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" - name: satisfaction_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" diff --git a/bundles/dna_templates/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml index 0df42a5c6..a242cfec3 100644 --- a/bundles/dna_templates/proposal/dna.yaml +++ b/bundles/dna_templates/proposal/dna.yaml @@ -8,14 +8,14 @@ properties: 
record_storage_zome: proposal zomes: - name: proposal - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" - name: proposal_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposal_index_proposal.wasm" - name: proposed_intent - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent.wasm" - name: proposed_intent_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_intent_index_proposal.wasm" - name: proposed_to - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to.wasm" - name: proposed_to_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposed_to_index_proposal.wasm" diff --git a/bundles/dna_templates/specification/dna.yaml b/bundles/dna_templates/specification/dna.yaml index 9f077af8b..3f15dda3c 100644 --- a/bundles/dna_templates/specification/dna.yaml +++ b/bundles/dna_templates/specification/dna.yaml @@ -13,16 +13,16 @@ properties: zomes: # application zomes - name: action - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_actions.wasm" - name: process_specification - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_process_specification.wasm" - name: resource_specification - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification.wasm" - name: resource_specification_index - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_resource_specification_index_specification.wasm" - name: unit - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_unit.wasm" # utility zomes - name: remote_auth - bundled: "../../../target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" diff --git a/bundles/web-app/web-happ.yaml b/bundles/web-app/web-happ.yaml index 0ae96fc30..b21a80def 100644 --- a/bundles/web-app/web-happ.yaml +++ b/bundles/web-app/web-happ.yaml @@ -4,4 +4,4 @@ name: hrea ui: bundled: "../../apps/holorea-graphql-explorer/ui.zip" happ_manifest: - bundled: "../app/full_suite_release_template/hrea_suite.happ" + bundled: "../app/full_suite/hrea_suite.happ" diff --git a/package.json b/package.json index 2e0d43156..e3538900b 100644 --- a/package.json +++ b/package.json @@ -9,8 +9,10 @@ "postinstall": "scripts/postinstall.sh", "shell": "nix-shell", "start": "pnpm run build; npm-run-all --parallel dev:graphql:adapter dev:graphql:client dev:graphql:explorer dht", - "build": 
"npm-run-all --parallel build:graphql build:crates", - "build:crates": "CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown && RUN_WASM_OPT=0 scripts/package-dnas.sh", + "build": "npm-run-all --parallel build:graphql build:holochain:dev", + "build:crates": "CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown", + "build:holochain:dev": "npm run build:crates && RUN_WASM_OPT=0 scripts/package-dnas.sh", + "build:holochain:release": "npm run build:crates && RUN_WASM_OPT=1 BUNDLE_ZOMES=1 scripts/package-dnas.sh", "build:graphql": "npm-run-all build:graphql:adapter build:graphql:client", "build:graphql:adapter": "cd modules/vf-graphql-holochain && pnpm run build", "build:graphql:client": "cd modules/graphql-client && pnpm run build", diff --git a/scripts/clean-build.sh b/scripts/clean-build.sh index 352a6c543..e6a2dd133 100755 --- a/scripts/clean-build.sh +++ b/scripts/clean-build.sh @@ -10,6 +10,6 @@ nix-shell --run hn-node-flush nix-shell --run hn-rust-flush -rm bundles/dna/**/*.dna -rm bundles/app/**/*.happ +rm -Rf bundles/dna/ +rm -Rf bundles/app/ rm bundles/web-app/*.webhapp diff --git a/scripts/package-dnas.sh b/scripts/package-dnas.sh index 0efb09339..946cfd566 100755 --- a/scripts/package-dnas.sh +++ b/scripts/package-dnas.sh @@ -9,17 +9,37 @@ UTIL="${HOLOCHAIN_DNA_UTIL_PATH:-hc}" +# determine repository root for substitution +ROOT_PATH=$(dirname "$0") +ROOT_PATH=$(cd "$ROOT_PATH" && pwd) +ROOT_PATH=$(dirname "$ROOT_PATH") +ROOT_PATH=$(printf '%s\n' "$ROOT_PATH" | sed -e 's/[\/&]/\\&/g') # make safe for sed + # optimise all WASMs first -if [ $RUN_WASM_OPT -ne "0" ]; then +if [[ $RUN_WASM_OPT -ne "0" ]]; then for WASM in target/wasm32-unknown-unknown/release/*.wasm; do echo -e "\e[1mOptimising $WASM\e[0m..." 
wasm-opt -Oz "$WASM" --output "$WASM" done fi +# remove any stale DNA & app bundle files; refresh from templates +rm -Rf bundles/dna +cp -a bundles/dna_templates bundles/dna +rm -Rf bundles/app +cp -a bundles/app_templates bundles/app + # compile DNAs by concatenating WASMs with properties for DIR in bundles/dna/*; do if [[ -d "$DIR" ]]; then + # @see https://github.com/holochain/holochain/issues/966 + # toggle `path`/`bundled` depending on build mode + if [[ $BUNDLE_ZOMES -eq "1" ]]; then + sed -i "s/path:/bundled:/g" "$DIR/dna.yaml" + fi + # substitute absolute paths for compatibility with `path` or `bundled` + sed -i "s//${ROOT_PATH}/g" "$DIR/dna.yaml" + echo -e "\e[1mCompiling DNA in $DIR\e[0m" if "$UTIL" dna pack "$DIR" 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" @@ -32,6 +52,16 @@ done # compile hApp bundles by concatenating DNAs and specifying any config for DIR in bundles/app/*; do if [[ -d "$DIR" ]]; then + # @see https://github.com/holochain/holochain/issues/966 + # toggle `url`/`bundled` and inject paths depending on defn of release download URL + if [[ -n "$RELEASE_DOWNLOAD_URL" ]]; then + RELEASE_DOWNLOAD_URL=$(printf '%s\n' "$RELEASE_DOWNLOAD_URL" | sed -e 's/[\/&]/\\&/g') # make safe for sed + sed -i "s/\\/\\w*/${RELEASE_DOWNLOAD_URL}/g" "$DIR/happ.yaml" + sed -i "s/bundled:/url:/g" "$DIR/happ.yaml" + else + sed -i "s//${ROOT_PATH}\/bundles\/dna/g" "$DIR/happ.yaml" + fi + echo -e "\e[1mBundling hApp in $DIR\e[0m" if "$UTIL" app pack "$DIR" 2>/dev/null; then echo -e "\e[1;32m packing succeeded.\e[0m" diff --git a/scripts/run-dev-conductor.sh b/scripts/run-dev-conductor.sh index 360929200..9e7939775 100755 --- a/scripts/run-dev-conductor.sh +++ b/scripts/run-dev-conductor.sh @@ -16,5 +16,5 @@ APP="${HOLOCHAIN_APP_PORT:-4000}" "$UTIL" s clean "$UTIL" s create -n 1 -d hrea_tester network quic -"$UTIL" s call install-app-bundle ./bundles/app/obs_and_agent/hrea_obs_agent.happ +"$UTIL" s call install-app-bundle ./bundles/app/full_suite/hrea_suite.happ "$UTIL" s run --all -p $APP From 48ff808b7e2c045a047bc3c671a3386704f1e406 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:26:54 +1000 Subject: [PATCH 087/181] update Github release script to use new command --- .github/workflows/release.yml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 83179281b..1fbce59fe 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -63,14 +63,11 @@ jobs: run: nix-shell --command "echo Completed" - name: Install PNPM and dependencies run: nix-shell --command "pnpm install --no-frozen-lockfile" - - name: Set up release bundle from template + - name: Build WASM, dnas, happs run: | - cp -a bundles_templates/* bundles/app/ RELEASE="${GITHUB_REF#refs/tags/}" - sed -i "s//https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}/g" bundles/app/full_suite_release_template/happ.yaml - cat bundles/app/full_suite_release_template/happ.yaml - - name: Build WASM, dnas, happs - run: nix-shell --run 'pnpm run build:crates' + RELEASE_DOWNLOAD_URL="https:\/\/github.com\/${GITHUB_REPOSITORY%/*}\/${GITHUB_REPOSITORY#*/}\/releases\/download\/${RELEASE}" + nix-shell --run 'pnpm run build:holochain:release' - name: Build explorer UI and webhapp package run: nix-shell --run 'pnpm run build:webhapp' - name: upload bundles @@ -78,7 +75,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | gh release upload 
"${GITHUB_REF#refs/tags/}" "bundles/web-app/hrea.webhapp" --clobber - gh release upload "${GITHUB_REF#refs/tags/}" "bundles/app/full_suite_release_template/hrea_suite.happ" --clobber + gh release upload "${GITHUB_REF#refs/tags/}" "bundles/app/full_suite/hrea_suite.happ" --clobber gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agent/hrea_agent.dna" --clobber gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/agreement/hrea_agreement.dna" --clobber gh release upload "${GITHUB_REF#refs/tags/}" "bundles/dna/observation/hrea_observation.dna" --clobber From a5faa23c9ffc693607bf65e80f53ae7ba737d593 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:27:00 +1000 Subject: [PATCH 088/181] update ignore file --- .gitignore | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 27f654bd1..7b9a2bb08 100644 --- a/.gitignore +++ b/.gitignore @@ -10,12 +10,9 @@ node_modules/ # Rust build files /target/ # Holochain build files -/bundles/dna/**/*.dna -/bundles/app/**/*.happ +/bundles/dna/* +/bundles/app/* /webhapp/web-app/*.webhapp -# Manifest Files Under Automation -/bundles/*_template/* - # https://github.com/maxlath/backup-github-repo /repo-backup From 8fb63502b4845b204ca1b885e66236500dc3a406 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:27:22 +1000 Subject: [PATCH 089/181] update docs to reflect new bundle manifest file structure & commands --- README.md | 8 ++++---- docs/README.md | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 74fc65a8d..8381ff8de 100644 --- a/README.md +++ b/README.md @@ -136,13 +136,13 @@ There are a few sets of `*.yaml` configuration files used by Holochain in its bu [**`bundles/`**](bundles/) contains configuration files for: -- `dna`s which group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" (compiled WASM files) into Holochain DNAs. -- `app`s which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application; and +- `dna_templates`s which group assemblages of "[zomes](#zome-modules-inner-holochain-layer)" (compiled WASM files) into Holochain DNAs. +- `app_templates`s which group Holochain 'DNA' modules into 'hApp bundles'. A *hApp bundle* contains all backend components accessible by a single UI application; and - `web-app`s which bind a 'hApp bundle' with a (zipped) JavaScript single-page web application that talks to the Holochain backend. -These bundles are used by the project scripts to run the application locally from this repository. There are also [**`bundles_templates/`**](bundles_templates/) which are used by the Github releases process to build pre-packaged binaries for end-user installation into the [Holochain Launcher](https://github.com/holochain/launcher); in combination with a bundled `web-app` artifact. +These bundles are used by the project scripts to run the application locally from this repository, and to build for release. The `*_templates` are first copied to non-`_template` locations and some substitutions made- see `scripts/package-dnas.sh`. In development mode, zome WASMs are referenced; in release mode everything is bundled together into a much larger file which is too big for the Holochain development Sandbox or Tryorama test runner. 
-If you aren't developing hREA yourself this is a much easier way to set up the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the Holochain Launcher. +If you aren't developing hREA yourself the bundled release is a much easier way to set up the app— simply download the `*.webhapp` file from the [releases page](https://github.com/holo-rea/holo-rea/releases) and open it with the [Holochain Launcher](https://github.com/holochain/launcher). DNAs are the highest-level units of functionality available in the system. One is available for each of the [modules in the hREA framework](https://github.com/holo-rea/ecosystem/wiki/Modules-in-the-HoloREA-framework). diff --git a/docs/README.md b/docs/README.md index 184a45357..cd06a3e0f 100644 --- a/docs/README.md +++ b/docs/README.md @@ -92,7 +92,7 @@ Test parameters: Most of the time during development, you won't want to run the whole test suite but rather just those tests you're currently working on. The usual workflow when developing a module in isolation is: -1. `npm run build:crates` from the repository root to rebuild the module(s) you are working on. +1. `npm run build:holochain:dev` from the repository root to rebuild the module(s) you are working on. 2. `WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 npx tape test/**/*.js` from the `test` directory to run specific tests, substituting a path to an individual file. Note the [env vars](#environment-variables) used here are needed to obtain debug output from the zome code. Getting debug output printed to the screen depends on where you are logging from. From fbf4cc01a42d9155c33701056d6db8f95b935655 Mon Sep 17 00:00:00 2001 From: pospi Date: Sat, 26 Mar 2022 18:29:43 +1000 Subject: [PATCH 090/181] move completions file for tracking MVP features to docs dir --- completions.md => docs/completions.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename completions.md => docs/completions.md (100%) diff --git a/completions.md b/docs/completions.md similarity index 100% rename from completions.md rename to docs/completions.md From d3ae97e285e8b51d7cb5145cd56a9e6fb2d54a49 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Sun, 27 Mar 2022 19:38:26 -0700 Subject: [PATCH 091/181] add the VfModule enum, use it throughout, and export it --- modules/vf-graphql-holochain/README.md | 6 ++-- modules/vf-graphql-holochain/index.ts | 4 +-- .../vf-graphql-holochain/mutations/index.ts | 18 +++++----- .../mutations/proposal.ts | 2 +- .../mutations/proposedIntent.ts | 2 +- .../mutations/proposedTo.ts | 2 +- .../queries/economicEvent.ts | 2 +- modules/vf-graphql-holochain/queries/index.ts | 19 +++++------ .../vf-graphql-holochain/resolvers/agent.ts | 4 +-- .../resolvers/agreement.ts | 8 ++--- .../resolvers/commitment.ts | 12 +++---- .../resolvers/economicEvent.ts | 12 +++---- .../resolvers/economicResource.ts | 8 ++--- .../resolvers/fulfillment.ts | 6 ++-- .../vf-graphql-holochain/resolvers/index.ts | 16 ++++----- .../vf-graphql-holochain/resolvers/intent.ts | 12 +++---- .../vf-graphql-holochain/resolvers/measure.ts | 4 +-- .../vf-graphql-holochain/resolvers/process.ts | 8 ++--- .../resolvers/proposal.ts | 4 +-- .../resolvers/proposedIntent.ts | 6 ++-- .../resolvers/proposedTo.ts | 6 ++-- .../resolvers/resourceSpecification.ts | 8 ++--- .../resolvers/satisfaction.ts | 6 ++-- modules/vf-graphql-holochain/types.ts | 34 ++++++++++++++++--- 24 files changed, 115 insertions(+), 94 deletions(-)
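The diffs that follow replace ad-hoc module ID strings with a shared `VfModule` enum. A minimal sketch of the idea before the per-file changes, assuming string-valued members so that the enum can stand in for the previous string literals; the real definition lives in `modules/vf-graphql-holochain/types.ts`, whose body is not shown in this patch.

```typescript
// A plausible reconstruction, not the verbatim source: string values are
// assumed so existing string-based module lists keep working on the wire.
export enum VfModule {
  Agent = 'agent',
  Measurement = 'measurement',
  Knowledge = 'knowledge',
  Observation = 'observation',
  Planning = 'planning',
  Proposal = 'proposal',
  Agreement = 'agreement',
}

// all modules enabled unless the caller narrows the list (assumed default)
export const DEFAULT_VF_MODULES: VfModule[] = Object.values(VfModule)

// the gating idiom used throughout the resolvers in the diffs below
const enabledVFModules: VfModule[] = DEFAULT_VF_MODULES
const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge)
```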
diff --git a/modules/vf-graphql-holochain/README.md b/modules/vf-graphql-holochain/README.md index 4108dc372..e2b123f92 100644 --- a/modules/vf-graphql-holochain/README.md +++ b/modules/vf-graphql-holochain/README.md @@ -63,10 +63,10 @@ In some cases, tooling may require low-level access to the GraphQL resolver call ```js import { makeExecutableSchema } from '@graphql-tools/schema' -import { generateResolvers } from '@valueflows/vf-graphql-holochain' +import { generateResolvers, VfModule } from '@valueflows/vf-graphql-holochain' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') -const enabledVFModules = ['measurement', 'knowledge', 'observation'] +const enabledVFModules = [VfModule.Measurement, VfModule.Knowledge, VfModule.Observation] const resolvers = generateResolvers({ enabledVFModules }) @@ -76,7 +76,7 @@ const schema = makeExecutableSchema({ }) ``` -Note that the IDs of ValueFlows modules in `enabledVFModules` above do not map exactly 1:1 with the hREA DNA identifiers in `dnaConfig`. For example, the "knowledge" VF module determines the presence of the `ResourceSpecification` and `ProcessSpecification` resolvers, which actually map to an hREA *specification* DNA. +Note that the IDs of ValueFlows modules in `enabledVFModules` above do not map exactly 1:1 with the hREA DNA identifiers in `dnaConfig`. For example, `VfModule.Knowledge` determines the presence of the `ResourceSpecification` and `ProcessSpecification` resolvers, which actually map to an hREA *specification* DNA. ## Repository structure diff --git a/modules/vf-graphql-holochain/index.ts b/modules/vf-graphql-holochain/index.ts index 5f0584468..f4a6783ae 100644 --- a/modules/vf-graphql-holochain/index.ts +++ b/modules/vf-graphql-holochain/index.ts @@ -10,7 +10,7 @@ import { makeExecutableSchema } from '@graphql-tools/schema' -import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAIdMappings, CellId } from './types' +import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAIdMappings, CellId, VfModule } from './types' import generateResolvers from './resolvers' import { mapZomeFn, autoConnect, openConnection, sniffHolochainAppCells } from './connection' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') @@ -23,7 +23,7 @@ export { // direct access to Holochain zome method bindings for authoring own custom resolvers bound to non-REA DNAs mapZomeFn, // types that wrapper libraries may need to manage conductor DNA connection logic - DNAIdMappings, CellId, APIOptions, + DNAIdMappings, CellId, APIOptions, VfModule } /** diff --git a/modules/vf-graphql-holochain/mutations/index.ts b/modules/vf-graphql-holochain/mutations/index.ts index 55f287fff..b2b250b28 100644 --- a/modules/vf-graphql-holochain/mutations/index.ts +++ b/modules/vf-graphql-holochain/mutations/index.ts @@ -5,7 +5,7 @@ * @since: 2019-05-22 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import ResourceSpecification from './resourceSpecification' import ProcessSpecification from './processSpecification' @@ -30,15 +30,13 @@ import Agreement from './agreement' // generic deletion calling format used by all mutations export type deleteHandler = (root: any, args: { revisionId: string }) => Promise -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const VFmodules = enabledVFModules || [] - const hasAgent = -1 !== VFmodules.indexOf("agent") - const hasMeasurement = -1 !== 
VFmodules.indexOf("measurement") - const hasKnowledge = -1 !== VFmodules.indexOf("knowledge") - const hasObservation = -1 !== VFmodules.indexOf("observation") - const hasPlanning = -1 !== VFmodules.indexOf("planning") - const hasProposal = -1 !== VFmodules.indexOf("proposal") - const hasAgreement = -1 !== VFmodules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) return Object.assign( (hasMeasurement ? { ...Unit(dnaConfig, conductorUri) } : {}), diff --git a/modules/vf-graphql-holochain/mutations/proposal.ts b/modules/vf-graphql-holochain/mutations/proposal.ts index 0edbdf040..959409185 100644 --- a/modules/vf-graphql-holochain/mutations/proposal.ts +++ b/modules/vf-graphql-holochain/mutations/proposal.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposals * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/mutations/proposedIntent.ts b/modules/vf-graphql-holochain/mutations/proposedIntent.ts index 3612e3141..3479be950 100644 --- a/modules/vf-graphql-holochain/mutations/proposedIntent.ts +++ b/modules/vf-graphql-holochain/mutations/proposedIntent.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposed intents * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/mutations/proposedTo.ts b/modules/vf-graphql-holochain/mutations/proposedTo.ts index 08ee8134c..5d49ac031 100644 --- a/modules/vf-graphql-holochain/mutations/proposedTo.ts +++ b/modules/vf-graphql-holochain/mutations/proposedTo.ts @@ -1,5 +1,5 @@ /** - * Mutations for manipulating process specification + * Mutations for manipulating proposed to * * @package: HoloREA * @since: 2019-09-12 diff --git a/modules/vf-graphql-holochain/queries/economicEvent.ts b/modules/vf-graphql-holochain/queries/economicEvent.ts index 908d4200c..6584ad05c 100644 --- a/modules/vf-graphql-holochain/queries/economicEvent.ts +++ b/modules/vf-graphql-holochain/queries/economicEvent.ts @@ -5,7 +5,7 @@ * @since: 2019-05-27 */ -import { DNAIdMappings, injectTypename, addTypename } from '../types' +import { DNAIdMappings, injectTypename } from '../types' import { mapZomeFn } from '../connection' import { diff --git a/modules/vf-graphql-holochain/queries/index.ts b/modules/vf-graphql-holochain/queries/index.ts index 65fd71d32..18fc172d1 100644 --- a/modules/vf-graphql-holochain/queries/index.ts +++ b/modules/vf-graphql-holochain/queries/index.ts @@ -5,7 +5,7 @@ * @since: 2019-05-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import Action from './action' import Unit from './unit' @@ -28,15 +28,14 @@ import Proposal from './proposal' import Agreement from './agreement' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const VFmodules = enabledVFModules || [] - const hasAgent = -1 !== 
VFmodules.indexOf("agent") - const hasMeasurement = -1 !== VFmodules.indexOf("measurement") - const hasKnowledge = -1 !== VFmodules.indexOf("knowledge") - const hasObservation = -1 !== VFmodules.indexOf("observation") - const hasPlanning = -1 !== VFmodules.indexOf("planning") - const hasProposal = -1 !== VFmodules.indexOf("proposal") - const hasAgreement = -1 !== VFmodules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) return Object.assign({ ...Action(dnaConfig, conductorUri), diff --git a/modules/vf-graphql-holochain/resolvers/agent.ts b/modules/vf-graphql-holochain/resolvers/agent.ts index 1270744be..403d3b04d 100644 --- a/modules/vf-graphql-holochain/resolvers/agent.ts +++ b/modules/vf-graphql-holochain/resolvers/agent.ts @@ -5,9 +5,9 @@ * @since: 2020-05-28 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { return { __resolveType: (obj, ctx, info) => obj.__typename } diff --git a/modules/vf-graphql-holochain/resolvers/agreement.ts b/modules/vf-graphql-holochain/resolvers/agreement.ts index 3389cb06e..1be14665e 100644 --- a/modules/vf-graphql-holochain/resolvers/agreement.ts +++ b/modules/vf-graphql-holochain/resolvers/agreement.ts @@ -5,7 +5,7 @@ * @since: 2020-06-19 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -14,9 +14,9 @@ import { EconomicEvent, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) const queryCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments') const queryEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events') diff --git a/modules/vf-graphql-holochain/resolvers/commitment.ts b/modules/vf-graphql-holochain/resolvers/commitment.ts index 0018daf28..d6e8de780 100644 --- a/modules/vf-graphql-holochain/resolvers/commitment.ts +++ b/modules/vf-graphql-holochain/resolvers/commitment.ts @@ -5,7 +5,7 @@ * @since: 2019-08-28 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { 
DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -22,11 +22,11 @@ import { import agentQueries from '../queries/agent' import agreementQueries from '../queries/agreement' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasAgreement = -1 !== enabledVFModules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) const readFulfillments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'fulfillment_index', 'query_fulfillments') const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'planning', 'satisfaction_index', 'query_satisfactions') diff --git a/modules/vf-graphql-holochain/resolvers/economicEvent.ts b/modules/vf-graphql-holochain/resolvers/economicEvent.ts index 02d898091..78ebb00b6 100644 --- a/modules/vf-graphql-holochain/resolvers/economicEvent.ts +++ b/modules/vf-graphql-holochain/resolvers/economicEvent.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -24,11 +24,11 @@ import agentQueries from '../queries/agent' import agreementQueries from '../queries/agreement' import resourceQueries from '../queries/economicResource' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") - const hasAgreement = -1 !== enabledVFModules.indexOf("agreement") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) const readFulfillments = mapZomeFn(dnaConfig, conductorUri, 'observation', 'fulfillment_index', 'query_fulfillments') const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'observation', 'satisfaction_index', 'query_satisfactions') diff --git a/modules/vf-graphql-holochain/resolvers/economicResource.ts b/modules/vf-graphql-holochain/resolvers/economicResource.ts index cde8698e7..23bfa807c 100644 --- a/modules/vf-graphql-holochain/resolvers/economicResource.ts +++ b/modules/vf-graphql-holochain/resolvers/economicResource.ts @@ -5,7 +5,7 @@ * @since: 2019-10-31 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -17,9 +17,9 @@ import { Maybe, } from 
'@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) const readResources = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_resource_index', 'query_economic_resources') const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit') diff --git a/modules/vf-graphql-holochain/resolvers/fulfillment.ts b/modules/vf-graphql-holochain/resolvers/fulfillment.ts index d93d16c98..a901b7831 100644 --- a/modules/vf-graphql-holochain/resolvers/fulfillment.ts +++ b/modules/vf-graphql-holochain/resolvers/fulfillment.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -14,8 +14,8 @@ import { Commitment, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasObservation = -1 !== enabledVFModules.indexOf("observation") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events') const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments') diff --git a/modules/vf-graphql-holochain/resolvers/index.ts b/modules/vf-graphql-holochain/resolvers/index.ts index 2073dff50..6f8cb70e5 100644 --- a/modules/vf-graphql-holochain/resolvers/index.ts +++ b/modules/vf-graphql-holochain/resolvers/index.ts @@ -5,7 +5,7 @@ * @since: 2019-05-20 */ -import { DNAIdMappings, ResolverOptions, URI, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, ResolverOptions, URI, DEFAULT_VF_MODULES, VfModule } from '../types' import { DateTimeResolver as DateTime } from 'graphql-scalars' import { openConnection } from '../connection' @@ -53,13 +53,13 @@ export default async (options: ResolverOptions) => { traceAppSignals = undefined, } = options - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") - const hasProposal = -1 !== enabledVFModules.indexOf("proposal") - const hasAgreement = -1 !== enabledVFModules.indexOf("agreement") + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) + const 
hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) + const hasAgreement = -1 !== enabledVFModules.indexOf(VfModule.Agreement) // prefetch connection for this API schema await openConnection(conductorUri, traceAppSignals) diff --git a/modules/vf-graphql-holochain/resolvers/intent.ts b/modules/vf-graphql-holochain/resolvers/intent.ts index 6c649ff22..46948ad90 100644 --- a/modules/vf-graphql-holochain/resolvers/intent.ts +++ b/modules/vf-graphql-holochain/resolvers/intent.ts @@ -5,7 +5,7 @@ * @since: 2019-08-31 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -23,11 +23,11 @@ import agentQueries from '../queries/agent' const extractProposedIntent = (data): ProposedIntent => data.proposedIntent -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") - const hasKnowledge = -1 !== enabledVFModules.indexOf("knowledge") - const hasObservation = -1 !== enabledVFModules.indexOf("observation") - const hasProposal = -1 !== enabledVFModules.indexOf("proposal") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) + const hasProposal = -1 !== enabledVFModules.indexOf(VfModule.Proposal) const readSatisfactions = mapZomeFn(dnaConfig, conductorUri, 'planning', 'satisfaction_index', 'query_satisfactions') const readProcesses = mapZomeFn(dnaConfig, conductorUri, 'observation', 'process_index', 'query_processes') diff --git a/modules/vf-graphql-holochain/resolvers/measure.ts b/modules/vf-graphql-holochain/resolvers/measure.ts index 60966dde7..b3ae00c79 100644 --- a/modules/vf-graphql-holochain/resolvers/measure.ts +++ b/modules/vf-graphql-holochain/resolvers/measure.ts @@ -5,7 +5,7 @@ * @since: 2019-12-24 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -14,7 +14,7 @@ import { Unit, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit') return { diff --git a/modules/vf-graphql-holochain/resolvers/process.ts b/modules/vf-graphql-holochain/resolvers/process.ts index 18856b00c..4b5e39a87 100644 --- a/modules/vf-graphql-holochain/resolvers/process.ts +++ b/modules/vf-graphql-holochain/resolvers/process.ts @@ -5,7 +5,7 @@ * @since: 2019-09-12 */ -import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -16,9 +16,9 @@ import { ProcessSpecification } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasKnowledge = -1 !== 
enabledVFModules.indexOf("knowledge") - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasKnowledge = -1 !== enabledVFModules.indexOf(VfModule.Knowledge) + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events') const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments') diff --git a/modules/vf-graphql-holochain/resolvers/proposal.ts b/modules/vf-graphql-holochain/resolvers/proposal.ts index ce44b4c4f..4c75b1c18 100644 --- a/modules/vf-graphql-holochain/resolvers/proposal.ts +++ b/modules/vf-graphql-holochain/resolvers/proposal.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -17,7 +17,7 @@ import { const extractProposedTo = (data): ProposedTo => data.proposedTo const extractProposedIntent = (data): ProposedIntent => data.proposedIntent -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { const readProposedTo = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposed_to', 'get_proposed_to') const readProposedIntent = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposed_intent', 'get_proposed_intent') diff --git a/modules/vf-graphql-holochain/resolvers/proposedIntent.ts b/modules/vf-graphql-holochain/resolvers/proposedIntent.ts index 7ac764d00..898d222d5 100644 --- a/modules/vf-graphql-holochain/resolvers/proposedIntent.ts +++ b/modules/vf-graphql-holochain/resolvers/proposedIntent.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -14,8 +14,8 @@ import { ProposedIntent, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasPlanning = -1 !== enabledVFModules.indexOf("planning") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasPlanning = -1 !== enabledVFModules.indexOf(VfModule.Planning) const readProposal = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposal', 'get_proposal') const readIntent = mapZomeFn(dnaConfig, conductorUri, 'planning', 'intent', 'get_intent') diff --git a/modules/vf-graphql-holochain/resolvers/proposedTo.ts b/modules/vf-graphql-holochain/resolvers/proposedTo.ts index b27b16dcd..8b5e7366a 100644 --- a/modules/vf-graphql-holochain/resolvers/proposedTo.ts +++ b/modules/vf-graphql-holochain/resolvers/proposedTo.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -16,8 +16,8 @@ import { import agentQueries from '../queries/agent' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: 
string) => { - const hasAgent = -1 !== enabledVFModules.indexOf("agent") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasAgent = -1 !== enabledVFModules.indexOf(VfModule.Agent) const readProposal = mapZomeFn(dnaConfig, conductorUri, 'proposal', 'proposal', 'get_proposal') const readAgent = agentQueries(dnaConfig, conductorUri)['agent'] diff --git a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts index 6fa42c95e..2811b3834 100644 --- a/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts +++ b/modules/vf-graphql-holochain/resolvers/resourceSpecification.ts @@ -5,7 +5,7 @@ * @since: 2019-08-27 */ -import { DNAIdMappings, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -15,9 +15,9 @@ import { Unit, } from '@valueflows/vf-graphql' -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasMeasurement = -1 !== enabledVFModules.indexOf("measurement") - const hasObservation = -1 !== enabledVFModules.indexOf("observation") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasMeasurement = -1 !== enabledVFModules.indexOf(VfModule.Measurement) + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) const queryResources = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_resource_index', 'query_economic_resources') const readUnit = mapZomeFn(dnaConfig, conductorUri, 'specification', 'unit', 'get_unit') diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts index 777bea2df..c50637800 100644 --- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts +++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts @@ -5,7 +5,7 @@ * @since: 2019-08-31 */ -import { DNAIdMappings, addTypename, DEFAULT_VF_MODULES } from '../types' +import { DNAIdMappings, addTypename, DEFAULT_VF_MODULES, VfModule } from '../types' import { mapZomeFn } from '../connection' import { @@ -22,8 +22,8 @@ async function extractRecordsOrFail (query, subfieldId: string): Promise { return val[0][subfieldId] } -export default (enabledVFModules: string[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { - const hasObservation = -1 !== enabledVFModules.indexOf("observation") +export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => { + const hasObservation = -1 !== enabledVFModules.indexOf(VfModule.Observation) const readEvents = mapZomeFn(dnaConfig, conductorUri, 'observation', 'economic_event_index', 'query_economic_events') const readCommitments = mapZomeFn(dnaConfig, conductorUri, 'planning', 'commitment_index', 'query_commitments') diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index cf695ea98..e298d59ea 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -1,6 +1,7 @@ /** * base types for GraphQL query layer * + * * @see https://github.com/valueflows/vf-graphql/blob/master/schemas/structs.gql * * @package: HoloREA prototype @@ -31,7 +32,7 @@ export { CellId } export interface ResolverOptions { // Array of ValueFlows module 
names to include in the schema
   // @see https://lab.allmende.io/valueflows/vf-schemas/vf-graphql#generating-schemas
-  enabledVFModules?: string[],
+  enabledVFModules?: VfModule[],
 
   // Mapping of DNA identifiers to runtime `CellId`s to bind to.
   dnaConfig: DNAIdMappings,
@@ -81,13 +82,36 @@ export function injectTypename (name: string, fn: Resolver): Resolver {
   }
 }
 
+// enum containing all the possible VF modules, including
+// the ones that haven't been implemented within holo-rea yet
+// -> https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/tree/sprout/lib/schemas
+export enum VfModule {
+  Agent,
+  Agreement,
+  Appreciation,
+  Claim,
+  Geolocation,
+  Knowledge,
+  Measurement,
+  Observation,
+  Plan,
+  Planning,
+  Proposal,
+  Recipe,
+  Scenario,
+  Util,
+}
+
 // default 'full suite' VF module set supported by Holo-REA
 export const DEFAULT_VF_MODULES = [
-  'knowledge', 'measurement',
-  'agent',
-  'observation', 'planning',
-  'proposal', 'agreement',
+  VfModule.Knowledge,
+  VfModule.Measurement,
+  VfModule.Agent,
+  VfModule.Observation,
+  VfModule.Planning,
+  VfModule.Proposal,
+  VfModule.Agreement
 ]
 
 // scalar types

From 957e2b05e60dce8447fc3f7e5986bc2f749b3114 Mon Sep 17 00:00:00 2001
From: HackMD
Date: Wed, 30 Mar 2022 04:07:14 +0000
Subject: [PATCH 092/181] switched order

---
 docs/completions.md | 222 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 222 insertions(+)
 create mode 100644 docs/completions.md

diff --git a/docs/completions.md b/docs/completions.md
new file mode 100644
index 000000000..5d90ee500
--- /dev/null
+++ b/docs/completions.md
@@ -0,0 +1,222 @@
+# holo-rea GraphQL API Completions & Statuses
+
+[![hackmd-github-sync-badge](https://hackmd.io/CWcN1gbER9ioLVy8xGUj1Q/badge)](https://hackmd.io/CWcN1gbER9ioLVy8xGUj1Q)
+
+From the point of view of someone calling through GraphQL: what is the overall status of each function in the GraphQL schema? Many functions are not yet implemented, so it is important to know upfront which are and which aren't.
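+
+As a quick orientation before the status lists, here is a minimal usage sketch, illustrative only and not code from this repository, showing how one of the "Implemented & Tested" mutations below might be exercised from any GraphQL client. `executeQuery` is a hypothetical stand-in for whatever execution function your client provides.
+
+```typescript
+// hypothetical client entry point; not part of this repository
+declare function executeQuery (query: string): Promise<any>
+
+// exercise the `createUnit` mutation listed under "Implemented & Tested"
+const CREATE_UNIT = `mutation {
+  createUnit(unit: { label: "kilogram", symbol: "kg" }) {
+    unit { id label symbol }
+  }
+}`
+
+executeQuery(CREATE_UNIT).then((res) => {
+  // expect the stored unit echoed back with its newly assigned id
+  console.log(res.data.createUnit.unit)
+})
+```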
+
+A filtered list of related GitHub issues for tracking these work statuses (so that you can contribute, or report and discuss issues) can be found here: https://github.com/holo-rea/holo-rea/labels/graphql-api
+
+## GraphQL Implementation
+
+### Mutations
+
+#### Implemented & Tested
+- [x] createEconomicEvent
+    - [x] fixed - `newInventoriedResource` `name` property is not persisted - [issue #202](https://github.com/holo-rea/holo-rea/issues/202)
+- [x] createUnit
+- [x] createProcess
+
+#### Implemented & Not Yet Tested
+- [x] createAgreement
+- [x] updateAgreement
+- [x] deleteAgreement
+- [x] createCommitment
+- [x] updateCommitment
+- [x] deleteCommitment
+- [x] updateEconomicEvent
+- [x] deleteEconomicEvent
+- [x] createFulfillment
+- [x] updateFulfillment
+- [x] deleteFulfillment
+- [x] updateEconomicResource
+- [x] createIntent
+- [x] updateIntent
+- [x] deleteIntent
+- [x] updateProcess
+- [x] deleteProcess
+- [x] createProcessSpecification
+- [x] updateProcessSpecification
+- [x] deleteProcessSpecification
+- [x] createProposal
+- [x] updateProposal
+- [x] deleteProposal
+- [x] proposeIntent
+- [x] deleteProposedIntent
+- [x] proposeTo
+- [x] deleteProposedTo
+- [x] updateResourceSpecification
+- [x] deleteResourceSpecification
+- [x] createSatisfaction
+- [x] updateSatisfaction
+- [x] deleteSatisfaction
+- [x] updateUnit
+- [x] deleteUnit
+
+#### Partially Implemented
+- [x] createResourceSpecification
+    - [ ] lacking `defaultUnitOfResource` - [issue #155](https://github.com/holo-rea/holo-rea/issues/155)
+
+#### Has Minor Bug
+
+#### Has Fatal Bug
+
+#### Not Yet Implemented
+- [ ] deleteEconomicResource - [issue #67](https://github.com/holo-rea/holo-rea/issues/67)
+- [ ] createProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] updateProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] deleteProductBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] createPerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updatePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deletePerson - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] createOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updateOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteOrganization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] createAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updateAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteAgentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] createAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] updateAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] deleteAgentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+
+
+
+### Queries
+
+#### Implemented & Tested
+- [x] action
+- [x] actions
+- [x] unit
+- [x] economicEvent
+
+#### Implemented & Not Yet Tested
+- [x] agreement
+- [x] commitment
+- [x] resourceSpecification
+- [x] processSpecification
+- [x] process
+- [x] intent
+- [x] fulfillment
+- [x] satisfaction
+- [x] proposal
+
+#### Has Partial Implementation
+- [x] myAgent
+    - [ ] TODO: define what's lacking
+- [x] agent
+    - [ ] TODO: define what's lacking
+- [x] 
economicResources
+    - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85)
+- [x] economicEvents
+    - [ ] lacking pagination - [issue #85](https://github.com/holo-rea/holo-rea/issues/85)
+- [x] economicResource
+    - [ ] `primaryAccountable` is not implemented - [issue #133](https://github.com/holo-rea/holo-rea/issues/133)
+
+#### Has Minor Bug
+
+
+#### Has Fatal Bug
+- [ ] agents (response wrongly always gives an empty array - [issue #210](https://github.com/holo-rea/holo-rea/issues/210))
+
+#### Not Yet Implemented
+- [ ] proposals - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] satisfactions - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] fulfillments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] intents - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] commitments - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] processes - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] productBatch - [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] productBatches - [issue #84](https://github.com/holo-rea/holo-rea/issues/84) and [issue #134](https://github.com/holo-rea/holo-rea/issues/134)
+- [ ] units - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] processSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] resourceSpecifications - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] agreements - [issue #84](https://github.com/holo-rea/holo-rea/issues/84)
+- [ ] organization - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] organizations - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] person - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] people - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationship - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationships - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationshipRole - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+- [ ] agentRelationshipRoles - [issue #172](https://github.com/holo-rea/holo-rea/issues/172)
+
+### Resolvers
+
+(https://www.apollographql.com/docs/apollo-server/data/resolvers/)
+Connor todo
+
+
+
+## System of Record Comparison
+
+All of the implementation details should be sourced from the [Valueflows RDF Turtle file](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL) (here's a [Formatted View](http://150.146.207.114/lode/extract?owlapi=true&url=https://lab.allmende.io/valueflows/valueflows/-/raw/master/release-doc-in-process/all_vf.TTL)), which is the system of record. While you are looking around, please note that the objects themselves don't have property definitions. The properties themselves define which objects they apply to in the `rdfs:domain` field. The range of values the properties can take is defined by the `rdfs:range` field. This is because RDF views these things like arrows or maps, going from the domain to the range.
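+
+To make that domain/range framing concrete, here is an illustrative sketch, not code from this repository, of reading an RDF property as a typed arrow from its `rdfs:domain` to its `rdfs:range`. The type names are hypothetical and loosely mirror the `vf:provider` property.
+
+```typescript
+// An RDF property behaves like a map from domain to range. For a
+// property such as vf:provider, the rdfs:domain is (roughly) an
+// economic event and the rdfs:range is an agent.
+interface EconomicEvent { id: string }
+interface Agent { id: string }
+
+// the property itself, viewed as an arrow from domain to range
+type Provider = (event: EconomicEvent) => Agent
+```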
+
+The top level objects found in the spec are:
+
+**Key**
+| symbol | meaning |
+| ------------------- | --------------------- |
+| :grey_exclamation: | Not used |
+| - | Not found/not started |
+| :hammer_and_wrench: | In progress |
+| :heavy_check_mark: | Done |
+| K | Knowledge Layer |
+| P | Planning Layer |
+| O | Observation Layer |
+
+**Outside Ontologies**
+| RDF Object | vf-schema file | zome | comments |
+| ------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------- |
+| [foaf:Agent](http://xmlns.com/foaf/spec/) | :grey_exclamation: [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql) | :grey_exclamation: | |
+| [org:Organization](https://www.w3.org/TR/vocab-org/) | :grey_exclamation: | :grey_exclamation: | |
+| [om2:Measure](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L64) | [lib/vf_measurement](https://github.com/holo-rea/holo-rea/blob/sprout/lib/vf_measurement/src/lib.rs#L19) | |
+| [om2:Unit](https://raw.githubusercontent.com/HajoRijgersberg/OM/master/om-2.0.rdf) | :grey_exclamation: [measurement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/measurement.gql#L48) | :grey_exclamation: [rea_unit](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_unit) | This is a technicality. The general shape of it is correct; however, the ontology represents a hierarchy of units that is not correctly reflected in the backend, since it only stores a label and a symbol. The full ontology allows for more flexibility with prefixes, dimension, exponent, etc.; it has enough information to allow conversion between units. It would be hard to implement without a triple-store. |
| [geo:SpatialThing](https://www.w3.org/2003/01/geo/) | [geolocation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/geolocation.gql#L15) | - | |
+| [time](https://www.w3.org/2006/time#) | :grey_exclamation: | :grey_exclamation: | vf-schema: The GraphQL spec only uses the `DateTime` and `Duration` scalars. |
+| [cd:created](https://www.dublincore.org/specifications/dublin-core/dcmi-terms/#created) | :grey_exclamation: | :grey_exclamation: | vf-schema: GraphQL spec only uses the `DateTime` scalar. |
+| [skos:note](https://www.w3.org/TR/skos-reference/#note) | :grey_exclamation: | :grey_exclamation: | vf-schema: Just a `String`. |
+| [dtype:numericUnion](http://www.linkedmodel.org/schema/dtype#numericUnion) | :grey_exclamation: | :grey_exclamation: | This is only needed for the [`om2:hasNumericalValue`](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L549), so it's only internal. |
+
+
+You may notice there is no specification of an Agent. This is because the Valueflows RDF spec uses the [FOAF Vocabulary](http://xmlns.com/foaf/spec/) and the [Organization Ontology](https://www.w3.org/TR/vocab-org/). The holo-rea project has [its own set of concepts right now](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql).
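+
+As a side note on the `om2:Unit` caveat in the table above, the shape actually persisted by the backend can be pictured with the following sketch; the field names are illustrative rather than copied from the zome source.
+
+```typescript
+// per the comment above, the unit zome keeps only a flat label/symbol
+// pair, none of the om2 prefix/dimension/exponent hierarchy
+interface StoredUnit {
+  label: string   // e.g. 'kilogram'
+  symbol: string  // e.g. 'kg'
+}
+```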
+ +| layer | RDF object | vf-schemas file | zome | hrea "module" or DNA | comments | +| ------| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| K | [Scenario Definition](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L124) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L44) | - | - | | +| K | [Process Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L104) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L70) | [rea_process_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | | +| K | [Resource Specification](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L92) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L45) | [rea_resource_specification](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_resource_specification) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | zome: Missing `resource_classified_as`, `default_unit_of_resource`. | +| K | [Action](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L32) | [knowledge](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/knowledge.gql#L19) | [rea_action](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_action/zome) | [specification](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/specification) | vf-schema: Missing `containedEffect`, `locationEffect`. zome: Same as vf-schema. | +| K | [Agent Relationship Role](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L74) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L126) | - | - | vf-schema: Missing `roleBehavior`. | +| K | [Role Behavior](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L80) | - | - | - | vf-schema: This doesn't seem to be implemented yet. 
|
+| K | [Recipe Exchange](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L118) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L106) | - | - | |
+| K | [Recipe Flow](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L112) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L53) | - | - | |
+| K | [Recipe Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L98) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L84) | - | - | |
+| K | [Recipe Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [recipe](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/recipe.gql#L18) | - | - | |
+| P | [Scenario](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L86) | [scenario](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/scenario.gql#L16) | - | - | |
+| P | [Plan](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L133) | [plan](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/plan.gql#L16) | - | - | vf-schema: has extra fields `deletable` and `inScopeOf`; are these for internal use? |
+| P, O | [Process](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L196) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L155) | [rea_process](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_process) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `plannedIn`. What is `unplannedEvents`? For the inverse relationships, do we want to group all `Intent`s, `Commitment`s, and `EconomicEvent`s together in the `inputs` and `outputs`? How are `track` and `trace` being handled? dna: Has extra `before` and `after` fields. `planned_within` is present, despite no implementation (because it just points to an `entryHash`). This is often placed in the Observation layer, or on the line between Observation and Planning. |
+| P | [Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L139) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L94) | [rea_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_intent) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `provider`, `receiver`, `atLocation`. Has a `satisfiedBy` inverse map to `Satisfaction`'s `satisfies`. 
|
+| P | [Proposed Intent](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L151) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L49) | [rea_proposed_intent](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_intent) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | |
+| P | [Proposal](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L145) | [proposal](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/proposal.gql#L16) | [rea_proposal](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposal) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | vf-schema: Missing `eligibleLocation`. Has a `publishes` inverse map to `ProposedIntent`'s `publishedIn`. zome: same. |
+| P | [Proposed To](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L157) | [proposal.agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/bridging/proposal.agent.gql#L20) | [rea_proposed_to](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_proposed_to) | [proposal](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/proposal) | |
+| P | [Commitment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L163) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L24) | [rea_commitment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_commitment) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | vf-schema: Missing `atLocation` and `clauseOf`. Has `fulfilledBy` and `satisfies` inverse maps to `Fulfillment`'s `fulfill` and `Satisfaction`'s `satisfiedBy`. zome: has `plan` instead of `planned_within`. |
+| P | [Satisfaction](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L169) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L188) | [rea_satisfaction](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_satisfaction) | [planning](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/planning) | zome: allows `satisfied_by` to only be either one `EconomicEvent` or `Commitment`. Is this correct? |
+| P | [Agreement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [agreement](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agreement.gql#L19) | [rea_agreement](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_agreement) | [agreement](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/agreement) | |
+| P | [Claim](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L175) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L18) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. 
| +| O | [Economic Resource](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L190) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L83) | [rea_economic_resource](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_resource) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `currentLocation`. Has `contains`, `track`, `trace` maps as additions. | +| O | [dfc:ProductBatch](http://www.virtual-assembly.org/DataFoodConsortium/BusinessOntology) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L139) | - | - | vf-schema: Missing links to `identifies`, but that probably doesn't matter for our use case. | +| O | [Economic Event](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L202) | [observation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/observation.gql#L19) | [rea_economic_event](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_economic_event) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | vf-schema: Missing `realizationOf`, `image`, `provider`, `receiver`, `atLocation`, `toLocation`. Has `track` and `trace` going to `ProductionFlowItem`s. zome: Missing `to_location`. | +| O | [Appreciation](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L232) | [appreciation](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/appreciation.gql#L17) | - | - | Pospi has mentioned to me (Connor) that this has been de-prioritized due to lack of pull for it from use cases ... is more speculative. Hence lack of implementation. | +| P, O | [Fulfillment](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L214) | [planning](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/planning.gql#L166) | [rea_fulfillment](https://github.com/holo-rea/holo-rea/tree/sprout/zomes/rea_fulfillment) | [observation](https://github.com/holo-rea/holo-rea/tree/sprout/dna_bundles/observation) | !! Discrepancy between "layer" and "vf-schema" files. 
FIXME |
+| O | [Settlement](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L226) | [claim](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/claim.gql#L61) | - | - | |
+| O | [Agent Relationship](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L208) | [agent](https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/blob/sprout/lib/schemas/agent.gql#L104) | - | - | |
+
+There are internal system objects used to help specify the rules of logic around the actions:
+
+* [Resource Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L1278)
+* [Contained Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L68)
+* [Location Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L62)
+* [Onhand Effect](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L56)
+* [Input/Output](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L44)
+* [Pairs With](https://lab.allmende.io/valueflows/valueflows/-/blob/master/release-doc-in-process/all_vf.TTL#L38)
+
+In the gql version, these are just strings (need to learn more specifics).

From bfde90bd5e634ca6a4a0b36eaa4e4d9fcd9ff472 Mon Sep 17 00:00:00 2001
From: pospi
Date: Sun, 3 Apr 2022 18:01:21 +1000
Subject: [PATCH 093/181] add temporary workaround for Tryorama timeout so integration tests can pass

---
 scripts/fixTryoramaTimeout.js | 21 +++++++++++++++++++++
 scripts/postinstall.sh | 2 ++
 2 files changed, 23 insertions(+)
 create mode 100755 scripts/fixTryoramaTimeout.js

diff --git a/scripts/fixTryoramaTimeout.js b/scripts/fixTryoramaTimeout.js
new file mode 100755
index 000000000..7b9c579b6
--- /dev/null
+++ b/scripts/fixTryoramaTimeout.js
@@ -0,0 +1,21 @@
+/// Temporary script to fix Tryorama websocket timeouts
+/// for tests involving DNAs in excess of 16MB.
+
+const fs = require('fs')
+const path = require('path')
+
+const filePath = path.resolve(__dirname, '../node_modules/.pnpm/@holochain+client@0.3.2/node_modules/@holochain/client/lib/websocket/common.js')
+
+if (!fs.existsSync(filePath)) {
+  console.error('Unable to find Tryorama websocket file for patching. Was it updated? 
Is this script still needed?') + process.exit(1) +} + +const contents = fs.readFileSync(filePath) + '' + +fs.writeFileSync(filePath, contents.replace( + /exports\.DEFAULT_TIMEOUT\s*=\s*\d+/, + 'exports.DEFAULT_TIMEOUT = 50000' +)) + +console.log('Tryorama websocket timeout patched successfully!') diff --git a/scripts/postinstall.sh b/scripts/postinstall.sh index 09e78f0b0..a597147c5 100755 --- a/scripts/postinstall.sh +++ b/scripts/postinstall.sh @@ -50,3 +50,5 @@ else exit 1 } fi + +node scripts/fixTryoramaTimeout.js From 8675452cafbfa7b48987860d86d238edf7106c1e Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 8 Apr 2022 15:02:48 +1000 Subject: [PATCH 094/181] remove comment --- modules/vf-graphql-holochain/types.ts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index e298d59ea..fa982c2dd 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -1,11 +1,6 @@ /** * base types for GraphQL query layer * - * - * @see https://github.com/valueflows/vf-graphql/blob/master/schemas/structs.gql - * - * @package: HoloREA prototype - * @since: 2019-01-03 * @package: HoloREA * @since: 2019-05-20 */ From 163126645f6db48c20fdecc5facd5fa8a883d050 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 8 Apr 2022 15:04:48 +1000 Subject: [PATCH 095/181] remove util schema from modules list, it always gets included automatically --- modules/vf-graphql-holochain/types.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index fa982c2dd..71f6c1795 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -94,7 +94,6 @@ export enum VfModule { Proposal, Recipe, Scenario, - Util, } // default 'full suite' VF module set supported by Holo-REA From d02fb9eddf973d59ee4ee4ce287bae1a3c670c78 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 8 Apr 2022 15:07:05 +1000 Subject: [PATCH 096/181] alphabetise default modules for clarity --- modules/vf-graphql-holochain/types.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index 71f6c1795..fa5f3174c 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -86,6 +86,7 @@ export enum VfModule { Appreciation, Claim, Geolocation, + History, Knowledge, Measurement, Observation, @@ -99,13 +100,13 @@ export enum VfModule { // default 'full suite' VF module set supported by Holo-REA export const DEFAULT_VF_MODULES = [ + VfModule.Agent, + VfModule.Agreement, VfModule.Knowledge, VfModule.Measurement, - VfModule.Agent, VfModule.Observation, VfModule.Planning, VfModule.Proposal, - VfModule.Agreement ] // scalar types From 6c4719c59bc729411c7a6ab695c4360bbe110ef7 Mon Sep 17 00:00:00 2001 From: pospi Date: Fri, 8 Apr 2022 15:19:34 +1000 Subject: [PATCH 097/181] define VFModule enums as strings to avoid breakage when passed to vf-graphql-holochain (which is a pure-js module) --- modules/vf-graphql-holochain/types.ts | 28 +++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/modules/vf-graphql-holochain/types.ts b/modules/vf-graphql-holochain/types.ts index fa5f3174c..7a40e0fe8 100644 --- a/modules/vf-graphql-holochain/types.ts +++ b/modules/vf-graphql-holochain/types.ts @@ -81,20 +81,20 @@ export function injectTypename (name: string, fn: Resolver): Resolver { // the ones that haven't 
been implemented within holo-rea yet // -> https://lab.allmende.io/valueflows/vf-schemas/vf-graphql/-/tree/sprout/lib/schemas export enum VfModule { - Agent, - Agreement, - Appreciation, - Claim, - Geolocation, - History, - Knowledge, - Measurement, - Observation, - Plan, - Planning, - Proposal, - Recipe, - Scenario, + Agent = 'agent', + Agreement = 'agreement', + Appreciation = 'appreciation', + Claim = 'claim', + Geolocation = 'geolocation', + History = 'history', + Knowledge = 'knowledge', + Measurement = 'measurement', + Observation = 'observation', + Plan = 'plan', + Planning = 'planning', + Proposal = 'proposal', + Recipe = 'recipe', + Scenario = 'scenario', } // default 'full suite' VF module set supported by Holo-REA From e034d68292756030da83b6416187b7988d7d5cef Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 8 Apr 2022 10:57:34 -0700 Subject: [PATCH 098/181] make sed command work cross platform mac and linux --- scripts/package-dnas.sh | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/package-dnas.sh b/scripts/package-dnas.sh index 946cfd566..db7768533 100755 --- a/scripts/package-dnas.sh +++ b/scripts/package-dnas.sh @@ -29,16 +29,20 @@ cp -a bundles/dna_templates bundles/dna rm -Rf bundles/app cp -a bundles/app_templates bundles/app +# sed -i.bak works on both mac and linux +# https://stackoverflow.com/a/22084103/2132755 + # compile DNAs by concatenating WASMs with properties for DIR in bundles/dna/*; do if [[ -d "$DIR" ]]; then # @see https://github.com/holochain/holochain/issues/966 # toggle `path`/`bundled` depending on build mode if [[ $BUNDLE_ZOMES -eq "1" ]]; then - sed -i "s/path:/bundled:/g" "$DIR/dna.yaml" + sed -i.bak "s/path:/bundled:/g" "$DIR/dna.yaml" fi # substitute absolute paths for compatibility with `path` or `bundled` - sed -i "s//${ROOT_PATH}/g" "$DIR/dna.yaml" + sed -i.bak "s//${ROOT_PATH}/g" "$DIR/dna.yaml" + rm "$DIR/dna.yaml.bak" echo -e "\e[1mCompiling DNA in $DIR\e[0m" if "$UTIL" dna pack "$DIR" 2>/dev/null; then @@ -56,11 +60,12 @@ for DIR in bundles/app/*; do # toggle `url`/`bundled` and inject paths depending on defn of release download URL if [[ -n "$RELEASE_DOWNLOAD_URL" ]]; then RELEASE_DOWNLOAD_URL=$(printf '%s\n' "$RELEASE_DOWNLOAD_URL" | sed -e 's/[\/&]/\\&/g') # make safe for sed - sed -i "s/\\/\\w*/${RELEASE_DOWNLOAD_URL}/g" "$DIR/happ.yaml" - sed -i "s/bundled:/url:/g" "$DIR/happ.yaml" + sed -i.bak "s/\\/\\w*/${RELEASE_DOWNLOAD_URL}/g" "$DIR/happ.yaml" + sed -i.bak "s/bundled:/url:/g" "$DIR/happ.yaml" else - sed -i "s//${ROOT_PATH}\/bundles\/dna/g" "$DIR/happ.yaml" + sed -i.bak "s//${ROOT_PATH}\/bundles\/dna/g" "$DIR/happ.yaml" fi + rm "$DIR/happ.yaml.bak" echo -e "\e[1mBundling hApp in $DIR\e[0m" if "$UTIL" app pack "$DIR" 2>/dev/null; then From a38175ad7fa11809072735132d27a549feeb3050 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 11 Apr 2022 19:20:12 -0700 Subject: [PATCH 099/181] fix basic error blocking test running --- test/agreement/test_agreement_links.js | 5 ++--- test/init.js | 1 - 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/test/agreement/test_agreement_links.js b/test/agreement/test_agreement_links.js index 8d7cf04ea..c4cb95667 100644 --- a/test/agreement/test_agreement_links.js +++ b/test/agreement/test_agreement_links.js @@ -3,7 +3,6 @@ const { buildConfig, buildRunner, buildPlayer, - bridge, } = require('../init') const runner = buildRunner() @@ -13,8 +12,8 @@ const config = buildConfig({ planning: getDNA('planning'), agreement: getDNA('agreement'), 
}, [ - bridge('vf_agreement', 'planning', 'agreement'), - bridge('vf_agreement', 'observation', 'agreement'), + // bridge('vf_agreement', 'planning', 'agreement'), + // bridge('vf_agreement', 'observation', 'agreement'), ]) const testEventProps = { diff --git a/test/init.js b/test/init.js index 75f2241fa..e3af54124 100644 --- a/test/init.js +++ b/test/init.js @@ -167,7 +167,6 @@ module.exports = { buildPlayer, buildGraphQL, buildRunner, - bridge: Config.bridge, buildConfig: Config.gen, seralizeId, From be89de7311c3c9ca7ec4c3280822d8c23a8d70ad Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 12:28:52 -0700 Subject: [PATCH 100/181] make the integration tests more usable, and more informative --- package.json | 2 +- pnpm-lock.yaml | 75 +++++++++++++++++++++++++++++ scripts/integration-tests.sh | 92 ++++++++++++++++++++++++++++++++++++ test/package.json | 6 +-- 4 files changed, 171 insertions(+), 4 deletions(-) create mode 100755 scripts/integration-tests.sh diff --git a/package.json b/package.json index e3538900b..b1e14a249 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "watch": "npm-watch", "test": "npm-run-all test:unit test:integration", "test:unit": "CARGO_TARGET_DIR=target cargo test --target wasm32-unknown-unknown", - "test:integration": "cd test && npm test", + "test:integration": "scripts/integration-tests.sh", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", "clean:build": "scripts/clean-build.sh" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ba5406fc0..63bb0e942 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -128,6 +128,7 @@ importers: json3: ^3.3.2 randombytes: ^2.1.0 source-map-support: ^0.5.16 + tap-dot: 2.0.0 tape: ^4.9.2 dependencies: js-base64: 3.7.2 @@ -150,6 +151,7 @@ importers: is-function: 1.0.2 json3: 3.3.3 source-map-support: 0.5.21 + tap-dot: 2.0.0 tape: 4.15.0 packages: @@ -3216,6 +3218,11 @@ packages: hasBin: true dev: true + /ansi-regex/2.1.1: + resolution: {integrity: sha1-w7M6te42DYbg5ijwRorn7yfWVN8=} + engines: {node: '>=0.10.0'} + dev: true + /ansi-regex/3.0.0: resolution: {integrity: sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=} engines: {node: '>=4'} @@ -3236,6 +3243,11 @@ packages: engines: {node: '>=12'} dev: true + /ansi-styles/2.2.1: + resolution: {integrity: sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=} + engines: {node: '>=0.10.0'} + dev: true + /ansi-styles/3.2.1: resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} @@ -4000,6 +4012,17 @@ packages: engines: {node: '>=4'} dev: true + /chalk/1.1.3: + resolution: {integrity: sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-styles: 2.2.1 + escape-string-regexp: 1.0.5 + has-ansi: 2.0.0 + strip-ansi: 3.0.1 + supports-color: 2.0.0 + dev: true + /chalk/2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -6825,6 +6848,13 @@ packages: resolution: {integrity: sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g==} dev: true + /has-ansi/2.0.0: + resolution: {integrity: sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-regex: 2.1.1 + dev: true + /has-bigints/1.0.1: resolution: {integrity: sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==} dev: true @@ -10455,6 +10485,10 @@ packages: 
strip-json-comments: 2.0.1 dev: true + /re-emitter/1.1.4: + resolution: {integrity: sha512-C0SIXdXDSus2yqqvV7qifnb4NoWP7mEBXJq3axci301mXHCZb8Djwm4hrEZo4UeXRaEnfjH98uQ8EBppk2oNWA==} + dev: true + /react-app-polyfill/3.0.0: resolution: {integrity: sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w==} engines: {node: '>=14'} @@ -11438,6 +11472,12 @@ packages: extend-shallow: 3.0.2 dev: true + /split/1.0.1: + resolution: {integrity: sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==} + dependencies: + through: 2.3.8 + dev: true + /sprintf-js/1.0.3: resolution: {integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=} dev: true @@ -11600,6 +11640,13 @@ packages: is-regexp: 1.0.0 dev: true + /strip-ansi/3.0.1: + resolution: {integrity: sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=} + engines: {node: '>=0.10.0'} + dependencies: + ansi-regex: 2.1.1 + dev: true + /strip-ansi/4.0.0: resolution: {integrity: sha1-qEeQIusaw2iocTibY1JixQXuNo8=} engines: {node: '>=4'} @@ -11703,6 +11750,11 @@ packages: - utf-8-validate dev: false + /supports-color/2.0.0: + resolution: {integrity: sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=} + engines: {node: '>=0.8.0'} + dev: true + /supports-color/5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} @@ -11848,6 +11900,25 @@ packages: - ts-node dev: true + /tap-dot/2.0.0: + resolution: {integrity: sha512-7N1yPcRDgdfHCUbG6lZ0hXo53NyXhKIjJNhqKBixl9HVEG4QasG16Nlvr8wRnqr2ZRYVWmbmxwF3NOBbTLtQLQ==} + hasBin: true + dependencies: + chalk: 1.1.3 + tap-out: 1.4.2 + through2: 2.0.5 + dev: true + + /tap-out/1.4.2: + resolution: {integrity: sha1-yQfsG/lAURHQiCY+kvVgi4jLs3o=} + hasBin: true + dependencies: + re-emitter: 1.1.4 + readable-stream: 2.3.7 + split: 1.0.1 + trim: 0.0.1 + dev: true + /tap-parser/0.4.3: resolution: {integrity: sha1-pOrhkMENdsehEZIf84u+TVjwnuo=} dependencies: @@ -12181,6 +12252,10 @@ packages: punycode: 2.1.1 dev: true + /trim/0.0.1: + resolution: {integrity: sha1-WFhUf2spB1fulczMZm+1AITEYN0=} + dev: true + /triple-beam/1.3.0: resolution: {integrity: sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==} dev: true diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh new file mode 100755 index 000000000..ef3ed499f --- /dev/null +++ b/scripts/integration-tests.sh @@ -0,0 +1,92 @@ +#!/usr/bin/env bash +# +# Run nodejs integration tests +# +# @package: hREA +# @since: 2022-04-12 +# +## + +cd test + +WASM_LOG=debug +RUST_LOG=error +RUST_BACKTRACE=1 +GRAPHQL_DEBUG=1 + +# 3 tests +# 0 passed +# 3 failed +# echo 'running agent tests' +# npx tape agent/*.js | npx tap-dot + +# 4 tests +# 47 passed +# 1 failed +# ..........................................x..... 
+# echo 'running economic-resource tests' +# npx tape economic-resource/*.js | npx tap-dot + +# 3 tests +# 0 passed +# 3 failed +# echo 'running proposal tests' +# npx tape proposal/*.js | npx tap-dot + +# 2 tests +# 0 passed +# 2 failed +# echo 'running agreement tests' +# npx tape agreement/*.js | npx tap-dot + +# 1 tests +# 0 passed +# 1 failed +# echo 'running flows tests' +# npx tape flows/*.js | npx tap-dot + +# 1 tests +# 0 passed +# 1 failed +# echo 'running satisfaction tests' +# npx tape satisfaction/*.js | npx tap-dot + +# 11 tests +# 30 passed +# 6 failed +# .....................x.....xx....xxx +# echo 'running core-architecture tests' +# npx tape core-architecture/*.js | npx tap-dot + +# 1 tests +# 2 passed +# 1 failed +# ..x +# echo 'running fulfillment tests' +# npx tape fulfillment/*.js | npx tap-dot + +# 1 tests +# 0 passed +# 1 failed +# echo 'running social-architectures tests' +# npx tape social-architectures/*.js | npx tap-dot + +# 2 tests +# 13 passed +# echo 'running economic-event tests' +# npx tape economic-event/*.js | npx tap-dot + +# 2 tests +# 26 passed +# 24 failed +# .........xxxxxxxx.............xxxxxxxx....xxxxxxxx +# echo 'running process tests' +# npx tape process/*.js | npx tap-dot + + +# 5 tests +# 9 passed +# 4 failed +# ..x..x..x...x +# echo 'running specification tests' +# npx tape specification/*.js | npx tap-dot \ No newline at end of file diff --git a/test/package.json b/test/package.json index ba10672b7..34ab2163e 100644 --- a/test/package.json +++ b/test/package.json @@ -5,8 +5,7 @@ "description": "Integration tests for HoloREA DHTs", "main": "index.js", "scripts": { - "playground": "holochain-playground", - "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape test_*.js **/*.js | faucet" + "playground": "holochain-playground" }, "devDependencies": { "@holochain/tryorama": "0.4.10", @@ -26,7 +25,8 @@ "is-function": "^1.0.1", "json3": "^3.3.2", "source-map-support": "^0.5.16", - "tape": "^4.9.2" + "tape": "^4.9.2", + "tap-dot": "2.0.0" }, "repository": { "type": "git", From fae09c87383962be1dd34c26a2442e0868a34157 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 12:41:46 -0700 Subject: [PATCH 101/181] run them all --- scripts/integration-tests.sh | 48 ++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh index ef3ed499f..62c909d40 100755 --- a/scripts/integration-tests.sh +++ b/scripts/integration-tests.sh @@ -17,76 +17,76 @@ GRAPHQL_DEBUG=1 # 3 tests # 0 passed # 3 failed -# echo 'running agent tests' -# npx tape agent/*.js | npx tap-dot +echo 'running agent tests' +npx tape agent/*.js | npx tap-dot # 4 tests # 47 passed # 1 failed # ..........................................x..... 
-# echo 'running economic-resource tests' -# npx tape economic-resource/*.js | npx tap-dot +echo 'running economic-resource tests' +npx tape economic-resource/*.js | npx tap-dot # 3 tests # 0 passed # 3 failed -# echo 'running proposal tests' -# npx tape proposal/*.js | npx tap-dot +echo 'running proposal tests' +npx tape proposal/*.js | npx tap-dot # 2 tests # 0 passed # 2 failed -# echo 'running agreement tests' -# npx tape agreement/*.js | npx tap-dot +echo 'running agreement tests' +npx tape agreement/*.js | npx tap-dot # 1 tests # 0 passed # 1 failed -# echo 'running flows tests' -# npx tape flows/*.js | npx tap-dot +echo 'running flows tests' +npx tape flows/*.js | npx tap-dot # 1 tests # 0 passed # 1 failed -# echo 'running satisfaction tests' -# npx tape satisfaction/*.js | npx tap-dot +echo 'running satisfaction tests' +npx tape satisfaction/*.js | npx tap-dot # 11 tests # 30 passed # 6 failed # .....................x.....xx....xxx -# echo 'running core-architecture tests' -# npx tape core-architecture/*.js | npx tap-dot +echo 'running core-architecture tests' +npx tape core-architecture/*.js | npx tap-dot # 1 tests # 2 passed # 1 failed # ..x -# echo 'running fulfillment tests' -# npx tape fulfillment/*.js | npx tap-dot +echo 'running fulfillment tests' +npx tape fulfillment/*.js | npx tap-dot # 1 tests # 0 passed # 1 failed -# echo 'running social-architectures tests' -# npx tape social-architectures/*.js | npx tap-dot +echo 'running social-architectures tests' +npx tape social-architectures/*.js | npx tap-dot # 2 tests # 13 passed -# echo 'running economic-event tests' -# npx tape economic-event/*.js | npx tap-dot +echo 'running economic-event tests' +npx tape economic-event/*.js | npx tap-dot # 2 tests # 26 passed # 24 failed # .........xxxxxxxx.............xxxxxxxx....xxxxxxxx -# echo 'running process tests' -# npx tape process/*.js | npx tap-dot +echo 'running process tests' +npx tape process/*.js | npx tap-dot # 5 tests # 9 passed # 4 failed # ..x..x..x...x -# echo 'running specification tests' -# npx tape specification/*.js | npx tap-dot \ No newline at end of file +echo 'running specification tests' +npx tape specification/*.js | npx tap-dot \ No newline at end of file From 3b74c7562a4c764ed23f7aeb285df311d3d21058 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 15:00:35 -0700 Subject: [PATCH 102/181] fix economic-resource broken test --- lib/hdk_records/src/record_helpers.rs | 12 +++++++++--- test/economic-resource/resource_logic.js | 11 +++++++++++ 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index 4585e887d..b340a7a6b 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -45,7 +45,7 @@ fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { /// than a multi-branching tree. This should be updated during other 'conflict resolution' related /// changes outlined in issue https://github.com/holo-rea/holo-rea/issues/196 pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { - match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })? { + match get_details(entry_hash.clone(), GetOptions { strategy: GetStrategy::Latest })? 
{ Some(Details::Entry(details)) => match details.entry_dht_status { metadata::EntryDhtStatus::Live => match details.updates.len() { 0 => { @@ -57,8 +57,14 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult Err(DataIntegrityError::EntryNotFound), diff --git a/test/economic-resource/resource_logic.js b/test/economic-resource/resource_logic.js index 36d6bdb8f..814d92484 100644 --- a/test/economic-resource/resource_logic.js +++ b/test/economic-resource/resource_logic.js @@ -20,6 +20,17 @@ const testEventProps = { runner.registerScenario('EconomicResource & EconomicEvent record interactions', async (s, t) => { const { cells: [observation, specification] } = await buildPlayer(s, config, ['observation', 'specification']) + // HACK: this prevents a flaky issue + // where it says 'process_specification' zome doesn't exist. + // investigate. + await s.consistency() + await s.consistency() + await s.consistency() + await s.consistency() + await s.consistency() + await s.consistency() + await s.consistency() + // SCENARIO: write initial records const resourceUnitId = mockIdentifier(false) From 8aa352f4d30fb9607ace9311acfa7e3eeb2bc652 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 15:21:40 -0700 Subject: [PATCH 103/181] update test report --- scripts/integration-tests.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh index 62c909d40..86a1547e9 100755 --- a/scripts/integration-tests.sh +++ b/scripts/integration-tests.sh @@ -21,9 +21,7 @@ echo 'running agent tests' npx tape agent/*.js | npx tap-dot # 4 tests -# 47 passed -# 1 failed -# ..........................................x..... +# 77 passed echo 'running economic-resource tests' npx tape economic-resource/*.js | npx tap-dot From 12afd7b215404ce6027257c9f81dbc945aca3779 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 17:19:17 -0700 Subject: [PATCH 104/181] hot fix the recursion bug in get_latest_header_hash --- lib/hdk_records/src/record_helpers.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/hdk_records/src/record_helpers.rs b/lib/hdk_records/src/record_helpers.rs index 4585e887d..b340a7a6b 100644 --- a/lib/hdk_records/src/record_helpers.rs +++ b/lib/hdk_records/src/record_helpers.rs @@ -45,7 +45,7 @@ fn get_header_hash(shh: element::SignedHeaderHashed) -> HeaderHash { /// than a multi-branching tree. This should be updated during other 'conflict resolution' related /// changes outlined in issue https://github.com/holo-rea/holo-rea/issues/196 pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult { - match get_details(entry_hash, GetOptions { strategy: GetStrategy::Latest })? { + match get_details(entry_hash.clone(), GetOptions { strategy: GetStrategy::Latest })? 
{ Some(Details::Entry(details)) => match details.entry_dht_status { metadata::EntryDhtStatus::Live => match details.updates.len() { 0 => { @@ -57,8 +57,14 @@ pub fn get_latest_header_hash(entry_hash: EntryHash) -> RecordAPIResult Err(DataIntegrityError::EntryNotFound), From f86b5f92c0d70f6bd5da190b5798cd7a07f6342d Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 13 Apr 2022 10:58:35 +1000 Subject: [PATCH 105/181] update agent tests init logic for new tryorama --- test/agent/test_agent_core.js | 18 ++++++++---------- test/agent/test_agent_links.js | 19 ++++++------------- test/agent/test_agent_registration.js | 11 +++++------ 3 files changed, 19 insertions(+), 29 deletions(-) diff --git a/test/agent/test_agent_core.js b/test/agent/test_agent_core.js index a33741ef2..b2dfa59ba 100644 --- a/test/agent/test_agent_core.js +++ b/test/agent/test_agent_core.js @@ -7,16 +7,14 @@ const { const runner = buildRunner() -const config = buildConfig({ - agent: getDNA('agent'), -}, { -}) +const config = buildConfig() runner.registerScenario('REA economic agent functionality', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agent').agentAddress + const alice = await buildPlayer(s, config, ['agent']) + const graphQL = alice.graphQL + const aliceAddr = alice.instance('agent').agentAddress // :TODO: update for latest tryorama - let res = await alice.graphQL(`{ + let res = await graphQL(`{ myAgent { id name @@ -28,7 +26,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { t.ok(res.data.myAgent.id, 'agent A can retrieve own agent ID') t.ok(res.data.myAgent.name, 'agent A can retrieve own agent name') - res = await alice.graphQL(`{ + res = await graphQL(`{ agents { id name @@ -51,7 +49,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { await s.consistency() // wait for Bob's join to propagate to Alice - res = await alice.graphQL(`{ + res = await graphQL(`{ agents { id name @@ -61,7 +59,7 @@ runner.registerScenario('REA economic agent functionality', async (s, t) => { t.equal(res.data.agents[1].id, aliceAddr, 'own agent ID returned in list') t.equal(res.data.agents[0].id, bobAddr, 'new agent ID returned in list') - res = await alice.graphQL(`{ + res = await graphQL(`{ agent(id: "${bobAddr}") { id name diff --git a/test/agent/test_agent_links.js b/test/agent/test_agent_links.js index 855de8e6d..a2f73d7f9 100644 --- a/test/agent/test_agent_links.js +++ b/test/agent/test_agent_links.js @@ -7,15 +7,8 @@ const { const runner = buildRunner() -const config = buildConfig({ - agent: getDNA('agent'), - observation: getDNA('observation'), - planning: getDNA('planning'), - proposal: getDNA('proposal'), -}, { - vf_observation: ['planning', 'observation'], - vf_planning: ['proposal', 'planning'], -}) +const config = buildConfig() +const config2 = buildConfig() // required attributes, not involved with test logic const testEventProps = { @@ -24,10 +17,10 @@ const testEventProps = { } runner.registerScenario('Agent relationship traversal', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agent').agentAddress - const bob = await buildPlayer(s, 'bob', config) - const bobAddr = bob.instance('agent').agentAddress + const alice = await buildPlayer(s, config, ['agent', 'observation', 'planning', 'proposal']) + const aliceAddr = alice.instance('agent').agentAddress // :TODO: update for latest tryorama + const bob = await buildPlayer(s, config2, ['agent', 
'observation', 'planning', 'proposal']) + const bobAddr = bob.instance('agent').agentAddress // :TODO: update for latest tryorama // event which shares provider & receiver diff --git a/test/agent/test_agent_registration.js b/test/agent/test_agent_registration.js index 813585e15..a0ef2c193 100644 --- a/test/agent/test_agent_registration.js +++ b/test/agent/test_agent_registration.js @@ -7,13 +7,12 @@ const { const runner = buildRunner() -const config = buildConfig({ - agents: getDNA('agent'), -}, {}) +const config = buildConfig() +const config2 = buildConfig() runner.registerScenario('Agent registration API (happ-agent-registration module)', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) - const aliceAddr = alice.instance('agents').agentAddress + const { cells: [alice] } = await buildPlayer(s, config, ['agent']) + const aliceAddr = alice.instance('agents').agentAddress // :TODO: update for latest tryorama await s.consistency() @@ -31,7 +30,7 @@ runner.registerScenario('Agent registration API (happ-agent-registration module) t.equal(resp.Ok, false, 'can check other registration statuses') // Load Bob - const bob = await buildPlayer(s, 'bob', config) + const { cells: [bob] } = await buildPlayer(s, config2, ['agent']) const bobAddr = bob.instance('agents').agentAddress // Bob hits the DNA for the first time From 06945fa1ff271d6ffc8b6f9d3d78a1ac4c6ad7a1 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 19:06:25 -0700 Subject: [PATCH 106/181] Update resource_logic.js --- test/economic-resource/resource_logic.js | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/test/economic-resource/resource_logic.js b/test/economic-resource/resource_logic.js index 814d92484..36d6bdb8f 100644 --- a/test/economic-resource/resource_logic.js +++ b/test/economic-resource/resource_logic.js @@ -20,17 +20,6 @@ const testEventProps = { runner.registerScenario('EconomicResource & EconomicEvent record interactions', async (s, t) => { const { cells: [observation, specification] } = await buildPlayer(s, config, ['observation', 'specification']) - // HACK: this prevents a flaky issue - // where it says 'process_specification' zome doesn't exist. - // investigate. 
- await s.consistency() - await s.consistency() - await s.consistency() - await s.consistency() - await s.consistency() - await s.consistency() - await s.consistency() - // SCENARIO: write initial records const resourceUnitId = mockIdentifier(false) From d0d2336d14fc55ed84b16b88e32c75f85c056877 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 19:06:51 -0700 Subject: [PATCH 107/181] Update package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b1e14a249..e3538900b 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "watch": "npm-watch", "test": "npm-run-all test:unit test:integration", "test:unit": "CARGO_TARGET_DIR=target cargo test --target wasm32-unknown-unknown", - "test:integration": "scripts/integration-tests.sh", + "test:integration": "cd test && npm test", "clean": "npm-run-all --parallel clean:modules clean:build", "clean:modules": "scripts/clean-modules.sh", "clean:build": "scripts/clean-build.sh" From 31a4d390bb6c6e4156821fc513eefa496a92e6a6 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 19:07:14 -0700 Subject: [PATCH 108/181] Delete integration-tests.sh --- scripts/integration-tests.sh | 90 ------------------------------------ 1 file changed, 90 deletions(-) delete mode 100755 scripts/integration-tests.sh diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh deleted file mode 100755 index 86a1547e9..000000000 --- a/scripts/integration-tests.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env bash -# -# Run nodejs integration tests -# -# @package: hREA -# @since: 2022-04-12 -# -## - -cd test - -WASM_LOG=debug -RUST_LOG=error -RUST_BACKTRACE=1 -GRAPHQL_DEBUG=1 - -# 3 tests -# 0 passed -# 3 failed -echo 'running agent tests' -npx tape agent/*.js | npx tap-dot - -# 4 tests -# 77 passed -echo 'running economic-resource tests' -npx tape economic-resource/*.js | npx tap-dot - -# 3 tests -# 0 passed -# 3 failed -echo 'running proposal tests' -npx tape proposal/*.js | npx tap-dot - -# 2 tests -# 0 passed -# 2 failed -echo 'running agreement tests' -npx tape agreement/*.js | npx tap-dot - -# 1 tests -# 0 passed -# 1 failed -echo 'running flows tests' -npx tape flows/*.js | npx tap-dot - -# 1 tests -# 0 passed -# 1 failed -echo 'running satisfaction tests' -npx tape satisfaction/*.js | npx tap-dot - -# 11 tests -# 30 passed -# 6 failed -# .....................x.....xx....xxx -echo 'running core-architecture tests' -npx tape core-architecture/*.js | npx tap-dot - -# 1 tests -# 2 passed -# 1 failed -# ..x -echo 'running fulfillment tests' -npx tape fulfillment/*.js | npx tap-dot - -# 1 tests -# 0 passed -# 1 failed -echo 'running social-architectures tests' -npx tape social-architectures/*.js | npx tap-dot - -# 2 tests -# 13 passed -echo 'running economic-event tests' -npx tape economic-event/*.js | npx tap-dot - -# 2 tests -# 26 passed -# 24 failed -# .........xxxxxxxx.............xxxxxxxx....xxxxxxxx -echo 'running process tests' -npx tape process/*.js | npx tap-dot - - -# 5 tests -# 9 passed -# 4 failed -# ..x..x..x...x -echo 'running specification tests' -npx tape specification/*.js | npx tap-dot \ No newline at end of file From 57db0c6df37cf4c1ea2afc7ddbe1ea78780b06c9 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 12 Apr 2022 19:07:54 -0700 Subject: [PATCH 109/181] Update package.json --- test/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/package.json b/test/package.json index 
34ab2163e..32b9b7f39 100644 --- a/test/package.json +++ b/test/package.json @@ -5,7 +5,8 @@ "description": "Integration tests for HoloREA DHTs", "main": "index.js", "scripts": { - "playground": "holochain-playground" + "playground": "holochain-playground", + "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape test_*.js **/*.js | tap-dot" }, "devDependencies": { "@holochain/tryorama": "0.4.10", From 3e1a68043ff20c7482aedad5639da2f30bbcaad5 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 13 Apr 2022 13:15:09 +1000 Subject: [PATCH 110/181] bump package versions --- modules/vf-graphql-holochain/package.json | 2 +- test/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/vf-graphql-holochain/package.json b/modules/vf-graphql-holochain/package.json index 318bb98a1..605a577e7 100644 --- a/modules/vf-graphql-holochain/package.json +++ b/modules/vf-graphql-holochain/package.json @@ -41,7 +41,7 @@ "@graphql-tools/schema": "^8.3.1", "@graphql-tools/utils": "^8.6.1", "@holochain/client": "0.3.2", - "@valueflows/vf-graphql": "0.9.0-alpha.2", + "@valueflows/vf-graphql": "0.9.0-alpha.3", "dataloader": "^1.4.0", "deep-for-each": "^3.0.0", "fecha": "^4.1.0", diff --git a/test/package.json b/test/package.json index ba10672b7..f2eb931ae 100644 --- a/test/package.json +++ b/test/package.json @@ -11,7 +11,7 @@ "devDependencies": { "@holochain/tryorama": "0.4.10", "@holochain-playground/cli": "0.0.8", - "@valueflows/vf-graphql": "0.9.0-alpha.2", + "@valueflows/vf-graphql": "0.9.0-alpha.3", "@valueflows/vf-graphql-holochain": "workspace:*", "deep-for-each": "^3.0.0", "easygraphql-tester": "6.0.1", From 092947fad600c744e44c5a31ef7aca7cb82a4277 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 13 Apr 2022 13:15:59 +1000 Subject: [PATCH 111/181] update lockfile --- pnpm-lock.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ba5406fc0..371849cfb 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -75,7 +75,7 @@ importers: '@graphql-tools/schema': ^8.3.1 '@graphql-tools/utils': ^8.6.1 '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2 + '@valueflows/vf-graphql': 0.9.0-alpha.3 buffer: ^6.0.3 dataloader: ^1.4.0 deep-for-each: ^3.0.0 @@ -92,7 +92,7 @@ importers: '@graphql-tools/schema': 8.3.1_graphql@16.3.0 '@graphql-tools/utils': 8.6.1_graphql@16.3.0 '@holochain/client': 0.3.2 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@16.3.0 + '@valueflows/vf-graphql': 0.9.0-alpha.3_graphql@16.3.0 buffer: 6.0.3 dataloader: 1.4.0 deep-for-each: 3.0.0 @@ -111,7 +111,7 @@ importers: specifiers: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 - '@valueflows/vf-graphql': 0.9.0-alpha.2 + '@valueflows/vf-graphql': 0.9.0-alpha.3 '@valueflows/vf-graphql-holochain': workspace:* deep-for-each: ^3.0.0 easygraphql-tester: 6.0.1 @@ -135,7 +135,7 @@ importers: devDependencies: '@holochain-playground/cli': 0.0.8 '@holochain/tryorama': 0.4.10 - '@valueflows/vf-graphql': 0.9.0-alpha.2_graphql@15.8.0 + '@valueflows/vf-graphql': 0.9.0-alpha.3_graphql@15.8.0 '@valueflows/vf-graphql-holochain': link:../modules/vf-graphql-holochain deep-for-each: 3.0.0 easygraphql-tester: 6.0.1_graphql@15.8.0 @@ -2872,8 +2872,8 @@ packages: eslint-visitor-keys: 3.2.0 dev: true - /@valueflows/vf-graphql/0.9.0-alpha.2_graphql@15.8.0: - resolution: {integrity: sha512-vwO+6srw8/uiQ/VjSNOJVWoDJnlUpLyrq/EWw7Q6OjR2mKmIsHdE8RDeOvJKs1fGuDiTfxM4Nw07jb2pjymbvg==} + 
/@valueflows/vf-graphql/0.9.0-alpha.3_graphql@15.8.0: + resolution: {integrity: sha512-yIfLba+KNA6A9e3jJcKACKRv05cmHB/76v5yykSn3gNUIpx9NhYA8ggDVev+PZL5B2smer4zUxwWHKc3LN16wQ==} peerDependencies: graphql: '>=14' dependencies: @@ -2881,8 +2881,8 @@ packages: graphql: 15.8.0 dev: true - /@valueflows/vf-graphql/0.9.0-alpha.2_graphql@16.3.0: - resolution: {integrity: sha512-vwO+6srw8/uiQ/VjSNOJVWoDJnlUpLyrq/EWw7Q6OjR2mKmIsHdE8RDeOvJKs1fGuDiTfxM4Nw07jb2pjymbvg==} + /@valueflows/vf-graphql/0.9.0-alpha.3_graphql@16.3.0: + resolution: {integrity: sha512-yIfLba+KNA6A9e3jJcKACKRv05cmHB/76v5yykSn3gNUIpx9NhYA8ggDVev+PZL5B2smer4zUxwWHKc3LN16wQ==} peerDependencies: graphql: '>=14' dependencies: From eda3a10cd052c93895082dd8e6b8b0fe08f93374 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Thu, 21 Apr 2022 16:33:52 -0700 Subject: [PATCH 112/181] fix test running so that we don't hit non-deterministic cell order --- test/init.js | 63 +++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 18 deletions(-) diff --git a/test/init.js b/test/init.js index e3af54124..8c84236b8 100644 --- a/test/init.js +++ b/test/init.js @@ -13,7 +13,7 @@ const { randomBytes } = require('crypto') const { Base64 } = require('js-base64') const readline = require('readline') -const { Orchestrator, Config, combine, tapeExecutor, localOnly } = require('@holochain/tryorama') +const { Cell, Orchestrator, Config, combine, tapeExecutor, localOnly } = require('@holochain/tryorama') const { GraphQLError } = require('graphql') const GQLTester = require('easygraphql-tester') @@ -52,12 +52,7 @@ const buildRunner = () => new Orchestrator({ /** * Create per-agent interfaces to the DNA */ -const buildGraphQL = async (player, apiOptions, appCellIds) => { - const appCells = await player.adminWs().listCellIds() - const appCellMapping = appCells.reduce((r, cell, idx) => { - r[appCellIds[idx]] = cell - return r - }, {}) +const buildGraphQL = async (player, apiOptions, appCellMapping) => { const tester = new GQLTester(schema, resolverLoggerMiddleware()(await generateResolvers({ ...apiOptions, @@ -88,23 +83,55 @@ const buildGraphQL = async (player, apiOptions, appCellIds) => { */ const buildPlayer = async (scenario, config, agentDNAs, graphQLAPIOptions) => { const [player] = await scenario.players([config]) - const [[firstHapp]] = await player.installAgentsHapps([[agentDNAs.map(getDNA)]]) - - // :SHONK: workaround nondeterministic return order for app cells, luckily nicknames are prefixed with numeric ID - // but :WARNING: this may also break if >10 DNAs running in the same player! - firstHapp.cells.sort((a, b) => { - if (a.cellNick === b.cellNick) return 0 - return a.cellNick > b.cellNick ? 
1 : -1
-  })
+  const agentPubKey = await player.adminWs().generateAgentPubKey()
+  const dnaSources = agentDNAs.map(getDNA)
+  const dnas = await Promise.all(dnaSources.map(async (dnaSource, index) => {
+    const dnaHash = await player.registerDna({ path: dnaSource })
+    return {
+      hash: dnaHash,
+      role_id: agentDNAs[index]
+    }
+  }))
+  const installAppReq = {
+    installed_app_id: 'installed-app-id',
+    agent_key: agentPubKey,
+    dnas: dnas
+  }
+  await player._conductor.adminClient.installApp(installAppReq);
+  // must be enabled to be callable
+  const enabledAppResponse = await player._conductor.adminClient.enableApp({
+    installed_app_id: installAppReq.installed_app_id
+  });
+  if (enabledAppResponse.errors.length > 0) {
+    throw new Error(`Error - Failed to enable app: ${enabledAppResponse.errors}`);
+  }
+  const installedAppResponse = enabledAppResponse.app
+  // construct Cell instances which are the most useful class to the client
+  const rawCells = Object.entries(installedAppResponse.cell_data)
+  const cellsKeyedByRole = {}
+  const cellIdsKeyedByRole = {}
+  rawCells.forEach(([_, { cell_id, role_id }]) => {
+    cellsKeyedByRole[role_id] = new Cell({
+      cellId: cell_id,
+      cellRole: role_id,
+      player: player
+    })
+    cellIdsKeyedByRole[role_id] = cell_id
+  })
+  // important: we should be returning Cells that
+  // occur in the same order as they were passed in via agentDNAs
+  // because the caller of this function assumes they can destructure the
+  // cells property of the response and call the right DNA/Cell
+  const cells = agentDNAs.map((dnaName) => {
+    return cellsKeyedByRole[dnaName]
   })
-
-  const appCellIds = firstHapp.cells.map(c => c.cellRole.match(/hrea_(\w+)\.dna/)[1])
 
   shimConsistency(scenario)
 
   return {
     // :TODO: is it possible to derive GraphQL DNA binding config from underlying Tryorama `config`?
-    graphQL: await buildGraphQL(player, graphQLAPIOptions, appCellIds),
-    cells: firstHapp.cells,
+    graphQL: await buildGraphQL(player, graphQLAPIOptions, cellIdsKeyedByRole),
+    cells: cells,
     player,
   }
 }

From ebbc643f8c2d90606adcb220cd82a32d502c687e Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 21 Apr 2022 17:03:32 -0700
Subject: [PATCH 113/181] add note to dev docs about RUST_LOG tuning

---
 docs/README.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/README.md b/docs/README.md
index cd06a3e0f..7bca8c88a 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -109,6 +109,12 @@ Getting debug output printed to the screen depends on where you are logging from
 
 Debug output from the Holochain conductor can be noisy, which is why all test scripts coded in `package.json` pipe the test output to [faucet](https://github.com/substack/faucet). Remember that you can always add nonsense strings to your debug output and pipe things into `| grep 'XXXX'` instead of `| npx faucet` if you need to locate something specific and the text is overwhelming.
 
+Another way to reduce noise from the Holochain conductor logs, if you want to go down to debug level logs, is to use a config
+var like the following for RUST_LOG, which `holochain` will respect:
+```
+RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error"
+```
+
 ### Advanced execution
 
 If you look at the commands in `package.json` you will see that they are namespaced into groups of functionality. You can also see which commands depend on each other. Most of the time it will be more efficient to understand the command structure and run individual commands than it will be to boot the whole system together.
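[editor's note] A minimal sketch of how the filter above composes with the test commands used elsewhere in this series — assuming it is run from the repository root after a build, with the `tape`/`tap-dot` dev dependencies installed under `test/` as the surrounding patches arrange (the `economic-resource` glob is just an arbitrary example target):

```bash
#!/usr/bin/env bash
# Hypothetical one-off invocation for debugging a single suite with tuned logs.
# The RUST_LOG directive reads left to right: a `debug` default, then
# per-module overrides that cap the two noisiest components at `error`.
cd test
WASM_LOG=debug \
RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error" \
RUST_BACKTRACE=1 \
  npx tape economic-resource/*.js | npx tap-dot
```

Since tryorama (run here with the `localOnly` middleware) spawns the `holochain` conductor as a child of the test process, setting the variables on the `tape` invocation is enough for them to reach the conductor.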
From f6ec1231e4ac9885247f55cb2bfc3bd5fa797792 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Thu, 21 Apr 2022 17:04:16 -0700 Subject: [PATCH 114/181] add a link to further external help --- docs/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/README.md b/docs/README.md index 7bca8c88a..d384317ea 100644 --- a/docs/README.md +++ b/docs/README.md @@ -114,6 +114,7 @@ var like the following for RUST_LOG, which `holochain` will respect: ``` RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error" ``` +You can [learn more here](https://rust-lang-nursery.github.io/rust-cookbook/development_tools/debugging/config_log.html). ### Advanced execution From d6c785fe85a01c2ab1ccb756091a1ccec76f4b35 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 22 Apr 2022 15:03:23 -0700 Subject: [PATCH 115/181] cleanup --- test/init.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/init.js b/test/init.js index 8c84236b8..f56d94d0d 100644 --- a/test/init.js +++ b/test/init.js @@ -97,19 +97,19 @@ const buildPlayer = async (scenario, config, agentDNAs, graphQLAPIOptions) => { agent_key: agentPubKey, dnas: dnas } - await player._conductor.adminClient.installApp(installAppReq); + await player.adminWs().installApp(installAppReq) // must be enabled to be callable - const enabledAppResponse = await player._conductor.adminClient.enableApp({ + const enabledAppResponse = await player.adminWs().enableApp({ installed_app_id: installAppReq.installed_app_id - }); + }) if (enabledAppResponse.errors.length > 0) { - throw new Error(`Error - Failed to enable app: ${enabledAppResponse.errors}`); + throw new Error(`Error - Failed to enable app: ${enabledAppResponse.errors}`) } const installedAppResponse = enabledAppResponse.app // construct Cell instances which are the most useful class to the client - const rawCells = Object.entries(installedAppResponse.cell_data) const cellsKeyedByRole = {} const cellIdsKeyedByRole = {} + const rawCells = Object.entries(installedAppResponse.cell_data) rawCells.forEach(([_, { cell_id, role_id }]) => { cellsKeyedByRole[role_id] = new Cell({ cellId: cell_id, From 2a326a1b0f980418ff9c5969adfa56c47e6f99f3 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 22 Apr 2022 16:36:39 -0700 Subject: [PATCH 116/181] get test/satisfaction to place where its failing on implementation logic --- .../lib_destination/src/lib.rs | 3 +++ zomes/rea_satisfaction/lib_origin/src/lib.rs | 13 +++++++---- .../zome_observation/src/lib.rs | 23 +++++++++++++++---- .../rea_satisfaction/zome_planning/src/lib.rs | 23 +++++++++++++++---- 4 files changed, 50 insertions(+), 12 deletions(-) diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index 4c480f400..1bc85473e 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -26,6 +26,9 @@ use hc_zome_rea_satisfaction_storage::*; use hc_zome_rea_satisfaction_rpc::*; use hc_zome_rea_satisfaction_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateRequest) -> RecordAPIResult where S: AsRef { diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index 5c29c84a8..5e918c7ec 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ 
b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -30,6 +30,9 @@ use hc_zome_rea_satisfaction_storage::*; use hc_zome_rea_satisfaction_rpc::*; use hc_zome_rea_satisfaction_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateRequest) -> RecordAPIResult where S: AsRef { @@ -46,10 +49,12 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques } else { // links to remote event, ping associated foreign DNA & fail if there's an error // :TODO: consider the implications of this in loosely coordinated multi-network spaces - call_zome_method( - event_or_commitment, - &REPLICATE_CREATE_API_METHOD, - CreateParams { satisfaction: satisfaction.to_owned() }, + // we assign a type to the response so that call_zome_method can + // effectively deserialize the response without failing + let _result: ResponseData = call_zome_method( + event_or_commitment, + &REPLICATE_CREATE_API_METHOD, + CreateParams { satisfaction: satisfaction.to_owned() }, )?; } diff --git a/zomes/rea_satisfaction/zome_observation/src/lib.rs b/zomes/rea_satisfaction/zome_observation/src/lib.rs index bdf55a5ff..65ea63e4b 100644 --- a/zomes/rea_satisfaction/zome_observation/src/lib.rs +++ b/zomes/rea_satisfaction/zome_observation/src/lib.rs @@ -20,29 +20,44 @@ fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), SatisfactionAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: SATISFACTION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, crdt_type: CrdtType, required_validations: 1.into(), required_validation_type: RequiredValidationType::default(), - } + }, ])) } #[hdk_extern] fn satisfaction_created(CreateParams { satisfaction }: CreateParams) -> ExternResult { - Ok(handle_create_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?) + Ok(handle_create_satisfaction( + SATISFACTION_ENTRY_TYPE, + satisfaction, + )?) } #[hdk_extern] -fn get_satisfaction(ByAddress { address }: ByAddress) -> ExternResult { +fn get_satisfaction( + ByAddress { address }: ByAddress, +) -> ExternResult { Ok(handle_get_satisfaction(SATISFACTION_ENTRY_TYPE, address)?) } #[hdk_extern] fn satisfaction_updated(UpdateParams { satisfaction }: UpdateParams) -> ExternResult { - Ok(handle_update_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?) + Ok(handle_update_satisfaction( + SATISFACTION_ENTRY_TYPE, + satisfaction, + )?) 
} #[hdk_extern] diff --git a/zomes/rea_satisfaction/zome_planning/src/lib.rs b/zomes/rea_satisfaction/zome_planning/src/lib.rs index 747cb2f8b..370a538ce 100644 --- a/zomes/rea_satisfaction/zome_planning/src/lib.rs +++ b/zomes/rea_satisfaction/zome_planning/src/lib.rs @@ -18,29 +18,44 @@ fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), SatisfactionAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: SATISFACTION_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, crdt_type: CrdtType, required_validations: 1.into(), required_validation_type: RequiredValidationType::default(), - } + }, ])) } #[hdk_extern] fn create_satisfaction(CreateParams { satisfaction }: CreateParams) -> ExternResult { - Ok(handle_create_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?) + Ok(handle_create_satisfaction( + SATISFACTION_ENTRY_TYPE, + satisfaction, + )?) } #[hdk_extern] -fn get_satisfaction(ByAddress { address }: ByAddress) -> ExternResult { +fn get_satisfaction( + ByAddress { address }: ByAddress, +) -> ExternResult { Ok(handle_get_satisfaction(SATISFACTION_ENTRY_TYPE, address)?) } #[hdk_extern] fn update_satisfaction(UpdateParams { satisfaction }: UpdateParams) -> ExternResult { - Ok(handle_update_satisfaction(SATISFACTION_ENTRY_TYPE, satisfaction)?) + Ok(handle_update_satisfaction( + SATISFACTION_ENTRY_TYPE, + satisfaction, + )?) } #[hdk_extern] From e9eda04a1458896ddad4943a2186e79f23201032 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 22 Apr 2022 16:50:21 -0700 Subject: [PATCH 117/181] add comment in the file about the status of the tests in the file --- test/satisfaction/satisfaction_records_e2e.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/satisfaction/satisfaction_records_e2e.js b/test/satisfaction/satisfaction_records_e2e.js index e7c5b7ee7..0ad3f42db 100644 --- a/test/satisfaction/satisfaction_records_e2e.js +++ b/test/satisfaction/satisfaction_records_e2e.js @@ -64,6 +64,14 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a // ASSERT: check event field refs readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) + // TESTS start to fail here, and continue to the end of the file + /* + not ok 8 EconomicEvent.satisfies value present + --- + operator: ok + expected: true + actual: undefined + */ t.ok(readResponse.economicEvent.satisfies, 'EconomicEvent.satisfies value present') t.equal(readResponse.economicEvent.satisfies.length, 1, 'EconomicEvent.satisfies reference saved') t.deepEqual(readResponse.economicEvent.satisfies[0], satisfactionId, 'EconomicEvent.satisfies reference OK') From c534d9917acecacde7ba702286478a796896fa5e Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 22 Apr 2022 16:51:35 -0700 Subject: [PATCH 118/181] extend comment --- test/satisfaction/satisfaction_records_e2e.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/satisfaction/satisfaction_records_e2e.js b/test/satisfaction/satisfaction_records_e2e.js index 0ad3f42db..6c1f151eb 100644 --- a/test/satisfaction/satisfaction_records_e2e.js +++ b/test/satisfaction/satisfaction_records_e2e.js @@ -65,6 +65,8 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a // ASSERT: check event field refs readResponse = await 
observation.call('economic_event', 'get_economic_event', { address: eventId }) // TESTS start to fail here, and continue to the end of the file + // The zome calls themselves are not failing, it is that + // the response data does not match the assertions /* not ok 8 EconomicEvent.satisfies value present --- From e17277811152ab1ebe827946c87981534104aac9 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Fri, 22 Apr 2022 17:24:13 -0700 Subject: [PATCH 119/181] skip agent and social-architectures tests for now --- test/.skip_tests | 1 + test/package.json | 2 +- test/{agent => skip_agent}/test_agent_core.js | 0 test/{agent => skip_agent}/test_agent_links.js | 0 test/{agent => skip_agent}/test_agent_registration.js | 0 test/{agent => skip_agent}/test_group_memberships.js | 0 .../cross_border_trade.js | 0 7 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 test/.skip_tests rename test/{agent => skip_agent}/test_agent_core.js (100%) rename test/{agent => skip_agent}/test_agent_links.js (100%) rename test/{agent => skip_agent}/test_agent_registration.js (100%) rename test/{agent => skip_agent}/test_group_memberships.js (100%) rename test/{social-architectures => skip_social-architectures}/cross_border_trade.js (100%) diff --git a/test/.skip_tests b/test/.skip_tests new file mode 100644 index 000000000..e9476d3f5 --- /dev/null +++ b/test/.skip_tests @@ -0,0 +1 @@ +skip_*/*.js \ No newline at end of file diff --git a/test/package.json b/test/package.json index 106f67e54..367f8e5da 100644 --- a/test/package.json +++ b/test/package.json @@ -6,7 +6,7 @@ "main": "index.js", "scripts": { "playground": "holochain-playground", - "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape test_*.js **/*.js | tap-dot" + "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape --ignore .skip_tests **/*.js | tap-dot" }, "devDependencies": { "@holochain/tryorama": "0.4.10", diff --git a/test/agent/test_agent_core.js b/test/skip_agent/test_agent_core.js similarity index 100% rename from test/agent/test_agent_core.js rename to test/skip_agent/test_agent_core.js diff --git a/test/agent/test_agent_links.js b/test/skip_agent/test_agent_links.js similarity index 100% rename from test/agent/test_agent_links.js rename to test/skip_agent/test_agent_links.js diff --git a/test/agent/test_agent_registration.js b/test/skip_agent/test_agent_registration.js similarity index 100% rename from test/agent/test_agent_registration.js rename to test/skip_agent/test_agent_registration.js diff --git a/test/agent/test_group_memberships.js b/test/skip_agent/test_group_memberships.js similarity index 100% rename from test/agent/test_group_memberships.js rename to test/skip_agent/test_group_memberships.js diff --git a/test/social-architectures/cross_border_trade.js b/test/skip_social-architectures/cross_border_trade.js similarity index 100% rename from test/social-architectures/cross_border_trade.js rename to test/skip_social-architectures/cross_border_trade.js From 2116fdd006972117686856a8671e950da7a2546e Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Sat, 23 Apr 2022 09:42:48 -0700 Subject: [PATCH 120/181] fix the initial error by using mockIdentifier() --- test/flows/flow_records_graphql.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/test/flows/flow_records_graphql.js b/test/flows/flow_records_graphql.js index 6af89ebe5..df9289e56 100644 --- a/test/flows/flow_records_graphql.js +++ b/test/flows/flow_records_graphql.js @@ -4,6 +4,7 @@ const { 
buildRunner, buildPlayer, mockAgentId, + mockIdentifier, } = require('../init') const runner = buildRunner() @@ -86,7 +87,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "due": "2019-11-19T04:29:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "some input will be provided" }, @@ -96,7 +97,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "hasPointInTime": "2019-11-19T04:27:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "some input was used up" }, @@ -112,7 +113,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "due": "2019-11-19T04:29:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "I'll make the thing happen" }, @@ -122,7 +123,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { "provider": tempProviderAgentId, "receiver": tempReceiverAgentId, "hasPointInTime": "2019-11-19T04:27:55.056Z", - "resourceQuantity": { hasNumericalValue: 1, hasUnit: "todo-some-unit-id" }, + "resourceQuantity": { hasNumericalValue: 1, hasUnit: mockIdentifier() }, "resourceClassifiedAs": ["some-resource-type"], "note": "hooray, the thing happened!" }, From af35cfda093f759c84b0a4e683d4c9b36135986d Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 15:37:27 +1000 Subject: [PATCH 121/181] reduce log verbosity in tests via #259 --- docs/README.md | 4 ++-- test/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/README.md b/docs/README.md index d384317ea..c15896155 100644 --- a/docs/README.md +++ b/docs/README.md @@ -85,7 +85,7 @@ Test parameters: - `TRYORAMA_HOLOCHAIN_PATH` determines the path to the `holochain` binary which will ultimately execute all tests. If unset, `holochain` will be presumed to be on the user's `$PATH`. - `GRAPHQL_DEBUG=1` will enable debug output for the parameters transmitted and received by the GraphQL connection used in tests. -- `WASM_LOG=debug` `RUST_LOG=error` `RUST_BACKTRACE=1` are all set when executing the integration test suite. +- `WASM_LOG=debug` `RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error,"` `RUST_BACKTRACE=1` are all set when executing the integration test suite. ### Debugging @@ -93,7 +93,7 @@ Test parameters: Most of the time during development, you won't want to run the whole test suite but rather just those tests you're currently working on. The usual workflow when developing a module in isolation is: 1. `npm run build:holochain:dev` from the repository root to rebuild the module(s) you are working on. -2. `WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 npx tape test/**/*.js` from the `test` directory to run specific tests, substituting a path to an individual file. Note the [env vars](#environment-variables) used here are needed to obtain debug output from the zome code. +2. 
`WASM_LOG=debug RUST_LOG="debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error," RUST_BACKTRACE=1 npx tape test/**/*.js` from the `test` directory to run specific tests, substituting a path to an individual file. Note the [env vars](#environment-variables) used here are needed to obtain debug output from the zome code. Getting debug output printed to the screen depends on where you are logging from. diff --git a/test/package.json b/test/package.json index 367f8e5da..109aff131 100644 --- a/test/package.json +++ b/test/package.json @@ -6,7 +6,7 @@ "main": "index.js", "scripts": { "playground": "holochain-playground", - "test": "WASM_LOG=debug RUST_LOG=error RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape --ignore .skip_tests **/*.js | tap-dot" + "test": "WASM_LOG=debug RUST_LOG=\"debug,wasmer_compiler_cranelift=error,holochain::core::workflow=error,\" RUST_BACKTRACE=1 GRAPHQL_DEBUG=1 tape --ignore .skip_tests **/*.js | tap-dot" }, "devDependencies": { "@holochain/tryorama": "0.4.10", From d93559be6a467588cfe2a917088bc04178a3da7a Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 16:29:17 +1000 Subject: [PATCH 122/181] fix config and declarations to correctly wire up Agreement DNA with EconomicEvent & Commitment --- bundles/dna_templates/agreement/dna.yaml | 13 ++++++++----- zomes/rea_economic_event/lib/src/lib.rs | 2 +- .../zome_idx_observation/src/lib.rs | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/bundles/dna_templates/agreement/dna.yaml b/bundles/dna_templates/agreement/dna.yaml index dbba050a5..c67e44108 100644 --- a/bundles/dna_templates/agreement/dna.yaml +++ b/bundles/dna_templates/agreement/dna.yaml @@ -2,13 +2,16 @@ manifest_version: "1" name: "hrea_agreement" uuid: "" properties: + agreement: + index_zome: agreement_index + agreement_index: + record_storage_zome: agreement remote_auth: permissions: - # :TODO: actually these need to be rearchitected for modular indexing behaviour - - extern_id: index_realized_events - allowed_method: [agreement_index, index_realized_events] - - extern_id: index_agreement_clauses - allowed_method: [agreement_index, index_agreement_clauses] + - extern_id: index_agreement_economic_events + allowed_method: [agreement_index, index_economic_events] + - extern_id: index_agreement_commitments + allowed_method: [agreement_index, index_commitments] zomes: # application zomes diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index 1eab40fe8..66375647f 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -192,7 +192,7 @@ fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEvent create_index!(economic_event.output_of(output_of), process.outputs(&base_address))?; }; if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. 
} = event { - create_index!(economic_event.realization_of(realization_of), agreement.realized(&base_address))?; + create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address))?; }; Ok((revision_id, base_address, entry_resp)) diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index 2e5fa2562..6209981bf 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -11,7 +11,7 @@ use hc_zome_rea_economic_event_rpc::*; struct EconomicEvent { input_of: Local, output_of: Local, - realization_of: Remote, + realization_of: Local, satisfies: Remote, fulfills: Remote, From 79a5bebefe13488b7afb2f7a1250b53907203375 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 16:45:38 +1000 Subject: [PATCH 123/181] fix config bugs in observation <> planning inter-DNA calls --- bundles/dna_templates/observation/dna.yaml | 16 ++++++++-------- bundles/dna_templates/specification/dna.yaml | 2 +- .../zome_idx_observation/src/lib.rs | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml index 24c993902..3673fc4be 100644 --- a/bundles/dna_templates/observation/dna.yaml +++ b/bundles/dna_templates/observation/dna.yaml @@ -27,14 +27,14 @@ properties: record_storage_zome: satisfaction remote_auth: permissions: - - extern_id: index_process_input_commitments - allowed_method: [process, index_input_commitments] - - extern_id: index_process_output_commitments - allowed_method: [process, index_output_commitments] - - extern_id: index_process_input_intents - allowed_method: [process, index_input_intents] - - extern_id: index_process_output_intents - allowed_method: [process, index_output_intents] + - extern_id: index_process_committed_inputs + allowed_method: [process, index_committed_inputs] + - extern_id: index_process_committed_outputs + allowed_method: [process, index_committed_outputs] + - extern_id: index_process_intended_inputs + allowed_method: [process, index_intended_inputs] + - extern_id: index_process_intended_outputs + allowed_method: [process, index_intended_outputs] - extern_id: create_fulfillment allowed_method: [fulfillment, fulfillment_created] diff --git a/bundles/dna_templates/specification/dna.yaml b/bundles/dna_templates/specification/dna.yaml index 3f15dda3c..e5a4835b4 100644 --- a/bundles/dna_templates/specification/dna.yaml +++ b/bundles/dna_templates/specification/dna.yaml @@ -7,7 +7,7 @@ properties: remote_auth: permissions: - extern_id: index_resource_specification_conforming_resources - allowed_method: [resource_specification_index, index_resource_specification_conforming_resources] + allowed_method: [resource_specification_index, index_conforming_resources] - extern_id: read_resource_specification allowed_method: [resource_specification, get_resource_specification] zomes: diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index 6209981bf..5e135d76f 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -12,8 +12,8 @@ struct EconomicEvent { input_of: Local, output_of: Local, realization_of: Local, - satisfies: Remote, - fulfills: Remote, + satisfies: Local, + fulfills: Local, // internal indexes (not part of REA spec) affects: Local, From 
f06c63fa0c3eaa77bd538c51c10d1d4e429730f3 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 16:49:29 +1000 Subject: [PATCH 124/181] fix missing index zome configs for Proposal DNA --- bundles/dna_templates/proposal/dna.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bundles/dna_templates/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml index a242cfec3..39da91010 100644 --- a/bundles/dna_templates/proposal/dna.yaml +++ b/bundles/dna_templates/proposal/dna.yaml @@ -6,6 +6,14 @@ properties: index_zome: proposal_index proposal_index: record_storage_zome: proposal + proposed_intent: + index_zome: proposed_intent_index + proposed_intent_index: + record_storage_zome: proposed_intent + proposed_to: + index_zome: proposed_to_index + proposed_to_index: + record_storage_zome: proposed_to zomes: - name: proposal path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_proposal.wasm" From 48a67e125d54c766b478064f19b69e5f2efb1c83 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 16:57:43 +1000 Subject: [PATCH 125/181] fix more mis-wired indexes in default module arrangement --- bundles/dna_templates/observation/dna.yaml | 2 ++ bundles/dna_templates/proposal/dna.yaml | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml index 3673fc4be..6a1f784e8 100644 --- a/bundles/dna_templates/observation/dna.yaml +++ b/bundles/dna_templates/observation/dna.yaml @@ -19,10 +19,12 @@ properties: record_storage_zome: economic_resource fulfillment: index_zome: fulfillment_index + economic_event_index_zome: economic_event_index fulfillment_index: record_storage_zome: fulfillment satisfaction: index_zome: satisfaction_index + economic_event_index_zome: economic_event_index satisfaction_index: record_storage_zome: satisfaction remote_auth: diff --git a/bundles/dna_templates/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml index 39da91010..fe1030b8a 100644 --- a/bundles/dna_templates/proposal/dna.yaml +++ b/bundles/dna_templates/proposal/dna.yaml @@ -2,16 +2,16 @@ manifest_version: "1" name: "hrea_proposal" uuid: "" properties: - proposal: - index_zome: proposal_index proposal_index: record_storage_zome: proposal proposed_intent: index_zome: proposed_intent_index + proposal_index_zome: proposal_index proposed_intent_index: record_storage_zome: proposed_intent proposed_to: index_zome: proposed_to_index + proposal_index_zome: proposal_index proposed_to_index: record_storage_zome: proposed_to zomes: From 69415d328e08b16753ab7f6a489f1d659fec85e6 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 25 Apr 2022 17:41:55 +1000 Subject: [PATCH 126/181] fix process index zome names in config --- bundles/dna_templates/observation/dna.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml index 6a1f784e8..04e73b8e4 100644 --- a/bundles/dna_templates/observation/dna.yaml +++ b/bundles/dna_templates/observation/dna.yaml @@ -30,13 +30,13 @@ properties: remote_auth: permissions: - extern_id: index_process_committed_inputs - allowed_method: [process, index_committed_inputs] + allowed_method: [process_index, index_committed_inputs] - extern_id: index_process_committed_outputs - allowed_method: [process, index_committed_outputs] + allowed_method: [process_index, index_committed_outputs] - extern_id: index_process_intended_inputs - allowed_method: [process, index_intended_inputs] 
+ allowed_method: [process_index, index_intended_inputs] - extern_id: index_process_intended_outputs - allowed_method: [process, index_intended_outputs] + allowed_method: [process_index, index_intended_outputs] - extern_id: create_fulfillment allowed_method: [fulfillment, fulfillment_created] From f17bc767ad915acb8856f80243201d69b56d8d38 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 09:20:23 -0700 Subject: [PATCH 127/181] fix for revisionId which was not getting serialized/deserialized by the mapZomeCall --- modules/vf-graphql-holochain/connection.ts | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index 92b3e37ef..e958e45cf 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -118,6 +118,7 @@ const HOLOHASH_PREFIX_ENTRY = [0x84, 0x21, 0x24] // uhCEk const HOLOHASH_PREFIX_HEADER = [0x84, 0x29, 0x24] // uhCkk const HOLOHASH_PREFIX_AGENT = [0x84, 0x20, 0x24] // uhCAk +const serializedHashMatchRegex = /^[A-Za-z0-9_+\-/]{53}={0,2}$/ const idMatchRegex = /^[A-Za-z0-9_+\-/]{53}={0,2}:[A-Za-z0-9_+\-/]{53}={0,2}$/ const stringIdRegex = /^\w+?:[A-Za-z0-9_+\-/]{53}={0,2}$/ @@ -134,7 +135,7 @@ function deserializeId(field: string): RecordId { ] } -function deserializeStringId(field: string): Array { +function deserializeStringId(field: string): [Buffer,string] { const matches = field.split(':') return [ Buffer.from(deserializeHash(matches[1])), @@ -151,8 +152,8 @@ function seralizeId(id: RecordId): string { return `${serializeHash(id[1])}:${serializeHash(id[0])}` } -function seralizeStringId(id: Array): string { - return `${id[1]}:${serializeHash(id[0] as Buffer)}` +function seralizeStringId(id: [Buffer,string]): string { + return `${id[1]}:${serializeHash(id[0])}` } const LONG_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ss.SSSZ' @@ -167,6 +168,12 @@ const isoDateRegex = /^\d{4}-\d\d-\d\d(T\d\d:\d\d:\d\d(\.\d\d\d)?)?([+-]\d\d:\d\ const decodeFields = (result: any): void => { deepForEach(result, (value, prop, subject) => { + // HeaderHash + if ((value instanceof Buffer || value instanceof Uint8Array) && value.length === HOLOCHAIN_IDENTIFIER_LEN && checkLeadingBytes(value, HOLOHASH_PREFIX_HEADER)) { + subject[prop] = serializeHash(value as unknown as Uint8Array) + } + + // RecordId | StringId (Agent, for now) if (Array.isArray(value) && value.length == 2 && (value[0] instanceof Buffer || value[0] instanceof Uint8Array) && value[0].length === HOLOCHAIN_IDENTIFIER_LEN && @@ -182,7 +189,7 @@ const decodeFields = (result: any): void => { // :TODO: This one probably isn't safe for regular ID field mixing. // Custom serde de/serializer would make bind this handling to the appropriate fields without duck-typing issues. 
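// (editor's note: a self-contained sketch of the duck-typing this patch adds
// for bare HeaderHash values such as revisionId, assuming
// HOLOCHAIN_IDENTIFIER_LEN is the full 39-byte HoloHash length: 3 prefix
// bytes + 32-byte hash core + 4 DHT location bytes. That length also lines up
// with the 53-character regexes above, a leading 'u' plus 52 base64 chars:)
function looksLikeHeaderHash(value: unknown): boolean {
  const HEADER_PREFIX = [0x84, 0x29, 0x24] // uhCkk, per HOLOHASH_PREFIX_HEADER above
  return (value instanceof Buffer || value instanceof Uint8Array)
    && value.length === 39 // HOLOCHAIN_IDENTIFIER_LEN, assumed
    && HEADER_PREFIX.every((byte, i) => value[i] === byte)
}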
} else { - subject[prop] = seralizeStringId(value) + subject[prop] = seralizeStringId(value as [Buffer, string]) } } @@ -218,6 +225,9 @@ const encodeFields = (args: any): any => { } // deserialise any identifiers back to their binary format + else if (args.match && args.match(serializedHashMatchRegex)) { + return deserializeHash(args) + } else if (args.match && args.match(idMatchRegex)) { return deserializeId(args) } From 13d4987c182fd1d7f08fa492c81e92005189b53a Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 10:18:04 -0700 Subject: [PATCH 128/181] addresses #253 -> test_agreement_links.js needs updating to new tryorama (easy) --- test/agreement/test_agreement_links.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/test/agreement/test_agreement_links.js b/test/agreement/test_agreement_links.js index c4cb95667..4f92df7b2 100644 --- a/test/agreement/test_agreement_links.js +++ b/test/agreement/test_agreement_links.js @@ -3,6 +3,8 @@ const { buildConfig, buildRunner, buildPlayer, + mockIdentifier, + mockAgentId, } = require('../init') const runner = buildRunner() @@ -19,13 +21,13 @@ const config = buildConfig({ const testEventProps = { action: 'raise', resourceClassifiedAs: ['some-resource-type'], - resourceQuantity: { hasNumericalValue: 1, hasUnit: 'dangling-unit-todo-tidy-up' }, - provider: 'agentid-1-todo', - receiver: 'agentid-2-todo', + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier() }, + provider: mockAgentId(), + receiver: mockAgentId(), } runner.registerScenario('Agreement links & queries', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const alice = await buildPlayer(s, config, ['observation', 'planning', 'agreement']) let resp = await alice.graphQL(` mutation($rs: AgreementCreateParams!) 
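// (editor's note: the fixture changes above exist because identifiers now
// travel in the wire shapes patch 127's regexes define: a record id as
// '<entryHash>:<dnaHash>' (two 53-char base64 segments) and a string id,
// e.g. a Unit reference, as '<symbol>:<dnaHash>'. Placeholder strings like
// the old 'agentid-1-todo' no longer deserialize, hence mockAgentId() and
// mockIdentifier(). A hedged illustration, reusing idMatchRegex from
// connection.ts:)
const looksLikeRecordId = (s) =>
  /^[A-Za-z0-9_+\-/]{53}={0,2}:[A-Za-z0-9_+\-/]{53}={0,2}$/.test(s)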
{ From 3833a5c537321e5b06a6f6ad9b2f690c34991eee Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 12:48:08 -0700 Subject: [PATCH 129/181] fixes for #253, partial progress --- lib/hdk_semantic_indexes/client/src/lib.rs | 11 ++++++++++ .../resolvers/agreement.ts | 12 +++++++++-- test/agreement/test_agreement_links.js | 9 +-------- zomes/rea_economic_event/lib/src/lib.rs | 20 ++++++++++++------- zomes/rea_economic_event/zome/src/lib.rs | 7 +++++++ .../zome_idx_observation/src/lib.rs | 11 ++++++++++ 6 files changed, 53 insertions(+), 17 deletions(-) diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index 5188b965a..d4fb7096b 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -243,6 +243,17 @@ pub fn manage_index( F: Copy + Fn(C) -> Option, G: Copy + Fn(C) -> Option, { + + hdk::prelude::debug!("handling call to manage_index, here are the arguments:"); + hdk::prelude::debug!("source: {:?}", source); + hdk::prelude::debug!("dest_addresses: {:?}", dest_addresses); + hdk::prelude::debug!("remove_addresses: {:?}", remove_addresses); + // hdk::prelude::debug!("origin_zome_name_from_config: {:?}", origin_zome_name_from_config); + // hdk::prelude::debug!("origin_fn_name: {:?}", origin_fn_name); + // hdk::prelude::debug!("dest_zome_name_from_config: {:?}", dest_zome_name_from_config); + // hdk::prelude::debug!("dest_fn_name: {:?}", dest_fn_name); + // hdk::prelude::debug!("remote_permission_id: {:?}", remote_permission_id); + // altering an index with no targets is a no-op if dest_addresses.len() == 0 && remove_addresses.len() == 0 { return Ok(vec![]) diff --git a/modules/vf-graphql-holochain/resolvers/agreement.ts b/modules/vf-graphql-holochain/resolvers/agreement.ts index 1be14665e..6fb54c04d 100644 --- a/modules/vf-graphql-holochain/resolvers/agreement.ts +++ b/modules/vf-graphql-holochain/resolvers/agreement.ts @@ -24,12 +24,20 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN return Object.assign( (hasPlanning ? { commitments: async (record: Agreement): Promise => { - return (await queryCommitments({ params: { clauseOf: record.id } })).map(({ commitment }) => commitment) + const commitments = await queryCommitments({ params: { clauseOf: record.id } }) + if (!commitments.edges || !commitments.edges.length) { + return [] + } + return commitments.edges.map(({ node }) => node) }, } : {}), (hasObservation ? 
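// (editor's note: both branches of this resolver now unwrap Relay-style
// connections, { edges: [{ node }] }, instead of the old { results } shape,
// so the same guard repeats. Patch 140 later in this series factors it into
// a shared helper in connection.ts; a sketch matching that helper, shown here
// as the pattern these resolvers follow:)
const extractEdges = <T>(withEdges: { edges: { node: T }[] }): T[] => {
  if (!withEdges.edges || !withEdges.edges.length) {
    return []
  }
  return withEdges.edges.map(({ node }) => node)
}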
{ economicEvents: async (record: Agreement): Promise => { - return (await queryEvents({ params: { realizationOf: record.id } })).map(({ economicEvent }) => economicEvent) + const economicEvents = await queryEvents({ params: { realizationOf: record.id } }) + if (!economicEvents.edges || !economicEvents.edges.length) { + return [] + } + return economicEvents.edges.map(({ node }) => node) }, } : {}), ) diff --git a/test/agreement/test_agreement_links.js b/test/agreement/test_agreement_links.js index 4f92df7b2..40d4b59b4 100644 --- a/test/agreement/test_agreement_links.js +++ b/test/agreement/test_agreement_links.js @@ -9,14 +9,7 @@ const { const runner = buildRunner() -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), - agreement: getDNA('agreement'), -}, [ - // bridge('vf_agreement', 'planning', 'agreement'), - // bridge('vf_agreement', 'observation', 'agreement'), -]) +const config = buildConfig() const testEventProps = { action: 'raise', diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index 66375647f..17d98d227 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -46,6 +46,9 @@ use hc_zome_rea_economic_resource_lib::{ get_link_fields as get_resource_link_fields, }; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + /// Properties accessor for zome config. fn read_economic_resource_index_zome(conf: DnaConfigSlice) -> Option { @@ -186,13 +189,16 @@ fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEvent // handle link fields // :TODO: propagate errors if let EconomicEventCreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = event { - create_index!(economic_event.input_of(input_of), process.inputs(&base_address))?; - }; - if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { - create_index!(economic_event.output_of(output_of), process.outputs(&base_address))?; - }; - if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. } = event { - create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address))?; + let e = create_index!(economic_event.input_of(input_of), process.inputs(&base_address))?; + hdk::prelude::debug!("input_of results: {:?}", e); + }; + if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { + let e = create_index!(economic_event.output_of(output_of), process.outputs(&base_address))?; + hdk::prelude::debug!("output_of results: {:?}", e); + }; + if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. 
} = event { + let e = create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address))?; + hdk::prelude::debug!("realization_of results: {:?}", e); }; Ok((revision_id, base_address, entry_resp)) diff --git a/zomes/rea_economic_event/zome/src/lib.rs b/zomes/rea_economic_event/zome/src/lib.rs index c4641db2e..a4ae5d312 100644 --- a/zomes/rea_economic_event/zome/src/lib.rs +++ b/zomes/rea_economic_event/zome/src/lib.rs @@ -25,6 +25,13 @@ fn entry_defs(_: ()) -> ExternResult { crdt_type: CrdtType, required_validations: 2.into(), required_validation_type: RequiredValidationType::default(), + }, + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), } ])) } diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index 5e135d76f..56ce0a768 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -7,6 +7,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_economic_event_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + AgreementAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} + #[index_zome] struct EconomicEvent { input_of: Local, From e2428b1cafb2f7cebcfcf089595371ab36e6e718 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 15:43:35 -0700 Subject: [PATCH 130/181] fixes for #246 test/process --- Cargo.lock | 1 + .../vf-graphql-holochain/resolvers/process.ts | 12 ++++- test/process/process_records_e2e.js | 49 +++++++++---------- zomes/rea_commitment/lib/Cargo.toml | 2 + zomes/rea_commitment/lib/src/lib.rs | 18 ++++--- .../zome_idx_planning/src/lib.rs | 11 +++++ .../zome_idx_observation/src/lib.rs | 1 + .../zome_idx_observation/src/lib.rs | 11 +++++ 8 files changed, 71 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b8fe54848..53d245e0c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -378,6 +378,7 @@ version = "0.1.0" dependencies = [ "hc_zome_rea_commitment_rpc", "hc_zome_rea_commitment_storage", + "hdk", "hdk_records", "hdk_semantic_indexes_client_lib", "paste", diff --git a/modules/vf-graphql-holochain/resolvers/process.ts b/modules/vf-graphql-holochain/resolvers/process.ts index 4b5e39a87..a5e340d8c 100644 --- a/modules/vf-graphql-holochain/resolvers/process.ts +++ b/modules/vf-graphql-holochain/resolvers/process.ts @@ -28,11 +28,19 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN return Object.assign( { inputs: injectTypename('EconomicEvent', async (record: Process): Promise => { - return (await readEvents({ params: { inputOf: record.id } })).map(({ economicEvent }) => economicEvent) + const economicEvents = await readEvents({ params: { inputOf: record.id } }) + if (!economicEvents.edges || !economicEvents.edges.length) { + return [] + } + return economicEvents.edges.map(({ node }) => node) }), outputs: injectTypename('EconomicEvent', async (record: Process): Promise => { - return (await readEvents({ params: { outputOf: record.id } })).map(({ economicEvent }) => economicEvent) + const economicEvents = await readEvents({ params: 
{ outputOf: record.id } }) + if (!economicEvents.edges || !economicEvents.edges.length) { + return [] + } + return economicEvents.edges.map(({ node }) => node) }), }, (hasPlanning ? { diff --git a/test/process/process_records_e2e.js b/test/process/process_records_e2e.js index b7672c276..56e48aa60 100644 --- a/test/process/process_records_e2e.js +++ b/test/process/process_records_e2e.js @@ -64,24 +64,23 @@ runner.registerScenario('process local query indexes and relationships', async ( // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - console.log(readResponse) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'event input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'event input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'event input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'event input query index created') // ASSERT: test event output query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'event output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, oEventId, 'event output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'event output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oEventId, 'event output query index created') // ASSERT: check process event input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.inputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.inputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.inputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node.id, processId, 'process.inputs query index created') // ASSERT: check process event output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { outputs: oEventId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.outputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.outputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.outputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.outputs query index created') }) const runner2 = buildRunner() @@ 
-157,23 +156,23 @@ runner2.registerScenario('process remote query indexes and relationships', async // ASSERT: test commitment input query edge readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'commitment input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].commitment && readResponse.results[0].commitment.id, iCommitmentId, 'commitment input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'commitment input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iCommitmentId, 'commitment input query index created') // ASSERT: test commitment output query edge readResponse = await planning.call('commitment_index', 'query_commitments', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'commitment output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].commitment && readResponse.results[0].commitment.id, oCommitmentId, 'commitment output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'commitment output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oCommitmentId, 'commitment output query index created') // ASSERT: check process commitment input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.committedInputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.committedInputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.committedInputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.committedInputs query index created') // ASSERT: check process commitment output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { committedOutputs: oCommitmentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.committedOutputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.committedOutputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.committedOutputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.committedOutputs query index created') // ASSERT: check input intent index links readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) @@ -187,23 +186,23 @@ runner2.registerScenario('process remote query indexes and relationships', async // ASSERT: test intent input query edge readResponse 
= await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'intent input query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].intent && readResponse.results[0].intent.id, iIntentId, 'intent input query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'intent input query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'intent input query index created') // ASSERT: test intent output query edge readResponse = await planning.call('intent_index', 'query_intents', { params: { outputOf: processId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'intent output query index present') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].intent && readResponse.results[0].intent.id, oIntentId, 'intent output query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'intent output query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, oIntentId, 'intent output query index created') // ASSERT: check process intent input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.intendedInputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.intendedInputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.intendedInputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.intendedInputs query index created') // ASSERT: check process intent output query edge readResponse = await observation.call('process_index', 'query_processes', { params: { intendedOutputs: oIntentId } }) - t.deepEqual(readResponse && readResponse.results && readResponse.results.length, 1, 'process.intendedOutputs query succeeded') - t.deepEqual(readResponse.results && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'process.intendedOutputs query index created') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges.length, 1, 'process.intendedOutputs query succeeded') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'process.intendedOutputs query index created') // TODO: modify }) diff --git a/zomes/rea_commitment/lib/Cargo.toml b/zomes/rea_commitment/lib/Cargo.toml index 3ac5aadd4..114962217 100644 --- a/zomes/rea_commitment/lib/Cargo.toml +++ b/zomes/rea_commitment/lib/Cargo.toml @@ -6,6 +6,8 @@ edition = "2018" [dependencies] paste = "1.0" +# :DUPE: hdk-rust-revid +hdk = "0.0.124" hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } diff --git a/zomes/rea_commitment/lib/src/lib.rs 
b/zomes/rea_commitment/lib/src/lib.rs index 6fb6b1d88..92fcae0f7 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -31,14 +31,18 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - let (header_addr, base_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, commitment.to_owned())?; // handle link fields + // :TODO: propogate errors! if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &commitment { - create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?; - }; - if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &commitment { - create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address))?; - }; - if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. } = &commitment { - create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address))?; + let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?; + hdk::prelude::debug!("handle_create_commitment::input_of::create_index: {:?}", e); + }; + if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &commitment { + let e = create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address))?; + hdk::prelude::debug!("handle_create_commitment::output_of::create_index: {:?}", e); + }; + if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. } = &commitment { + let e = create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address))?; + hdk::prelude::debug!("handle_create_commitment::clause_of::create_index: {:?}", e); }; // :TODO: pass results from link creation rather than re-reading diff --git a/zomes/rea_commitment/zome_idx_planning/src/lib.rs b/zomes/rea_commitment/zome_idx_planning/src/lib.rs index be4718b67..f35bb0117 100644 --- a/zomes/rea_commitment/zome_idx_planning/src/lib.rs +++ b/zomes/rea_commitment/zome_idx_planning/src/lib.rs @@ -7,6 +7,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_commitment_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProcessAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Commitment { fulfilled_by: Local, diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index 56ce0a768..fa00bee08 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -13,6 +13,7 @@ use hc_zome_rea_economic_event_rpc::*; fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + ProcessAddress::entry_def(), AgreementAddress::entry_def(), EconomicEventAddress::entry_def(), ])) diff --git a/zomes/rea_process/zome_idx_observation/src/lib.rs b/zomes/rea_process/zome_idx_observation/src/lib.rs index c401215cb..a7ffa7b5d 100644 --- a/zomes/rea_process/zome_idx_observation/src/lib.rs +++ b/zomes/rea_process/zome_idx_observation/src/lib.rs @@ -7,6 +7,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_process_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProcessAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} + #[index_zome(query_fn_name="query_processes")] struct Process { inputs: Local, From ea2f282c31dbbbbe78c99b9e5a043e41ae4465d4 Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 16:07:26 -0700 Subject: [PATCH 131/181] add backlink to issue tracker --- zomes/rea_economic_event/lib/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zomes/rea_economic_event/lib/src/lib.rs b/zomes/rea_economic_event/lib/src/lib.rs index 17d98d227..7b5ee8819 100644 --- a/zomes/rea_economic_event/lib/src/lib.rs +++ b/zomes/rea_economic_event/lib/src/lib.rs @@ -187,7 +187,7 @@ fn handle_create_economic_event_record(entry_def_id: S, event: &EconomicEvent )?; // handle link fields - // :TODO: propagate errors + // :TODO: propagate errors https://github.com/h-REA/hREA/issues/264 if let EconomicEventCreateRequest { input_of: MaybeUndefined::Some(input_of), .. 
} = event { let e = create_index!(economic_event.input_of(input_of), process.inputs(&base_address))?; hdk::prelude::debug!("input_of results: {:?}", e); From 68ef1a6f64ce7dc1251863760c7154d58b4dbb0b Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 19:21:07 -0700 Subject: [PATCH 132/181] further fixes for test/fulfillment --- Cargo.lock | 1 + lib/hdk_semantic_indexes/client/src/lib.rs | 17 +++++++++++++---- test/fulfillment/fulfillment_records_e2e.js | 1 + zomes/rea_commitment/lib/src/lib.rs | 6 +++++- .../zome_idx_planning/src/lib.rs | 3 +++ .../zome_idx_observation/src/lib.rs | 1 + .../lib_destination/src/lib.rs | 3 +++ zomes/rea_fulfillment/lib_origin/Cargo.toml | 3 +++ zomes/rea_fulfillment/lib_origin/src/lib.rs | 19 ++++++++++++------- .../zome_idx_observation/src/lib.rs | 11 +++++++++++ .../zome_idx_planning/src/lib.rs | 11 +++++++++++ .../zome_observation/src/lib.rs | 7 +++++++ .../rea_fulfillment/zome_planning/src/lib.rs | 9 +++++++++ 13 files changed, 80 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 53d245e0c..6e336c21f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -649,6 +649,7 @@ dependencies = [ "hc_zome_rea_fulfillment_rpc", "hc_zome_rea_fulfillment_storage", "hc_zome_rea_fulfillment_storage_consts", + "hdk", "hdk_records", "hdk_semantic_indexes_client_lib", "paste", diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index d4fb7096b..5d97f29c8 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -248,11 +248,18 @@ pub fn manage_index( hdk::prelude::debug!("source: {:?}", source); hdk::prelude::debug!("dest_addresses: {:?}", dest_addresses); hdk::prelude::debug!("remove_addresses: {:?}", remove_addresses); - // hdk::prelude::debug!("origin_zome_name_from_config: {:?}", origin_zome_name_from_config); - // hdk::prelude::debug!("origin_fn_name: {:?}", origin_fn_name); - // hdk::prelude::debug!("dest_zome_name_from_config: {:?}", dest_zome_name_from_config); - // hdk::prelude::debug!("dest_fn_name: {:?}", dest_fn_name); // hdk::prelude::debug!("remote_permission_id: {:?}", remote_permission_id); + let zome_props = hdk::prelude::dna_info()?.properties + .try_into()?; + let zome_props_2 = hdk::prelude::dna_info()?.properties + .try_into()?; + let oznfc = origin_zome_name_from_config.clone()(zome_props_2); + hdk::prelude::debug!("origin_zome_name_from_config: {:?}", oznfc); + hdk::prelude::debug!("origin_fn_name: {:?}", origin_fn_name.as_ref().to_string()); + let dznfc = dest_zome_name_from_config.clone()(zome_props); + hdk::prelude::debug!("dest_zome_name_from_config: {:?}", dznfc); + hdk::prelude::debug!("dest_fn_name: {:?}", dest_fn_name.as_ref().to_string()); + hdk::prelude::debug!("remote_permission_id: {:?}", remote_permission_id.as_ref().to_string()); // altering an index with no targets is a no-op if dest_addresses.len() == 0 && remove_addresses.len() == 0 { @@ -262,6 +269,8 @@ pub fn manage_index( let sources = vec![source.clone()]; let targets = prefilter_target_dnas(dest_addresses, remove_addresses)?; + hdk::prelude::debug!("manage_index::targets: {:?}", targets); + // Manage local index creation / removal let local_forward_add = targets.local_dests.0.iter() diff --git a/test/fulfillment/fulfillment_records_e2e.js b/test/fulfillment/fulfillment_records_e2e.js index dac985295..418ad8a03 100644 --- a/test/fulfillment/fulfillment_records_e2e.js +++ b/test/fulfillment/fulfillment_records_e2e.js @@ -59,6 +59,7 @@ 
runner.registerScenario('links can be written and read between DNAs', async (s, // ASSERT: check event readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) + console.log('readResponse', readResponse) t.ok(readResponse.economicEvent.fulfills, 'EconomicEvent.fulfills value present') t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved') t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentId, 'EconomicEvent.fulfills reference OK') diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs index 92fcae0f7..e1049514c 100644 --- a/zomes/rea_commitment/lib/src/lib.rs +++ b/zomes/rea_commitment/lib/src/lib.rs @@ -15,7 +15,7 @@ use hdk_records::{ read_record_entry_by_header, update_record, delete_record, - }, + }, dna_info, }; use hdk_semantic_indexes_client_lib::*; @@ -32,11 +32,15 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) - // handle link fields // :TODO: propogate errors! + let dna_i = dna_info()?; + hdk::prelude::debug!("dnainfo! {:?}", dna_i); + if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &commitment { let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?; hdk::prelude::debug!("handle_create_commitment::input_of::create_index: {:?}", e); }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &commitment { + hdk::prelude::debug!("handle_create_commitment::output_of: {:?}", output_of); let e = create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address))?; hdk::prelude::debug!("handle_create_commitment::output_of::create_index: {:?}", e); }; diff --git a/zomes/rea_commitment/zome_idx_planning/src/lib.rs b/zomes/rea_commitment/zome_idx_planning/src/lib.rs index f35bb0117..c001dc35f 100644 --- a/zomes/rea_commitment/zome_idx_planning/src/lib.rs +++ b/zomes/rea_commitment/zome_idx_planning/src/lib.rs @@ -15,6 +15,9 @@ fn entry_defs(_: ()) -> ExternResult { PathEntry::entry_def(), ProcessAddress::entry_def(), CommitmentAddress::entry_def(), + ProcessAddress::entry_def(), + AgreementAddress::entry_def(), + FulfillmentAddress::entry_def(), ])) } diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs index fa00bee08..62518f225 100644 --- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs +++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs @@ -15,6 +15,7 @@ fn entry_defs(_: ()) -> ExternResult { PathEntry::entry_def(), ProcessAddress::entry_def(), AgreementAddress::entry_def(), + FulfillmentAddress::entry_def(), EconomicEventAddress::entry_def(), ])) } diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index 15bd22a24..743133f07 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -26,6 +26,9 @@ use hc_zome_rea_fulfillment_storage::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) -> RecordAPIResult where S: AsRef { diff --git a/zomes/rea_fulfillment/lib_origin/Cargo.toml b/zomes/rea_fulfillment/lib_origin/Cargo.toml index eea43dcc2..f3992a730 
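// (editor's note on the lib_origin change below: handle_create_fulfillment
// fires a cross-DNA call to replicate the fulfillment into the destination
// DNA, and previously bound the result to `_pingback`, silently discarding
// any remote failure. Renaming it and logging the OtherCellResult via
// hdk::prelude::debug! at least surfaces remote errors until real error
// propagation lands, per the ":TODO: report any error" note kept in the diff.)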
100644 --- a/zomes/rea_fulfillment/lib_origin/Cargo.toml +++ b/zomes/rea_fulfillment/lib_origin/Cargo.toml @@ -6,6 +6,9 @@ edition = "2018" [dependencies] paste = "1.0" +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_fulfillment_storage_consts = { path = "../storage_consts" } diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 18c4064ef..8eaafcb0c 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -28,21 +28,26 @@ use hc_zome_rea_fulfillment_storage::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_lib::construct_response; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) -> RecordAPIResult where S: AsRef { let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address))?; - + let create_index_results = create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address))?; + hdk::prelude::debug!("handle_create_fulfillment::fulfills::create_index!: {:?}", create_index_results); + + // :TODO: report any error // update in the associated foreign DNA as well - let _pingback: OtherCellResult = call_zome_method( - fulfillment.get_fulfilled_by(), - &REPLICATE_CREATE_API_METHOD, - CreateParams { fulfillment: fulfillment.to_owned() }, + let pingback: OtherCellResult = call_zome_method( + fulfillment.get_fulfilled_by(), + &REPLICATE_CREATE_API_METHOD, + CreateParams { fulfillment: fulfillment.to_owned() }, ); - // :TODO: report any error + hdk::prelude::debug!("handle_create_fulfillment::call_zome_method::{:?}: {:?}", REPLICATE_CREATE_API_METHOD, pingback); construct_response(&fulfillment_address, &revision_id, &entry_resp) } diff --git a/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs b/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs index c01a44322..300a20855 100644 --- a/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs +++ b/zomes/rea_fulfillment/zome_idx_observation/src/lib.rs @@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_fulfillment_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + EconomicEventAddress::entry_def(), + ])) +} + #[index_zome] struct Fulfillment { fulfilled_by: Local, diff --git a/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs b/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs index 582128de6..5a37b80bc 100644 --- a/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs +++ b/zomes/rea_fulfillment/zome_idx_planning/src/lib.rs @@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_fulfillment_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Fulfillment { fulfills: Local, diff --git a/zomes/rea_fulfillment/zome_observation/src/lib.rs b/zomes/rea_fulfillment/zome_observation/src/lib.rs index 438ff19f9..09a72a164 100644 --- a/zomes/rea_fulfillment/zome_observation/src/lib.rs +++ b/zomes/rea_fulfillment/zome_observation/src/lib.rs @@ -20,6 +20,13 @@ fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), FulfillmentAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: FULFILLMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, diff --git a/zomes/rea_fulfillment/zome_planning/src/lib.rs b/zomes/rea_fulfillment/zome_planning/src/lib.rs index 581ac4730..469d280ca 100644 --- a/zomes/rea_fulfillment/zome_planning/src/lib.rs +++ b/zomes/rea_fulfillment/zome_planning/src/lib.rs @@ -13,10 +13,19 @@ use hc_zome_rea_fulfillment_lib_origin::*; use hc_zome_rea_fulfillment_rpc::*; use hc_zome_rea_fulfillment_storage_consts::*; + #[hdk_extern] fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), + FulfillmentAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: FULFILLMENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, From e1a334614e8b1638f926405a1a3ae91fea2417ed Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 26 Apr 2022 13:08:07 +1000 Subject: [PATCH 133/181] fix agreement index zome missing entrydefs --- zomes/rea_agreement/zome_idx_agreement/src/lib.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/zomes/rea_agreement/zome_idx_agreement/src/lib.rs b/zomes/rea_agreement/zome_idx_agreement/src/lib.rs index 7fdeb8f8b..d8eaf3342 100644 --- a/zomes/rea_agreement/zome_idx_agreement/src/lib.rs +++ b/zomes/rea_agreement/zome_idx_agreement/src/lib.rs @@ -1,14 +1,24 @@ /** * Agreement query indexes for agreement DNA * - * :TODO: - * * @package Holo-REA * @since 2021-09-06 */ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_agreement_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// 
along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + AgreementAddress::entry_def(), + EconomicEventAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Agreement { economic_events: Remote, From 9fa89adbff89a7b0f4b3a15420c97a0bc0233105 Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 26 Apr 2022 13:29:49 +1000 Subject: [PATCH 134/181] sort IDs clientside in agreement link test pending resolution to #220 --- test/agreement/test_agreement_links.js | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/test/agreement/test_agreement_links.js b/test/agreement/test_agreement_links.js index 40d4b59b4..f17e22e15 100644 --- a/test/agreement/test_agreement_links.js +++ b/test/agreement/test_agreement_links.js @@ -5,6 +5,7 @@ const { buildPlayer, mockIdentifier, mockAgentId, + sortById, } = require('../init') const runner = buildRunner() @@ -148,12 +149,19 @@ runner.registerScenario('Agreement links & queries', async (s, t) => { } } `) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedCIds = [{ id: cId }, { id: c2Id }].sort(sortById) + resp.data.agreement.commitments.sort(sortById) + const sortedEIds = [{ id: eId }, { id: e2Id }].sort(sortById) + resp.data.agreement.economicEvents.sort(sortById) + t.equal(resp.data.agreement.commitments.length, 2, '2nd commitment ref added') - t.equal(resp.data.agreement.commitments[0].id, c2Id, 'commitment ref 1 OK') - t.equal(resp.data.agreement.commitments[1].id, cId, 'commitment ref 2 OK') + t.equal(resp.data.agreement.commitments[0].id, sortedCIds[0].id, 'commitment ref 1 OK') + t.equal(resp.data.agreement.commitments[1].id, sortedCIds[1].id, 'commitment ref 2 OK') t.equal(resp.data.agreement.economicEvents.length, 2, '2nd event ref added') - t.equal(resp.data.agreement.economicEvents[0].id, e2Id, 'event ref 1 OK') - t.equal(resp.data.agreement.economicEvents[1].id, eId, 'event ref 2 OK') + t.equal(resp.data.agreement.economicEvents[0].id, sortedEIds[0].id, 'event ref 1 OK') + t.equal(resp.data.agreement.economicEvents[1].id, sortedEIds[1].id, 'event ref 2 OK') }) runner.run() From 7491cfe79c50ecbd5b7198ab5127b70dd999127d Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Mon, 25 Apr 2022 20:54:31 -0700 Subject: [PATCH 135/181] fix most of test/specification #249 --- test/specification/test_incoming_links.js | 20 ++++++------------- .../test_processspecification_crud.js | 12 +++++++---- .../test_resourcespecification_crud.js | 12 +++++++---- test/specification/test_unit_crud.js | 16 +++++++++------ 4 files changed, 32 insertions(+), 28 deletions(-) diff --git a/test/specification/test_incoming_links.js b/test/specification/test_incoming_links.js index 58282996a..eb4996b07 100644 --- a/test/specification/test_incoming_links.js +++ b/test/specification/test_incoming_links.js @@ -3,29 +3,21 @@ const { buildConfig, buildRunner, buildPlayer, + mockAgentId, } = require('../init') const runner = buildRunner() -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), - specification: getDNA('specification'), -}, { - vf_observation: ['planning', 'observation'], - vf_specification: ['observation', 'specification'], -}) +const config = buildConfig() -const tempProviderAgentId = 'some-agent-provider' -const tempReceiverAgentId = 'some-agent-receiver' const fillerProps = { - provider: 
tempProviderAgentId, - receiver: tempReceiverAgentId, + provider: mockAgentId(), + receiver: mockAgentId(), hasPointInTime: '2019-11-19T04:27:55.056Z', } runner.registerScenario('inbound Specification link references', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const alice = await buildPlayer(s, config, ['observation', 'planning', 'specification']) // setup some records for linking to let resp = await alice.graphQL(` @@ -84,7 +76,7 @@ runner.registerScenario('inbound Specification link references', async (s, t) => await s.consistency() t.ok(resp.data.res.resourceSpecification.id, 'resource specification created') - t.equal(resp.data.res.resourceSpecification.defaultUnitOfEffort.id, 'm', 'resource specification default unit ok') + t.ok(resp.data.res.resourceSpecification.defaultUnitOfEffort.id, 'resource specification default unit ok') const rsId = resp.data.res.resourceSpecification.id // test simple links diff --git a/test/specification/test_processspecification_crud.js b/test/specification/test_processspecification_crud.js index 87fac6801..5598ff9d9 100644 --- a/test/specification/test_processspecification_crud.js +++ b/test/specification/test_processspecification_crud.js @@ -25,6 +25,7 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { res: createProcessSpecification(processSpecification: $rs) { processSpecification { id + revisionId } } } @@ -56,6 +57,7 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { res: updateProcessSpecification(processSpecification: $rs) { processSpecification { id + revisionId } } } @@ -65,12 +67,14 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { await s.consistency() t.equal(updateResp.data.res.processSpecification.id, psId, 'record updated') + const updatedPsRevId = updateResp.data.res.processSpecification.revisionId // now we fetch the Entry again to check that the update was successful const updatedGetResp = await alice.graphQL(` query($id: ID!) { res: processSpecification(id: $id) { id + revisionId name note } @@ -78,14 +82,14 @@ runner.registerScenario('ProcessSpecification record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(updatedGetResp.data.res, { id: psId, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: psId, revisionId: updatedPsRevId, ...updatedExampleEntry }, 'record updated OK') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteProcessSpecification(id: $id) + mutation($revisionId: ID!) 
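// (editor's note: the specification CRUD changes in this patch follow one
// rule change -- updates return a fresh `revisionId` (the record's latest
// HeaderHash) and deletes must address that revisionId rather than the record
// `id` -- so each test threads the newest revisionId forward. The pattern, as
// applied to all three record types here:)
const { revisionId } = updateResp.data.res.processSpecification
const deleted = await alice.graphQL(
  `mutation($revisionId: ID!) { res: deleteProcessSpecification(revisionId: $revisionId) }`,
  { revisionId },
)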
{ + res: deleteProcessSpecification(revisionId: $revisionId) } `, { - id: psId, + revisionId: updatedPsRevId, }) await s.consistency() diff --git a/test/specification/test_resourcespecification_crud.js b/test/specification/test_resourcespecification_crud.js index 051849809..33e5cf116 100644 --- a/test/specification/test_resourcespecification_crud.js +++ b/test/specification/test_resourcespecification_crud.js @@ -27,6 +27,7 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { res: createResourceSpecification(resourceSpecification: $rs) { resourceSpecification { id + revisionId } } } @@ -59,12 +60,14 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { res: updateResourceSpecification(resourceSpecification: $rs) { resourceSpecification { id + revisionId } } } `, { rs: { revisionId: rsRev, ...updatedExampleEntry }, }) + const updatedRsRevId = updateResp.data.res.resourceSpecification.revisionId await s.consistency() t.equal(updateResp.data.res.resourceSpecification.id, rsId, 'record update OK') @@ -74,6 +77,7 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { query($id: ID!) { res: resourceSpecification(id: $id) { id + revisionId name image note @@ -83,14 +87,14 @@ runner.registerScenario('ResourceSpecification record API', async (s, t) => { id: rsId, }) - t.deepEqual(updatedGetResp.data.res, { id: rsId, ...updatedExampleEntry }, 'record properties updated') + t.deepEqual(updatedGetResp.data.res, { id: rsId, revisionId: updatedRsRevId, ...updatedExampleEntry }, 'record properties updated') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteResourceSpecification(id: $id) + mutation($revisionId: ID!) { + res: deleteResourceSpecification(revisionId: $revisionId) } `, { - id: rsId, + revisionId: updatedRsRevId, }) await s.consistency() diff --git a/test/specification/test_unit_crud.js b/test/specification/test_unit_crud.js index 427b8fe73..88e9d3339 100644 --- a/test/specification/test_unit_crud.js +++ b/test/specification/test_unit_crud.js @@ -25,6 +25,7 @@ runner.registerScenario('Unit record API', async (s, t) => { res: createUnit(unit: $rs) { unit { id + revisionId } } } @@ -37,11 +38,11 @@ runner.registerScenario('Unit record API', async (s, t) => { t.equal(createResp.data.res.unit.id.split(':')[0], exampleEntry.symbol, 'record index set') let uId = createResp.data.res.unit.id let uRevision = createResp.data.res.unit.revisionId - const getResp = await alice.graphQL(` query($id: ID!) { res: unit(id: $id) { id + revisionId label symbol } @@ -50,19 +51,21 @@ runner.registerScenario('Unit record API', async (s, t) => { id: uId, }) - t.deepEqual(getResp.data.res, { 'id': uId, ...exampleEntry }, 'record read OK') + t.deepEqual(getResp.data.res, { 'id': uId, revisionId: uRevision, ...exampleEntry }, 'record read OK') const updateResp = await alice.graphQL(` mutation($rs: UnitUpdateParams!) { res: updateUnit(unit: $rs) { unit { id + revisionId } } } `, { rs: { revisionId: uRevision, ...updatedExampleEntry }, }) + const updatedUnitRevId = updateResp.data.res.unit.revisionId await s.consistency() t.notEqual(updateResp.data.res.unit.id, uId, 'update operation succeeded') @@ -74,6 +77,7 @@ runner.registerScenario('Unit record API', async (s, t) => { query($id: ID!) 
{ res: unit(id: $id) { id + revisionId label symbol } @@ -82,14 +86,14 @@ runner.registerScenario('Unit record API', async (s, t) => { id: uId, }) - t.deepEqual(updatedGetResp.data.res, { id: uId, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: uId, revisionId: updatedUnitRevId, ...updatedExampleEntry }, 'record updated OK') const deleteResult = await alice.graphQL(` - mutation($id: ID!) { - res: deleteUnit(id: $id) + mutation($revisionId: ID!) { + res: deleteUnit(revisionId: $revisionId) } `, { - id: uId, + revisionId: updatedUnitRevId, }) await s.consistency() From 08a97020956af97b14bcbe4b4f9b93d4c1039b9b Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 26 Apr 2022 13:56:14 +1000 Subject: [PATCH 136/181] fix agreement index update method names in DNA config --- bundles/dna_templates/agreement/dna.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bundles/dna_templates/agreement/dna.yaml b/bundles/dna_templates/agreement/dna.yaml index c67e44108..1d1ba421c 100644 --- a/bundles/dna_templates/agreement/dna.yaml +++ b/bundles/dna_templates/agreement/dna.yaml @@ -9,9 +9,9 @@ properties: remote_auth: permissions: - extern_id: index_agreement_economic_events - allowed_method: [agreement_index, index_economic_events] + allowed_method: [agreement_index, index_agreement_economic_events] - extern_id: index_agreement_commitments - allowed_method: [agreement_index, index_commitments] + allowed_method: [agreement_index, index_agreement_commitments] zomes: # application zomes From 7c66addd11f50efc6b2113a8172ffe828214a456 Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 26 Apr 2022 13:58:49 +1000 Subject: [PATCH 137/181] index config fixes for observation, planning & specification DNAs --- bundles/dna_templates/observation/dna.yaml | 8 ++++---- bundles/dna_templates/planning/dna.yaml | 2 +- bundles/dna_templates/specification/dna.yaml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bundles/dna_templates/observation/dna.yaml b/bundles/dna_templates/observation/dna.yaml index 04e73b8e4..f7a2ecf12 100644 --- a/bundles/dna_templates/observation/dna.yaml +++ b/bundles/dna_templates/observation/dna.yaml @@ -30,13 +30,13 @@ properties: remote_auth: permissions: - extern_id: index_process_committed_inputs - allowed_method: [process_index, index_committed_inputs] + allowed_method: [process_index, index_process_committed_inputs] - extern_id: index_process_committed_outputs - allowed_method: [process_index, index_committed_outputs] + allowed_method: [process_index, index_process_committed_outputs] - extern_id: index_process_intended_inputs - allowed_method: [process_index, index_intended_inputs] + allowed_method: [process_index, index_process_intended_inputs] - extern_id: index_process_intended_outputs - allowed_method: [process_index, index_intended_outputs] + allowed_method: [process_index, index_process_intended_outputs] - extern_id: create_fulfillment allowed_method: [fulfillment, fulfillment_created] diff --git a/bundles/dna_templates/planning/dna.yaml b/bundles/dna_templates/planning/dna.yaml index 6568fc2a6..62cd50df1 100644 --- a/bundles/dna_templates/planning/dna.yaml +++ b/bundles/dna_templates/planning/dna.yaml @@ -24,7 +24,7 @@ properties: record_storage_zome: satisfaction remote_auth: permissions: - - extern_id: index_intent_proposals + - extern_id: index_intent_proposed_in allowed_method: [intent_index, index_intent_proposed_in] zomes: diff --git a/bundles/dna_templates/specification/dna.yaml 
b/bundles/dna_templates/specification/dna.yaml index e5a4835b4..3f15dda3c 100644 --- a/bundles/dna_templates/specification/dna.yaml +++ b/bundles/dna_templates/specification/dna.yaml @@ -7,7 +7,7 @@ properties: remote_auth: permissions: - extern_id: index_resource_specification_conforming_resources - allowed_method: [resource_specification_index, index_conforming_resources] + allowed_method: [resource_specification_index, index_resource_specification_conforming_resources] - extern_id: read_resource_specification allowed_method: [resource_specification, get_resource_specification] zomes: From 669903220670cb558edb1c1dfc9b660a02b9a701 Mon Sep 17 00:00:00 2001 From: pospi Date: Tue, 26 Apr 2022 14:00:20 +1000 Subject: [PATCH 138/181] fix empty config for locally-hosted index zome throwing an unnecessary error when remote indexes are being updated --- lib/hdk_semantic_indexes/client/src/lib.rs | 54 ++++++++++++++-------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs index 5d97f29c8..847e62ee9 100644 --- a/lib/hdk_semantic_indexes/client/src/lib.rs +++ b/lib/hdk_semantic_indexes/client/src/lib.rs @@ -273,27 +273,45 @@ pub fn manage_index( // Manage local index creation / removal - let local_forward_add = targets.local_dests.0.iter() - .map(|dest| { - request_sync_local_index( - origin_zome_name_from_config, origin_fn_name, - dest, &sources, &vec![], - ) - }); - let local_forward_remove = targets.local_dests.1.iter() - .map(|dest| { - request_sync_local_index( - origin_zome_name_from_config, origin_fn_name, - dest, &vec![], &sources, - ) - }); - let local_reciprocal_update = std::iter::once(request_sync_local_index( - dest_zome_name_from_config, dest_fn_name, - source, targets.local_dests.0.as_slice(), targets.local_dests.1.as_slice(), - )); + let empty = vec![]; + + let local_forward_add = ( + if targets.local_dests.0.len() > 0 { targets.local_dests.0.iter() } + else { empty.iter() } + ).map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &sources, &vec![], + ) + }); + + let local_forward_remove = ( + if targets.local_dests.1.len() > 0 { targets.local_dests.1.iter() } + else { empty.iter() } + ).map(|dest| { + request_sync_local_index( + origin_zome_name_from_config, origin_fn_name, + dest, &vec![], &sources, + ) + }); + + let mut local_updates = vec![]; + let local_reciprocal_update = + if targets.local_dests.0.len() > 0 || targets.local_dests.1.len() > 0 { + let mut others = vec![request_sync_local_index( + dest_zome_name_from_config, dest_fn_name, + source, targets.local_dests.0.as_slice(), targets.local_dests.1.as_slice(), + )]; + local_updates.append(&mut others); + local_updates.to_owned() + } else { vec![] }; // Manage remote index creation / removal & append to resultset + // :TODO: improve error handling by asserting that successful RPC + // calls fired for local targets + remote targets add up to equal + // the number of input `dest_addresses` & `remove_addresses` + Ok(std::iter::empty() .chain(local_forward_add) .chain(local_forward_remove) From 811645e8937299a43d78bf9087f2c5f1ccd6fa0a Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 26 Apr 2022 08:24:12 -0700 Subject: [PATCH 139/181] fixes #246 test/process --- Cargo.lock | 1 + zomes/rea_intent/lib/Cargo.toml | 5 +++++ zomes/rea_intent/lib/src/lib.rs | 6 ++++-- zomes/rea_intent/zome_idx_planning/src/lib.rs | 11 +++++++++++ zomes/rea_process/zome_idx_observation/src/lib.rs | 2 ++ 
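(editor's note on patch 138 above: the verbose refactor has one narrow intent.
When every target of an index update lives in a remote DNA, the local
reciprocal-index update must become a no-op instead of resolving a local zome
name that was never configured for this DNA. Condensed from the diff, the
guard it introduces is:

    if targets.local_dests.0.len() > 0 || targets.local_dests.1.len() > 0 {
        // resolve the local zome name from config and sync the reciprocal index
    } // otherwise: no local work to do, and no error for the absent config
)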
5 files changed, 23 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6e336c21f..cae2bfd20 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -734,6 +734,7 @@ version = "0.1.0" dependencies = [ "hc_zome_rea_intent_rpc", "hc_zome_rea_intent_storage", + "hdk", "hdk_records", "hdk_semantic_indexes_client_lib", "paste", diff --git a/zomes/rea_intent/lib/Cargo.toml b/zomes/rea_intent/lib/Cargo.toml index ed04b06b5..bbae58647 100644 --- a/zomes/rea_intent/lib/Cargo.toml +++ b/zomes/rea_intent/lib/Cargo.toml @@ -6,6 +6,11 @@ edition = "2018" [dependencies] paste = "1.0" + +# :TODO: remove if removing debug outputs from this crate +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_intent_storage = { path = "../storage" } diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index 9fbc6afff..379ce31a0 100644 --- a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -33,10 +33,12 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record // handle link fields if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &intent { - create_index!(intent.input_of(input_of), process.intended_inputs(&base_address))?; + let e = create_index!(intent.input_of(input_of), process.intended_inputs(&base_address))?; + hdk::prelude::debug!("handle_create_intent::input_of::create_index!: {:?}", e); }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = &intent { - create_index!(intent.output_of(output_of), process.intended_outputs(&base_address))?; + let e = create_index!(intent.output_of(output_of), process.intended_outputs(&base_address))?; + hdk::prelude::debug!("handle_create_intent::output_of::create_index!: {:?}", e); }; // return entire record structure diff --git a/zomes/rea_intent/zome_idx_planning/src/lib.rs b/zomes/rea_intent/zome_idx_planning/src/lib.rs index a67bf390e..2b232b605 100644 --- a/zomes/rea_intent/zome_idx_planning/src/lib.rs +++ b/zomes/rea_intent/zome_idx_planning/src/lib.rs @@ -7,6 +7,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_intent_rpc::*; +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
 in dependent crates
+#[hdk_extern]
+fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
+    Ok(EntryDefsCallbackResult::from(vec![
+        PathEntry::entry_def(),
+        IntentAddress::entry_def(),
+        ProcessAddress::entry_def(),
+    ]))
+}
+
 #[index_zome]
 struct Intent {
     satisfied_by: Local,
diff --git a/zomes/rea_process/zome_idx_observation/src/lib.rs b/zomes/rea_process/zome_idx_observation/src/lib.rs
index a7ffa7b5d..0fe4a7df0 100644
--- a/zomes/rea_process/zome_idx_observation/src/lib.rs
+++ b/zomes/rea_process/zome_idx_observation/src/lib.rs
@@ -13,6 +13,8 @@ fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
+        IntentAddress::entry_def(),
+        CommitmentAddress::entry_def(),
         ProcessAddress::entry_def(),
         EconomicEventAddress::entry_def(),
     ]))

From e0b11fb7672becf9a41424e583d7f9d58eac3a79 Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Tue, 26 Apr 2022 10:19:55 -0700
Subject: [PATCH 140/181] almost complete #254 test/flows

---
 Cargo.lock                                    |  3 ++
 modules/vf-graphql-holochain/connection.ts    |  8 ++++++
 .../resolvers/commitment.ts                   | 14 ++++++----
 .../resolvers/economicEvent.ts                | 14 ++++++----
 .../resolvers/fulfillment.ts                  |  6 ++--
 .../vf-graphql-holochain/resolvers/intent.ts  | 11 +++++---
 .../vf-graphql-holochain/resolvers/process.ts | 28 +++++++++----------
 .../resolvers/satisfaction.ts                 |  3 +-
 .../zome_idx_planning/src/lib.rs              |  1 +
 .../zome_idx_observation/src/lib.rs           |  2 ++
 zomes/rea_economic_resource/lib/src/lib.rs    | 10 ++++---
 .../lib_destination/Cargo.toml                |  4 +++
 .../lib_destination/src/lib.rs                |  3 +-
 zomes/rea_intent/zome_idx_planning/src/lib.rs |  1 +
 zomes/rea_proposed_intent/lib/src/lib.rs      |  6 ++--
 zomes/rea_proposed_to/lib/Cargo.toml          |  4 +++
 zomes/rea_proposed_to/lib/src/lib.rs          |  3 +-
 .../lib_destination/Cargo.toml                |  4 +++
 .../lib_destination/src/lib.rs                |  3 +-
 zomes/rea_satisfaction/lib_origin/src/lib.rs  | 27 ++++++++++--------
 .../zome_idx_observation/src/lib.rs           | 11 ++++++++
 .../zome_idx_planning/src/lib.rs              | 12 ++++++++
 22 files changed, 125 insertions(+), 53 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index cae2bfd20..d0c2abdf4 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -636,6 +636,7 @@ dependencies = [
  "hc_zome_rea_fulfillment_lib",
  "hc_zome_rea_fulfillment_rpc",
  "hc_zome_rea_fulfillment_storage",
+ "hdk",
  "hdk_records",
  "hdk_semantic_indexes_client_lib",
  "paste",
@@ -1028,6 +1029,7 @@ version = "0.1.0"
 dependencies = [
  "hc_zome_rea_proposed_to_rpc",
  "hc_zome_rea_proposed_to_storage",
+ "hdk",
  "hdk_records",
  "hdk_semantic_indexes_client_lib",
  "paste",
@@ -1155,6 +1157,7 @@ dependencies = [
  "hc_zome_rea_satisfaction_lib",
  "hc_zome_rea_satisfaction_rpc",
  "hc_zome_rea_satisfaction_storage",
+ "hdk",
  "hdk_records",
  "hdk_semantic_indexes_client_lib",
  "paste",
diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts
index e958e45cf..d1abdc02a 100644
--- a/modules/vf-graphql-holochain/connection.ts
+++ b/modules/vf-graphql-holochain/connection.ts
@@ -287,3 +287,11 @@ const zomeFunction = (socketURI: string, cell_id: CellId, zome_name: string, fn_
  */
 export const mapZomeFn = (mappings: DNAIdMappings, socketURI: string, instance: string, zome: string, fn: string, skipEncodeDecode?: boolean) =>
   zomeFunction(socketURI, (mappings && mappings[instance]), zome, fn, skipEncodeDecode)
+
+
+export const extractEdges = <T>(withEdges: { edges: { node: T }[] }): T[] => {
+  if (!withEdges.edges || !withEdges.edges.length) {
+    return []
+  }
+  return withEdges.edges.map(({ node }) => node)
+}
\ No newline at end of file
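The `extractEdges` helper introduced above unwraps the Relay-style connection payloads (`{ edges: [{ node }] }`) that the index zomes now return. A minimal usage sketch with the restored `<T>` type parameter; the `Fulfillment` stand-in type and sample payload are illustrative, not part of the patch:

  // stand-in for the generated vf-graphql type
  interface Fulfillment { id: string }

  const extractEdges = <T>(withEdges: { edges: { node: T }[] }): T[] => {
    if (!withEdges.edges || !withEdges.edges.length) {
      return []
    }
    return withEdges.edges.map(({ node }) => node)
  }

  // index zomes resolve to a connection shape; extractEdges flattens it
  const sample = { edges: [{ node: { id: 'fulfillment-1' } }, { node: { id: 'fulfillment-2' } }] }
  const fulfillments: Fulfillment[] = extractEdges(sample)
  // fulfillments => [{ id: 'fulfillment-1' }, { id: 'fulfillment-2' }]

The resolvers below all adopt this shape, which is why their assertions in the tests later in this series change from `results[0].commitment` style accessors to `edges[0].node`.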
diff --git a/modules/vf-graphql-holochain/resolvers/commitment.ts b/modules/vf-graphql-holochain/resolvers/commitment.ts
index d6e8de780..b20b22b44 100644
--- a/modules/vf-graphql-holochain/resolvers/commitment.ts
+++ b/modules/vf-graphql-holochain/resolvers/commitment.ts
@@ -6,7 +6,7 @@
  */
 import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
-import { mapZomeFn } from '../connection'
+import { extractEdges, mapZomeFn } from '../connection'
 import {
   Agent,
@@ -39,11 +39,13 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
   return Object.assign(
     {
       fulfilledBy: async (record: Commitment): Promise<Fulfillment[]> => {
-        return (await readFulfillments({ params: { fulfills: record.id } })).map(({ fulfillment }) => fulfillment)
+        const results = await readFulfillments({ params: { fulfills: record.id } })
+        return extractEdges(results)
       },
       satisfies: async (record: Commitment): Promise<Satisfaction[]> => {
-        return (await readSatisfactions({ params: { satisfiedBy: record.id } })).map(({ satisfaction }) => satisfaction)
+        const results = await readSatisfactions({ params: { satisfiedBy: record.id } })
+        return extractEdges(results)
       },
     },
     (hasAgent ? {
@@ -57,11 +59,13 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
     } : {}),
     (hasObservation ? {
       inputOf: async (record: Commitment): Promise<Process> => {
-        return (await readProcesses({ params: { committedInputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { committedInputs: record.id } })
+        return results.edges.pop()['node']
       },
       outputOf: async (record: Commitment): Promise<Process> => {
-        return (await readProcesses({ params: { committedOutputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { committedOutputs: record.id } })
+        return results.edges.pop()['node']
       },
     } : {}),
     (hasKnowledge ? {
diff --git a/modules/vf-graphql-holochain/resolvers/economicEvent.ts b/modules/vf-graphql-holochain/resolvers/economicEvent.ts
index 78ebb00b6..cb2cc4818 100644
--- a/modules/vf-graphql-holochain/resolvers/economicEvent.ts
+++ b/modules/vf-graphql-holochain/resolvers/economicEvent.ts
@@ -6,7 +6,7 @@
  */
 import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
-import { mapZomeFn } from '../connection'
+import { extractEdges, mapZomeFn } from '../connection'
 import {
   Agent,
@@ -42,11 +42,13 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
   return Object.assign(
     {
       inputOf: async (record: EconomicEvent): Promise<Process> => {
-        return (await readProcesses({ params: { inputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { inputs: record.id } })
+        return results.edges.pop()['node']
       },
       outputOf: async (record: EconomicEvent): Promise<Process> => {
-        return (await readProcesses({ params: { outputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { outputs: record.id } })
+        return results.edges.pop()['node']
       },
       resourceInventoriedAs: async (record: EconomicEvent): Promise<EconomicResource> => {
@@ -65,11 +67,13 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
     } : {}),
     (hasPlanning ? {
       fulfills: async (record: EconomicEvent): Promise<Fulfillment[]> => {
-        return (await readFulfillments({ params: { fulfilledBy: record.id } })).map(({ fulfillment }) => fulfillment)
+        const results = await readFulfillments({ params: { fulfilledBy: record.id } })
+        return extractEdges(results)
       },
       satisfies: async (record: EconomicEvent): Promise<Satisfaction[]> => {
-        return (await readSatisfactions({ params: { satisfiedBy: record.id } })).map(({ satisfaction }) => satisfaction)
+        const results = await readSatisfactions({ params: { satisfiedBy: record.id } })
+        return extractEdges(results)
       },
     } : {}),
     (hasKnowledge ? {
diff --git a/modules/vf-graphql-holochain/resolvers/fulfillment.ts b/modules/vf-graphql-holochain/resolvers/fulfillment.ts
index a901b7831..0ee39a9ec 100644
--- a/modules/vf-graphql-holochain/resolvers/fulfillment.ts
+++ b/modules/vf-graphql-holochain/resolvers/fulfillment.ts
@@ -23,12 +23,14 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
   return Object.assign(
     {
       fulfills: injectTypename('Commitment', async (record: Fulfillment): Promise<Commitment> => {
-        return (await readCommitments({ params: { fulfilledBy: record.id } })).pop()['commitment']
+        const results = await readCommitments({ params: { fulfilledBy: record.id } })
+        return results.edges.pop()['node']
       }),
     },
     (hasObservation ? {
       fulfilledBy: injectTypename('EconomicEvent', async (record: Fulfillment): Promise<EconomicEvent> => {
-        return (await readEvents({ params: { fulfills: record.id } })).pop()['economicEvent']
+        const results = await readEvents({ params: { fulfills: record.id } })
+        return results.edges.pop()['node']
       }),
     } : {}),
   )
diff --git a/modules/vf-graphql-holochain/resolvers/intent.ts b/modules/vf-graphql-holochain/resolvers/intent.ts
index 46948ad90..d29c1d688 100644
--- a/modules/vf-graphql-holochain/resolvers/intent.ts
+++ b/modules/vf-graphql-holochain/resolvers/intent.ts
@@ -6,7 +6,7 @@
  */
 import { DNAIdMappings, DEFAULT_VF_MODULES, VfModule } from '../types'
-import { mapZomeFn } from '../connection'
+import { extractEdges, mapZomeFn } from '../connection'
 import {
   Maybe,
@@ -39,7 +39,8 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
   return Object.assign(
     {
       satisfiedBy: async (record: Intent): Promise<Satisfaction[]> => {
-        return (await readSatisfactions({ params: { satisfies: record.id } })).map(({ satisfaction }) => satisfaction)
+        const results = await readSatisfactions({ params: { satisfies: record.id } })
+        return extractEdges(results)
       },
     },
     (hasAgent ? {
@@ -53,11 +54,13 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
     } : {}),
     (hasObservation ? {
       inputOf: async (record: Intent): Promise<Process> => {
-        return (await readProcesses({ params: { intendedInputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { intendedInputs: record.id } })
+        return results.edges.pop()['node']
       },
       outputOf: async (record: Intent): Promise<Process> => {
-        return (await readProcesses({ params: { intendedOutputs: record.id } })).pop()['process']
+        const results = await readProcesses({ params: { intendedOutputs: record.id } })
+        return results.edges.pop()['node']
       },
     } : {}),
     (hasProposal ? {
diff --git a/modules/vf-graphql-holochain/resolvers/process.ts b/modules/vf-graphql-holochain/resolvers/process.ts
index a5e340d8c..2e3f6367d 100644
--- a/modules/vf-graphql-holochain/resolvers/process.ts
+++ b/modules/vf-graphql-holochain/resolvers/process.ts
@@ -6,7 +6,7 @@
  */
 import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types'
-import { mapZomeFn } from '../connection'
+import { mapZomeFn, extractEdges } from '../connection'
 import {
   Process,
@@ -28,36 +28,34 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
   return Object.assign(
     {
       inputs: injectTypename('EconomicEvent', async (record: Process): Promise<EconomicEvent[]> => {
-        const economicEvents = await readEvents({ params: { inputOf: record.id } })
-        if (!economicEvents.edges || !economicEvents.edges.length) {
-          return []
-        }
-        return economicEvents.edges.map(({ node }) => node)
+        const results = await readEvents({ params: { inputOf: record.id } })
+        return extractEdges(results)
       }),
       outputs: injectTypename('EconomicEvent', async (record: Process): Promise<EconomicEvent[]> => {
-        const economicEvents = await readEvents({ params: { outputOf: record.id } })
-        if (!economicEvents.edges || !economicEvents.edges.length) {
-          return []
-        }
-        return economicEvents.edges.map(({ node }) => node)
+        const results = await readEvents({ params: { outputOf: record.id } })
+        return extractEdges(results)
       }),
     },
     (hasPlanning ? {
       committedInputs: injectTypename('Commitment', async (record: Process): Promise<Commitment[]> => {
-        return (await readCommitments({ params: { inputOf: record.id } })).map(({ commitment }) => commitment)
+        const results = await readCommitments({ params: { inputOf: record.id } })
+        return extractEdges(results)
       }),
       committedOutputs: injectTypename('Commitment', async (record: Process): Promise<Commitment[]> => {
-        return (await readCommitments({ params: { outputOf: record.id } })).map(({ commitment }) => commitment)
+        const results = await readCommitments({ params: { outputOf: record.id } })
+        return extractEdges(results)
       }),
       intendedInputs: async (record: Process): Promise<Intent[]> => {
-        return (await readIntents({ params: { inputOf: record.id } })).map(({ intent }) => intent)
+        const results = await readIntents({ params: { inputOf: record.id } })
+        return extractEdges(results)
       },
       intendedOutputs: async (record: Process): Promise<Intent[]> => {
-        return (await readIntents({ params: { outputOf: record.id } })).map(({ intent }) => intent)
+        const results = await readIntents({ params: { outputOf: record.id } })
+        return extractEdges(results)
       },
     } : {}),
     (hasKnowledge ? {
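Singular link fields in the resolvers above and below use `results.edges.pop()['node']`, which throws a `TypeError` when the index returns no edges (`pop()` on an empty array yields `undefined`). A hedged sketch of a safer companion helper; `extractFirstNode` is a hypothetical name, not part of this patch:

  // mirror extractEdges' guard instead of calling .pop() on a possibly-empty array
  const extractFirstNode = <T>(withEdges: { edges: { node: T }[] }): T | null => {
    if (!withEdges.edges || !withEdges.edges.length) {
      return null
    }
    return withEdges.edges[0].node
  }

  // e.g. inputOf: async (record) => extractFirstNode(await readProcesses({ params: { inputs: record.id } }))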
diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
index c50637800..064fe53da 100644
--- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts
+++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
@@ -48,7 +48,8 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
       },
       satisfies: async (record: Satisfaction): Promise<Intent> => {
-        return (await readIntents({ params: { satisfiedBy: record.id } })).pop()['intent']
+        const results = await readIntents({ params: { satisfiedBy: record.id } })
+        return results.edges.pop()['node']
       },
     }
   }
diff --git a/zomes/rea_commitment/zome_idx_planning/src/lib.rs b/zomes/rea_commitment/zome_idx_planning/src/lib.rs
index c001dc35f..a392b552d 100644
--- a/zomes/rea_commitment/zome_idx_planning/src/lib.rs
+++ b/zomes/rea_commitment/zome_idx_planning/src/lib.rs
@@ -14,6 +14,7 @@ fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
     Ok(EntryDefsCallbackResult::from(vec![
         PathEntry::entry_def(),
         ProcessAddress::entry_def(),
+        SatisfactionAddress::entry_def(),
         CommitmentAddress::entry_def(),
         ProcessAddress::entry_def(),
         AgreementAddress::entry_def(),
diff --git a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs
index 62518f225..573f0be72 100644
--- a/zomes/rea_economic_event/zome_idx_observation/src/lib.rs
+++ b/zomes/rea_economic_event/zome_idx_observation/src/lib.rs
@@ -15,7 +15,9 @@ fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {
         PathEntry::entry_def(),
         ProcessAddress::entry_def(),
         AgreementAddress::entry_def(),
+        SatisfactionAddress::entry_def(),
         FulfillmentAddress::entry_def(),
+        EconomicResourceAddress::entry_def(),
         EconomicEventAddress::entry_def(),
     ]))
 }
diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs
index c86eed78e..ada38edec 100644
--- a/zomes/rea_economic_resource/lib/src/lib.rs
+++ b/zomes/rea_economic_resource/lib/src/lib.rs
@@ -87,10 +87,12 @@ impl API for EconomicResourceZomePermissableDefault {
         // :NOTE: this will always run- resource without a specification ID would fail entry validation (implicit in the above)
         if let Some(conforms_to) = resource_spec {
-            let _ = create_index!(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address));
-        }
-        if let Some(contained_in) = resource_params.get_contained_in() {
-            let _ = create_index!(economic_resource(&base_address).contained_in(&contained_in))?;
+            let e = create_index!(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address));
+            hdk::prelude::debug!("create_inventory_from_event::conforms_to::create_index!: {:?}", e);
+        }
+        if let Some(contained_in) = resource_params.get_contained_in() {
+            let e = create_index!(economic_resource(&base_address).contained_in(&contained_in))?;
+            hdk::prelude::debug!("create_inventory_from_event::contained_in::create_index!: {:?}", e);
         };

         Ok((revision_id, base_address, entry_resp))
diff --git a/zomes/rea_fulfillment/lib_destination/Cargo.toml b/zomes/rea_fulfillment/lib_destination/Cargo.toml
index 01ff40093..7d249a913 100644
--- a/zomes/rea_fulfillment/lib_destination/Cargo.toml
+++ b/zomes/rea_fulfillment/lib_destination/Cargo.toml
@@ -6,6 +6,10 @@ edition = "2018"
 [dependencies]
 paste = "1.0"
+# :TODO: remove if removing debug logging
+# :DUPE: hdk-rust-revid
+hdk = "0.0.124"
+
 hdk_records = { path = "../../../lib/hdk_records" }
 hdk_semantic_indexes_client_lib = { path = 
"../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_fulfillment_storage = { path = "../storage" } diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index 743133f07..df667e99c 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -35,7 +35,8 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address))?; + let create_index_results = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address))?; + hdk::prelude::debug!("handle_create_fulfillment::fulfilled_by::create_index!: {:?}", create_index_results); // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs diff --git a/zomes/rea_intent/zome_idx_planning/src/lib.rs b/zomes/rea_intent/zome_idx_planning/src/lib.rs index 2b232b605..94f07808e 100644 --- a/zomes/rea_intent/zome_idx_planning/src/lib.rs +++ b/zomes/rea_intent/zome_idx_planning/src/lib.rs @@ -14,6 +14,7 @@ fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), IntentAddress::entry_def(), + SatisfactionAddress::entry_def(), ProcessAddress::entry_def(), ])) } diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index bbb979e41..a75b6ea63 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -27,8 +27,10 @@ pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: Create let (revision_id, base_address, entry_resp): (_, ProposedIntentAddress, EntryData) = create_record(&entry_def_id, proposed_intent.to_owned())?; // handle link fields - create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address))?; - create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address))?; + let r1 = create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address))?; + hdk::prelude::debug!("handle_create_proposed_intent::published_in::create_index!: {:?}", r1); + let r2 = create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address))?; + hdk::prelude::debug!("handle_create_proposed_intent::publishes::create_index!: {:?}", r2); Ok(construct_response(&base_address, &revision_id, &entry_resp)) } diff --git a/zomes/rea_proposed_to/lib/Cargo.toml b/zomes/rea_proposed_to/lib/Cargo.toml index 27193c2b6..fbcf52d97 100644 --- a/zomes/rea_proposed_to/lib/Cargo.toml +++ b/zomes/rea_proposed_to/lib/Cargo.toml @@ -6,6 +6,10 @@ edition = "2018" [dependencies] paste = "1.0" +# :TODO: remove if removing debug logging +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_proposed_to_storage = { path = "../storage" } diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 827f66648..864c3f8f1 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs 
@@ -27,7 +27,8 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest) let (revision_id, base_address, entry_resp): (_, ProposedToAddress, EntryData) = create_record(&entry_def_id, proposed_to.to_owned())?; // handle link fields - create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address))?; + let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address))?; + hdk::prelude::debug!("handle_create_proposed_to::proposed::create_index!: {:?}", r1); // :TODO: create index for retrieving all proposals for an agent diff --git a/zomes/rea_satisfaction/lib_destination/Cargo.toml b/zomes/rea_satisfaction/lib_destination/Cargo.toml index bb784e84d..48a5ae5b2 100644 --- a/zomes/rea_satisfaction/lib_destination/Cargo.toml +++ b/zomes/rea_satisfaction/lib_destination/Cargo.toml @@ -6,6 +6,10 @@ edition = "2018" [dependencies] paste = "1.0" +# :TODO: remove if removing debug logging +# :DUPE: hdk-rust-revid +hdk = "0.0.124" + hdk_records = { path = "../../../lib/hdk_records" } hdk_semantic_indexes_client_lib = { path = "../../../lib/hdk_semantic_indexes/client" } hc_zome_rea_satisfaction_storage = { path = "../storage" } diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index 1bc85473e..13cf3b5f2 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -35,7 +35,8 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address))?; + let r1 = create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address))?; + hdk::prelude::debug!("destination::handle_create_satisfaction::satisfied_by::create_index!: {:?}", r1); // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index 5e918c7ec..3965e1481 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -39,23 +39,26 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address))?; + let r1 = create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address))?; + hdk::prelude::debug!("origin::handle_create_satisfaction::satisfies::create_index!: {:?}", r1); // link entries which may be local or remote let event_or_commitment = satisfaction.get_satisfied_by(); if is_satisfiedby_local_commitment(event_or_commitment)? 
{ - // links to local commitment, create link index pair - create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address))?; + // links to local commitment, create link index pair + let r2 = create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address))?; + hdk::prelude::debug!("origin::handle_create_satisfaction::satisfied_by::create_index!: {:?}", r2); } else { - // links to remote event, ping associated foreign DNA & fail if there's an error - // :TODO: consider the implications of this in loosely coordinated multi-network spaces - // we assign a type to the response so that call_zome_method can - // effectively deserialize the response without failing - let _result: ResponseData = call_zome_method( - event_or_commitment, - &REPLICATE_CREATE_API_METHOD, - CreateParams { satisfaction: satisfaction.to_owned() }, - )?; + // links to remote event, ping associated foreign DNA & fail if there's an error + // :TODO: consider the implications of this in loosely coordinated multi-network spaces + // we assign a type to the response so that call_zome_method can + // effectively deserialize the response without failing + let _result: ResponseData = call_zome_method( + event_or_commitment, + &REPLICATE_CREATE_API_METHOD, + CreateParams { satisfaction: satisfaction.to_owned() }, + )?; + // hdk::prelude::debug!("origin::handle_create_satisfaction::call_zome_method::{:?}: {:?}", REPLICATE_CREATE_API_METHOD, result); } construct_response(&satisfaction_address, &revision_id, &entry_resp) diff --git a/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs b/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs index 8f46163fb..b6f389ce6 100644 --- a/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs +++ b/zomes/rea_satisfaction/zome_idx_observation/src/lib.rs @@ -8,6 +8,17 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_satisfaction_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + EconomicEventAddress::entry_def(), + SatisfactionAddress::entry_def(), + ])) +} + #[index_zome] struct Satisfaction { // :NOTE: this gets updated by shadowed local record storage zome, not the remote one in Planning DNA diff --git a/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs b/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs index 3b3daf014..dea1a0128 100644 --- a/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs +++ b/zomes/rea_satisfaction/zome_idx_planning/src/lib.rs @@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_satisfaction_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + IntentAddress::entry_def(), + SatisfactionAddress::entry_def(), + CommitmentAddress::entry_def(), + ])) +} + #[index_zome] struct Satisfaction { satisfies: Local, From eaa60382b6ab3b8584587dd137fd0b68c48d8b3e Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 26 Apr 2022 12:35:38 -0700 Subject: [PATCH 141/181] fixes related to #248 test/core-architecture. see that issue for remaining bugs --- .../test_record_links_cross_dna.js | 195 +++++++++--------- .../test_record_links_cross_zome.js | 32 +-- 2 files changed, 119 insertions(+), 108 deletions(-) diff --git a/test/core-architecture/test_record_links_cross_dna.js b/test/core-architecture/test_record_links_cross_dna.js index cb867c7cf..e64800858 100644 --- a/test/core-architecture/test_record_links_cross_dna.js +++ b/test/core-architecture/test_record_links_cross_dna.js @@ -1,145 +1,153 @@ const { - getDNA, buildConfig, + buildPlayer, buildRunner, + mockAgentId, + mockIdentifier, } = require('../init') const runner = buildRunner() -const config = buildConfig({ - observation: getDNA('observation'), - planning: getDNA('planning'), -}, { - vf_observation: ['planning', 'observation'], -}) +const config = buildConfig() const testEventProps = { action: 'consume', resourceClassifiedAs: ['some-resource-type'], - resourceQuantity: { hasNumericalValue: 1, hasUnit: 'dangling-unit-todo-tidy-up' }, - provider: 'agentid-1-todo', - receiver: 'agentid-2-todo', + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier(false) }, + provider: mockAgentId(false), + receiver: mockAgentId(false), due: '2019-11-19T04:29:55.056Z', } runner.registerScenario('updating remote link fields syncs fields and associated indexes', async (s, t) => { - const { alice } = await s.players({ alice: config }, true) + const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) // SCENARIO: write initial records const process = { name: 'context record for testing relationships', } - const pResp = await alice.call('observation', 'process', 'create_process', { process }) - t.ok(pResp.Ok.process && pResp.Ok.process.id, 'target record created successfully') + const pResp = await observation.call('process', 'create_process', { process }) + t.ok(pResp.process && pResp.process.id, 'target record created successfully') await s.consistency() - const processId = pResp.Ok.process.id + const processId = pResp.process.id const process2 = { name: 'second context record for testing relationships', } - const pResp2 = await alice.call('observation', 'process', 'create_process', { process: process2 }) - t.ok(pResp2.Ok.process && pResp2.Ok.process.id, 'secondary record created successfully') + const pResp2 = await observation.call('process', 'create_process', { process: process2 }) + t.ok(pResp2.process && pResp2.process.id, 'secondary record created successfully') await s.consistency() - const differentProcessId = pResp2.Ok.process.id + const differentProcessId = pResp2.process.id const iCommitment = { note: 'test input commitment', inputOf: processId, ...testEventProps, } - const icResp = await alice.call('planning', 'commitment', 'create_commitment', { commitment: iCommitment }) - t.ok(icResp.Ok.commitment && icResp.Ok.commitment.id, 'input record created successfully') - t.equal(icResp.Ok.commitment.inputOf, processId, 'field reference OK in write') + const icResp = await planning.call('commitment', 
'create_commitment', { commitment: iCommitment }) + t.ok(icResp.commitment && icResp.commitment.id, 'input record created successfully') + t.deepEqual(icResp.commitment.inputOf, processId, 'field reference OK in write') await s.consistency() - const iCommitmentId = icResp.Ok.commitment.id + const iCommitmentId = icResp.commitment.id + const iCommitmentRevisionId = icResp.commitment.revisionId // ASSERT: test forward link field - let readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, processId, 'field reference OK on read') + let readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, processId, 'field reference OK on read') // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - t.equal(readResponse.Ok.process - && readResponse.Ok.process.committedInputs - && readResponse.Ok.process.committedInputs[0], iCommitmentId, 'reciprocal field reference OK on read') + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.deepEqual(readResponse.process + && readResponse.process.committedInputs + && readResponse.process.committedInputs[0], iCommitmentId, 'reciprocal field reference OK on read') // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'field query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].commitment && readResponse.Ok[0].commitment.id, iCommitmentId, 'query index OK') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iCommitmentId, 'query index OK') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].process && readResponse.Ok[0].process.id, processId, 'reciprocal query index OK') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // SCENARIO: update link field const updateCommitment = { id: iCommitmentId, + revisionId: iCommitmentRevisionId, inputOf: differentProcessId, } - const ieResp2 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: updateCommitment }) - t.equal(ieResp2.Ok.commitment && ieResp2.Ok.commitment.inputOf, differentProcessId, 'record link field updated successfully') + const ieResp2 = await planning.call('commitment', 'update_commitment', { commitment: updateCommitment }) + t.deepEqual(ieResp2.commitment 
&& ieResp2.commitment.inputOf, differentProcessId, 'record link field updated successfully') await s.consistency() // ASSERT: test commitment fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.ok(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, 'field reference OK on read') - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, differentProcessId, 'field updated successfully') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.ok(readResponse.commitment && readResponse.commitment.inputOf, 'field reference OK on read') + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, differentProcessId, 'field updated successfully') // ASSERT: test new commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.Ok[0] - && readResponse.Ok[0].commitment - && readResponse.Ok[0].commitment.id, iCommitmentId, 'new field query index applied') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) + t.deepEqual(readResponse.edges && readResponse.edges[0] + && readResponse.edges[0].node + && readResponse.edges[0].node.id, iCommitmentId, 'new field query index applied') // ASSERT: test stale commitment input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'stale field query index removed') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'stale field query index removed') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index count ok') - t.equal(readResponse.Ok && readResponse.Ok[0] - && readResponse.Ok[0].process - && readResponse.Ok[0].process.id, differentProcessId, 'new reciprocal query index applied') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index count ok') + t.deepEqual(readResponse.edges && readResponse.edges[0] + && readResponse.edges[0].node + && readResponse.edges[0].node.id, differentProcessId, 'new reciprocal query index applied') // SCENARIO: update link field (no-op) - const ieResp3 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: updateCommitment }) - t.equal(ieResp3.Ok.commitment && ieResp3.Ok.commitment.inputOf, differentProcessId, 'update with same fields is no-op') + const ieResp3 = await planning.call('commitment', 'update_commitment', { commitment: updateCommitment }) + const ieResp3RevisionId = ieResp3.revisionId + t.deepEqual(ieResp3.commitment && ieResp3.commitment.inputOf, differentProcessId, 'update with same fields is no-op') await s.consistency() // ASSERT: test event fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, 
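The test changes above thread `revisionId` through every mutation: updates and deletes must address the latest `HeaderHash` revision, while reads still use the record `id`. A condensed sketch of that flow, assuming a Tryorama-style cell handle as used in these tests; `delete_commitment` is assumed here by analogy with the `delete_intent` call later in the series:

  // each mutation returns a fresh revisionId that the next mutation must supply
  async function revisionThreadedFlow (planning: { call: (zome: string, fn: string, payload: any) => Promise<any> }) {
    const created = await planning.call('commitment', 'create_commitment', { commitment: { note: 'initial' } })
    let revisionId = created.commitment.revisionId

    const updated = await planning.call('commitment', 'update_commitment', {
      commitment: { id: created.commitment.id, revisionId, note: 'amended' },
    })
    revisionId = updated.commitment.revisionId // a stale revisionId would not chain onto the latest header

    // deletes address a revision (HeaderHash), not the record id
    await planning.call('commitment', 'delete_commitment', { address: revisionId })
  }
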
differentProcessId, 'field update no-op OK') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.deepEqual(readResponse.commitment && readResponse.commitment.inputOf, differentProcessId, 'field update no-op OK') // SCENARIO: remove link field const wipeEventInput = { id: iCommitmentId, + revisionId: ieResp3RevisionId, action: 'lower', inputOf: null, } - const ieResp4 = await alice.call('planning', 'commitment', 'update_commitment', { commitment: wipeEventInput }) - t.equal(ieResp4.Ok.commitment && ieResp4.Ok.commitment.inputOf, undefined, 'update with null value erases field') + let ieResp4 + try { + ieResp4 = await planning.call('commitment', 'update_commitment', { commitment: wipeEventInput }) + } catch (e) { + // to create a failure, pretend that we still have what was there + ieResp4 = { commitment: { inputOf: differentProcessId }} + console.error(e) + } + t.equal(ieResp4.commitment && ieResp4.commitment.inputOf, undefined, 'update with null value erases field') await s.consistency() // ASSERT: test event fields - readResponse = await alice.call('planning', 'commitment', 'get_commitment', { address: iCommitmentId }) - t.equal(readResponse.Ok.commitment && readResponse.Ok.commitment.inputOf, undefined, 'field erased successfully') + readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) + t.equal(readResponse.commitment && readResponse.commitment.inputOf, undefined, 'field erased successfully') // ASSERT: test event input query edge - readResponse = await alice.call('planning', 'commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'field query index updated') + readResponse = await planning.call('commitment_index', 'query_commitments', { params: { inputOf: differentProcessId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index updated') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'reciprocal field query index updated') + readResponse = await observation.call('process_index', 'query_processes', { params: { committedInputs: iCommitmentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal field query index updated') @@ -148,71 +156,74 @@ runner.registerScenario('updating remote link fields syncs fields and associated }) runner.registerScenario('removing records with linked remote indexes clears them in associated records', async (s, t) => { - const { alice } = await s.players({ alice: config }, true) + const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) // SCENARIO: write initial records const process = { name: 'context record for testing relationships', } - const pResp = await alice.call('observation', 'process', 'create_process', { process }) - t.ok(pResp.Ok.process && pResp.Ok.process.id, 'record created successfully') + const pResp = await observation.call('process', 'create_process', { process }) + t.ok(pResp.process && pResp.process.id, 'record created successfully') await s.consistency() - const processId = pResp.Ok.process.id + const processId = pResp.process.id const iIntent = { note: 'test input intent', inputOf: processId, ...testEventProps, } - const iiResp = await alice.call('planning', 'intent', 'create_intent', 
{ intent: iIntent }) - t.ok(iiResp.Ok.intent && iiResp.Ok.intent.id, 'input record created successfully') - t.equal(iiResp.Ok.intent.inputOf, processId, 'field reference OK in write') + const iiResp = await planning.call('intent', 'create_intent', { intent: iIntent }) + t.ok(iiResp.intent && iiResp.intent.id, 'input record created successfully') + t.deepEqual(iiResp.intent.inputOf, processId, 'field reference OK in write') await s.consistency() - const iIntentId = iiResp.Ok.intent.id + const iIntentId = iiResp.intent.id + const iIntentRevisionId = iiResp.intent.revisionId // ASSERT: test forward link field - let readResponse = await alice.call('planning', 'intent', 'get_intent', { address: iIntentId }) - t.equal(readResponse.Ok.intent && readResponse.Ok.intent.inputOf, processId, 'field reference OK on read') + let readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + t.deepEqual(readResponse.intent && readResponse.intent.inputOf, processId, 'field reference OK on read') // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - t.equal(readResponse.Ok.process - && readResponse.Ok.process.intendedInputs - && readResponse.Ok.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.deepEqual(readResponse.process + && readResponse.process.intendedInputs + && readResponse.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'field query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].intent && readResponse.Ok[0].intent.id, iIntentId, 'query index OK') + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'query index OK') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 1, 'reciprocal query index present') - t.equal(readResponse.Ok && readResponse.Ok[0] && readResponse.Ok[0].process && readResponse.Ok[0].process.id, processId, 'reciprocal query index OK') + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // SCENARIO: wipe associated record - const delResp = await alice.call('planning', 'intent', 'delete_intent', { address: iIntentId }) - t.ok(delResp.Ok, 'input record deleted') + await planning.call('intent', 'delete_intent', { address: iIntentRevisionId }) await s.consistency() // ASSERT: test forward link field - readResponse = await alice.call('planning', 'intent', 'get_intent', { address: iIntentId }) - t.equal(readResponse.Err && 
readResponse.Err.Internal, 'No entry at this address', 'record deletion OK') + try { + readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + } catch (err) { + t.ok(err.data.data.includes('No entry at this address'), 'record deletion OK') + } // ASSERT: test reciprocal link field - readResponse = await alice.call('observation', 'process', 'get_process', { address: processId }) - t.equal(readResponse.Ok.process - && readResponse.Ok.process.intendedInputs.length, 0, 'reciprocal field reference removed') + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.equal(readResponse.process + && readResponse.process.intendedInputs.length, 0, 'reciprocal field reference removed') // ASSERT: test commitment input query edge - readResponse = await alice.call('planning', 'intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'field query index removed') + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index removed') // ASSERT: test process input query edge - readResponse = await alice.call('observation', 'process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.Ok && readResponse.Ok.length, 0, 'reciprocal query index removed') + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal query index removed') }) runner.run() diff --git a/test/core-architecture/test_record_links_cross_zome.js b/test/core-architecture/test_record_links_cross_zome.js index d3e19ac61..1ad0cd12f 100644 --- a/test/core-architecture/test_record_links_cross_zome.js +++ b/test/core-architecture/test_record_links_cross_zome.js @@ -58,13 +58,13 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse.results && readResponse.results.length, 1, 'field query index present') - t.deepEqual(readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'query index OK') + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'query index OK') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.results && readResponse.results.length, 1, 'reciprocal query index present') - t.deepEqual(readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'reciprocal query index OK') + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // :TODO: need to find a new record with a local zome link to test, since EconomicEvent is not updateable /* @@ -84,13 +84,13 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test 
event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.results.length, 1, 'field query index present') - t.equal(readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'field query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 1, 'field query index present') + t.equal(readResponse.edges[0] && readResponse.edges[0].economicEvent && readResponse.edges[0].economicEvent.id, iEventId, 'field query index updated') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.Ok && readResponse.results.length, 1, 'process query index present') - t.equal(readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, differentProcessId, 'process query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 1, 'process query index present') + t.equal(readResponse.edges[0] && readResponse.edges[0].process && readResponse.edges[0].process.id, differentProcessId, 'process query index updated') @@ -120,11 +120,11 @@ runner.registerScenario('updating local link fields syncs fields and associated // ASSERT: test event input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: differentProcessId } }) - t.equal(readResponse.Ok && readResponse.results.length, 0, 'field query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 0, 'field query index updated') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse.Ok && readResponse.results.length, 0, 'process query index updated') + t.equal(readResponse.Ok && readResponse.edges.length, 0, 'process query index updated') */ @@ -183,13 +183,13 @@ runner2.registerScenario('removing records with linked local indexes clears them // ASSERT: test commitment input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse && readResponse.results.length, 1, 'field query index present') - t.deepEqual(readResponse && readResponse.results[0] && readResponse.results[0].economicEvent && readResponse.results[0].economicEvent.id, iEventId, 'query index OK') + t.equal(readResponse && readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse && readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iEventId, 'query index OK') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse && readResponse.results.length, 1, 'reciprocal query index present') - t.deepEqual(readResponse && readResponse.results[0] && readResponse.results[0].process && readResponse.results[0].process.id, processId, 'reciprocal query index OK') + t.equal(readResponse && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') // SCENARIO: wipe associated 
record const delResp = await observation.call('economic_event', 'delete_economic_event', { address: iEventRev }) @@ -209,11 +209,11 @@ runner2.registerScenario('removing records with linked local indexes clears them // ASSERT: test commitment input query edge readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { inputOf: processId } }) - t.equal(readResponse && readResponse.results.length, 0, 'field query index removed') + t.equal(readResponse && readResponse.edges.length, 0, 'field query index removed') // ASSERT: test process input query edge readResponse = await observation.call('process_index', 'query_processes', { params: { inputs: iEventId } }) - t.equal(readResponse && readResponse.results.length, 0, 'reciprocal query index removed') + t.equal(readResponse && readResponse.edges.length, 0, 'reciprocal query index removed') }) runner.run() From ca3fcacce7edc554c5a978143a4e0fd51da4a2dc Mon Sep 17 00:00:00 2001 From: Connor Turland Date: Tue, 26 Apr 2022 14:43:12 -0700 Subject: [PATCH 142/181] fixes #248 --- .../test_record_links_cross_dna.js | 73 +-------------- .../test_record_links_remove_cross_dna.js | 92 +++++++++++++++++++ 2 files changed, 93 insertions(+), 72 deletions(-) create mode 100644 test/core-architecture/test_record_links_remove_cross_dna.js diff --git a/test/core-architecture/test_record_links_cross_dna.js b/test/core-architecture/test_record_links_cross_dna.js index e64800858..8caf3567f 100644 --- a/test/core-architecture/test_record_links_cross_dna.js +++ b/test/core-architecture/test_record_links_cross_dna.js @@ -109,9 +109,9 @@ runner.registerScenario('updating remote link fields syncs fields and associated // SCENARIO: update link field (no-op) const ieResp3 = await planning.call('commitment', 'update_commitment', { commitment: updateCommitment }) - const ieResp3RevisionId = ieResp3.revisionId t.deepEqual(ieResp3.commitment && ieResp3.commitment.inputOf, differentProcessId, 'update with same fields is no-op') await s.consistency() + const ieResp3RevisionId = ieResp3.commitment.revisionId // ASSERT: test event fields readResponse = await planning.call('commitment', 'get_commitment', { address: iCommitmentId }) @@ -155,75 +155,4 @@ runner.registerScenario('updating remote link fields syncs fields and associated // :TODO: updates for fields when other values are present in the index array }) -runner.registerScenario('removing records with linked remote indexes clears them in associated records', async (s, t) => { - const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) - - // SCENARIO: write initial records - const process = { - name: 'context record for testing relationships', - } - const pResp = await observation.call('process', 'create_process', { process }) - t.ok(pResp.process && pResp.process.id, 'record created successfully') - await s.consistency() - const processId = pResp.process.id - - const iIntent = { - note: 'test input intent', - inputOf: processId, - ...testEventProps, - } - const iiResp = await planning.call('intent', 'create_intent', { intent: iIntent }) - t.ok(iiResp.intent && iiResp.intent.id, 'input record created successfully') - t.deepEqual(iiResp.intent.inputOf, processId, 'field reference OK in write') - await s.consistency() - const iIntentId = iiResp.intent.id - const iIntentRevisionId = iiResp.intent.revisionId - - // ASSERT: test forward link field - let readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) - 
t.deepEqual(readResponse.intent && readResponse.intent.inputOf, processId, 'field reference OK on read') - - // ASSERT: test reciprocal link field - readResponse = await observation.call('process', 'get_process', { address: processId }) - t.deepEqual(readResponse.process - && readResponse.process.intendedInputs - && readResponse.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') - - // ASSERT: test commitment input query edge - readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') - t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'query index OK') - - // ASSERT: test process input query edge - readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') - t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') - - - - // SCENARIO: wipe associated record - await planning.call('intent', 'delete_intent', { address: iIntentRevisionId }) - await s.consistency() - - // ASSERT: test forward link field - try { - readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) - } catch (err) { - t.ok(err.data.data.includes('No entry at this address'), 'record deletion OK') - } - - // ASSERT: test reciprocal link field - readResponse = await observation.call('process', 'get_process', { address: processId }) - t.equal(readResponse.process - && readResponse.process.intendedInputs.length, 0, 'reciprocal field reference removed') - - // ASSERT: test commitment input query edge - readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) - t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index removed') - - // ASSERT: test process input query edge - readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) - t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal query index removed') -}) - runner.run() diff --git a/test/core-architecture/test_record_links_remove_cross_dna.js b/test/core-architecture/test_record_links_remove_cross_dna.js new file mode 100644 index 000000000..274862ac6 --- /dev/null +++ b/test/core-architecture/test_record_links_remove_cross_dna.js @@ -0,0 +1,92 @@ +const { + buildConfig, + buildPlayer, + buildRunner, + mockAgentId, + mockIdentifier, +} = require('../init') + +const runner = buildRunner() + +const config = buildConfig() + +const testEventProps = { + action: 'consume', + resourceClassifiedAs: ['some-resource-type'], + resourceQuantity: { hasNumericalValue: 1, hasUnit: mockIdentifier(false) }, + provider: mockAgentId(false), + receiver: mockAgentId(false), + due: '2019-11-19T04:29:55.056Z', +} + +runner.registerScenario('removing records with linked remote indexes clears them in associated records', async (s, t) => { + const { cells: [observation, planning] } = await buildPlayer(s, config, ['observation', 'planning']) + + // SCENARIO: write initial records + const process = { + name: 'context record for testing relationships', + } + const pResp = await observation.call('process', 'create_process', { process }) + 
t.ok(pResp.process && pResp.process.id, 'record created successfully') + await s.consistency() + const processId = pResp.process.id + + const iIntent = { + note: 'test input intent', + inputOf: processId, + ...testEventProps, + } + const iiResp = await planning.call('intent', 'create_intent', { intent: iIntent }) + t.ok(iiResp.intent && iiResp.intent.id, 'input record created successfully') + t.deepEqual(iiResp.intent.inputOf, processId, 'field reference OK in write') + await s.consistency() + const iIntentId = iiResp.intent.id + const iIntentRevisionId = iiResp.intent.revisionId + + // ASSERT: test forward link field + let readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + t.deepEqual(readResponse.intent && readResponse.intent.inputOf, processId, 'field reference OK on read') + + // ASSERT: test reciprocal link field + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.deepEqual(readResponse.process + && readResponse.process.intendedInputs + && readResponse.process.intendedInputs[0], iIntentId, 'reciprocal field reference OK on read') + + // ASSERT: test commitment input query edge + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'field query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, iIntentId, 'query index OK') + + // ASSERT: test process input query edge + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 1, 'reciprocal query index present') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, processId, 'reciprocal query index OK') + + + + // SCENARIO: wipe associated record + await planning.call('intent', 'delete_intent', { address: iIntentRevisionId }) + await s.consistency() + + // ASSERT: test forward link field + try { + readResponse = await planning.call('intent', 'get_intent', { address: iIntentId }) + } catch (err) { + t.ok(err.data.data.includes('No entry at this address'), 'record deletion OK') + } + + // ASSERT: test reciprocal link field + readResponse = await observation.call('process', 'get_process', { address: processId }) + t.equal(readResponse.process.intendedInputs, undefined, 'reciprocal field reference removed') + + // ASSERT: test commitment input query edge + readResponse = await planning.call('intent_index', 'query_intents', { params: { inputOf: processId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'field query index removed') + + // ASSERT: test process input query edge + readResponse = await observation.call('process_index', 'query_processes', { params: { intendedInputs: iIntentId } }) + t.equal(readResponse.edges && readResponse.edges.length, 0, 'reciprocal query index removed') +}) + +runner.run() From a794029a7e5b3138a932bf587193de0a9ec3880c Mon Sep 17 00:00:00 2001 From: pospi Date: Sun, 1 May 2022 07:59:44 +1000 Subject: [PATCH 143/181] fix nonstandard name for EconomicEvent index zome in Fulfillment --- zomes/rea_fulfillment/lib_destination/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index df667e99c..9b1eb3715 100644 
--- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -35,7 +35,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - let create_index_results = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), event.fulfills(&fulfillment_address))?; + let create_index_results = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), economic_event.fulfills(&fulfillment_address))?; hdk::prelude::debug!("handle_create_fulfillment::fulfilled_by::create_index!: {:?}", create_index_results); // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs @@ -60,7 +60,7 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) fulfillment .fulfilled_by(&vec![new_entry.fulfilled_by.clone()]) .not(&vec![prev_entry.fulfilled_by]), - event.fulfills(&base_address) + economic_event.fulfills(&base_address) )?; } @@ -73,13 +73,13 @@ pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // handle link fields - update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), event.fulfills(&base_address))?; + update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), economic_event.fulfills(&base_address))?; delete_record::(&revision_id) } /// Properties accessor for zome config. -fn read_event_index_zome(conf: DnaConfigSliceObservation) -> Option { +fn read_economic_event_index_zome(conf: DnaConfigSliceObservation) -> Option { Some(conf.fulfillment.economic_event_index_zome) } From 1bc911cd0f5f6ec6ee4c986b782b47ff7cdd4a2c Mon Sep 17 00:00:00 2001 From: pospi Date: Sun, 1 May 2022 08:09:08 +1000 Subject: [PATCH 144/181] only check record ID portion of EconomicEvent.fulfills in assertion temporary solution until split record handling is addressed in #266 --- test/fulfillment/fulfillment_records_e2e.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fulfillment/fulfillment_records_e2e.js b/test/fulfillment/fulfillment_records_e2e.js index 418ad8a03..8541ba50e 100644 --- a/test/fulfillment/fulfillment_records_e2e.js +++ b/test/fulfillment/fulfillment_records_e2e.js @@ -62,7 +62,7 @@ runner.registerScenario('links can be written and read between DNAs', async (s, console.log('readResponse', readResponse) t.ok(readResponse.economicEvent.fulfills, 'EconomicEvent.fulfills value present') t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved') - t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentId, 'EconomicEvent.fulfills reference OK') + t.deepEqual(readResponse.economicEvent.fulfills[0][1], fulfillmentId[1], 'EconomicEvent.fulfills reference OK') // ASSERT: check commitment readResponse = await planning.call('commitment', 'get_commitment', { address: commitmentId }) From 056d071b5913a320e7cdec5fa059c78d797eac60 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 2 May 2022 21:37:39 +1000 Subject: [PATCH 145/181] add helpers for sorting lists of records with IDs as raw buffers (for tests not using GraphQL API) --- test/init.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/init.js b/test/init.js index f56d94d0d..d8766ab59 100644 --- a/test/init.js +++ b/test/init.js @@ -224,6 +224,14 @@ module.exports = { if (a.id 
=== b.id) return 0
     return a.id < b.id ? -1 : 1
   },
+  sortByIdBuffer: (a, b) => { // :NOTE: this sorts on EntryHash, ignores DnaHash
+    if (a.id[1] === b.id[1]) return 0
+    return a.id[1] < b.id[1] ? -1 : 1
+  },
+  sortIdBuffers: (a, b) => { // :NOTE: this sorts on EntryHash, ignores DnaHash
+    if (a[1] === b[1]) return 0
+    return a[1] < b[1] ? -1 : 1
+  },
   waitForInput,
 }

From 08de190cda0e07e0670bba163af1555b4fb0711c Mon Sep 17 00:00:00 2001
From: pospi
Date: Mon, 2 May 2022 21:46:47 +1000
Subject: [PATCH 146/181] fix assertions in Fulfillment tests for ordering,
 list output structs & cross-DNA IDs

---
 test/fulfillment/fulfillment_records_e2e.js | 77 +++++++++++++--------
 1 file changed, 48 insertions(+), 29 deletions(-)

diff --git a/test/fulfillment/fulfillment_records_e2e.js b/test/fulfillment/fulfillment_records_e2e.js
index 8541ba50e..6c21b9769 100644
--- a/test/fulfillment/fulfillment_records_e2e.js
+++ b/test/fulfillment/fulfillment_records_e2e.js
@@ -4,6 +4,8 @@ const {
   buildPlayer,
   mockIdentifier,
   mockAgentId,
+  sortByIdBuffer,
+  sortIdBuffers,
 } = require('../init')

 const runner = buildRunner()
@@ -51,39 +53,40 @@ runner.registerScenario('links can be written and read between DNAs', async (s,
   t.ok(fulfillmentResp.fulfillment && fulfillmentResp.fulfillment.id, 'fulfillment created successfully')
   await s.consistency()
   const fulfillmentId = fulfillmentResp.fulfillment.id
+  const fulfillmentIdObs = [eventId[0], fulfillmentId[1]] // :NOTE: ID in dest network will be same EntryHash, different DnaHash

   // ASSERT: check fulfillment in originating network
   let readResponse = await planning.call('fulfillment', 'get_fulfillment', { address: fulfillmentId })
-  t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved')
-  t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved')
+  t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved in planning DNA')
+  t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved in planning DNA')

   // ASSERT: check event
   readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId })
   console.log('readResponse', readResponse)
   t.ok(readResponse.economicEvent.fulfills, 'EconomicEvent.fulfills value present')
-  t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved')
-  t.deepEqual(readResponse.economicEvent.fulfills[0][1], fulfillmentId[1], 'EconomicEvent.fulfills reference OK')
+  t.equal(readResponse.economicEvent.fulfills.length, 1, 'EconomicEvent.fulfills reference saved in observation DNA')
+  t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentIdObs, 'EconomicEvent.fulfills reference OK in observation DNA')

   // ASSERT: check commitment
   readResponse = await planning.call('commitment', 'get_commitment', { address: commitmentId })
   t.ok(readResponse.commitment.fulfilledBy, 'Commitment.fulfilledBy reciprocal value present')
-  t.equal(readResponse.commitment.fulfilledBy.length, 1, 'Commitment.fulfilledBy reciprocal reference saved')
-  t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reciprocal fulfillment reference OK')
+  t.equal(readResponse.commitment.fulfilledBy.length, 1, 'Commitment.fulfilledBy reciprocal reference saved in planning DNA')
+  t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reciprocal fulfillment reference OK in planning 
DNA') - // ASSERT: check fulfillment in target network - readResponse = await observation.call('fulfillment', 'get_fulfillment', { address: fulfillmentId }) - t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved') - t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved') + // ASSERT: check fulfillment in destination network + readResponse = await observation.call('fulfillment', 'get_fulfillment', { address: fulfillmentIdObs }) + t.deepEqual(readResponse.fulfillment.fulfilledBy, eventId, 'Fulfillment.fulfilledBy reference saved in observation DNA') + t.deepEqual(readResponse.fulfillment.fulfills, commitmentId, 'Fulfillment.fulfills reference saved in observation DNA') // ASSERT: check forward query indexes readResponse = await planning.call('fulfillment_index', 'query_fulfillments', { params: { fulfills: commitmentId } }) - t.equal(readResponse.length, 1, 'read fulfillments by commitment OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly') + t.equal(readResponse.edges.length, 1, 'read fulfillments by commitment OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly') // ASSERT: check reverse query indexes readResponse = await observation.call('fulfillment_index', 'query_fulfillments', { params: { fulfilledBy: eventId } }) - t.equal(readResponse.length, 1, 'read fulfillments by event OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'Fulfillment.fulfilledBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read fulfillments by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id[1], fulfillmentId[1], 'Fulfillment.fulfilledBy indexed correctly in observation DNA') @@ -97,40 +100,56 @@ runner.registerScenario('links can be written and read between DNAs', async (s, t.ok(fulfillmentResp2.fulfillment && fulfillmentResp2.fulfillment.id, 'additional fulfillment created successfully') await s.consistency() const fulfillmentId2 = fulfillmentResp2.fulfillment.id + const fulfillmentId2Obs = [eventId[0], fulfillmentId2[1]] // ASSERT: check forward query indices readResponse = await planning.call('fulfillment_index', 'query_fulfillments', { params: { fulfills: commitmentId } }) - t.equal(readResponse.length, 2, 'appending fulfillments for read OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'fulfillment 1 indexed correctly') - t.deepEqual(readResponse.Ok[1].fulfillment.id, fulfillmentId2, 'fulfillment 2 indexed correctly') + t.equal(readResponse.edges.length, 2, 'appending fulfillments for read OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'fulfillment 1 indexed correctly') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, fulfillmentId2, 'fulfillment 2 indexed correctly') // ASSERT: ensure append is working on the event read side readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedFIds = [{ id: fulfillmentId }, { id: fulfillmentId2 }].sort(sortByIdBuffer) + const sortedFIdsObs = [{ 
id: fulfillmentIdObs }, { id: fulfillmentId2Obs }].sort(sortByIdBuffer) + readResponse.economicEvent.fulfills.sort(sortIdBuffers) + t.equal(readResponse.economicEvent.fulfills.length, 2, 'EconomicEvent.fulfills appending OK') - t.deepEqual(readResponse.economicEvent.fulfills[0], fulfillmentId, 'EconomicEvent.fulfills reference 1 OK') - t.deepEqual(readResponse.economicEvent.fulfills[1], fulfillmentId2, 'EconomicEvent.fulfills reference 2 OK') + t.deepEqual(readResponse.economicEvent.fulfills[0], sortedFIdsObs[0].id, 'EconomicEvent.fulfills reference 1 OK in observation DNA') + t.deepEqual(readResponse.economicEvent.fulfills[1], sortedFIdsObs[1].id, 'EconomicEvent.fulfills reference 2 OK in observation DNA') + // :TODO: test fulfillment reference in planning DNA // ASSERT: ensure query indices on the event read side - readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { fulfills: fulfillmentId } }) - t.equal(readResponse.length, 1, 'appending fulfillments for event query OK') - t.deepEqual(readResponse.Ok[0].economicEvent.id, eventId, 'event query indexed correctly') + readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { fulfills: fulfillmentIdObs } }) + t.equal(readResponse.edges.length, 1, 'appending fulfillments for event query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, eventId, 'event query indexed correctly') // ASSERT: ensure append is working on the commitment read side readResponse = await planning.call('commitment', 'get_commitment', { address: commitmentId }) + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.commitment.fulfilledBy.sort(sortIdBuffers) + t.equal(readResponse.commitment.fulfilledBy.length, 2, 'Commitment.fulfilledBy appending OK') - t.deepEqual(readResponse.commitment.fulfilledBy[0], fulfillmentId, 'Commitment.fulfilledBy reference 1 OK') - t.deepEqual(readResponse.commitment.fulfilledBy[1], fulfillmentId2, 'Commitment.fulfilledBy reference 2 OK') + t.deepEqual(readResponse.commitment.fulfilledBy[0], sortedFIds[0].id, 'Commitment.fulfilledBy reference 1 OK') + t.deepEqual(readResponse.commitment.fulfilledBy[1], sortedFIds[1].id, 'Commitment.fulfilledBy reference 2 OK') // ASSERT: ensure query indices on the commitment read side readResponse = await planning.call('commitment_index', 'query_commitments', { params: { fulfilledBy: fulfillmentId } }) - t.equal(readResponse.length, 1, 'appending fulfillments for commitment query OK') - t.deepEqual(readResponse.Ok[0].commitment.id, commitmentId, 'commitment query indexed correctly') + t.equal(readResponse.edges.length, 1, 'appending fulfillments for commitment query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, commitmentId, 'commitment query indexed correctly') // ASSERT: check reciprocal query indexes readResponse = await observation.call('fulfillment_index', 'query_fulfillments', { params: { fulfilledBy: eventId } }) - t.equal(readResponse.length, 2, 'read fulfillments by event OK') - t.deepEqual(readResponse.Ok[0].fulfillment.id, fulfillmentId, 'fulfillment 1 indexed correctly') - t.deepEqual(readResponse.Ok[1].fulfillment.id, fulfillmentId2, 'fulfillment 2 indexed correctly') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.edges.sort(({ node 
}, { node: node2 }) => sortByIdBuffer(node, node2)) + + t.equal(readResponse.edges.length, 2, 'read fulfillments by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, sortedFIdsObs[0].id, 'fulfillment 1 indexed correctly in observation DNA') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, sortedFIdsObs[1].id, 'fulfillment 2 indexed correctly in observation DNA') }) runner.run() From 0fd255026994d63aa16460416288bdcced5a0f1e Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 12:32:02 +1000 Subject: [PATCH 147/181] fix satisfaction test assertions with workarounds for sorting, list API changes & multi-DNA awareness --- test/satisfaction/satisfaction_records_e2e.js | 73 ++++++++++--------- 1 file changed, 37 insertions(+), 36 deletions(-) diff --git a/test/satisfaction/satisfaction_records_e2e.js b/test/satisfaction/satisfaction_records_e2e.js index 6c1f151eb..915940b4f 100644 --- a/test/satisfaction/satisfaction_records_e2e.js +++ b/test/satisfaction/satisfaction_records_e2e.js @@ -4,6 +4,7 @@ const { buildPlayer, mockIdentifier, mockAgentId, + sortByIdBuffer, sortIdBuffers, } = require('../init') const runner = buildRunner() @@ -51,58 +52,49 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a t.ok(satisfactionResp.satisfaction && satisfactionResp.satisfaction.id, 'satisfaction by event created successfully') await s.consistency() const satisfactionId = satisfactionResp.satisfaction.id + const satisfactionIdObs = [eventId[0], satisfactionId[1]] // :NOTE: ID in dest network will be same EntryHash, different DnaHash // ASSERT: check satisfaction in originating network let readResponse = await planning.call('satisfaction', 'get_satisfaction', { address: satisfactionId }) - t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved') - t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved') + t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved in planning DNA') + t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved in planning DNA') // ASSERT: check satisfaction in target network - readResponse = await observation.call('satisfaction', 'get_satisfaction', { address: satisfactionId }) - t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved') - t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved') + readResponse = await observation.call('satisfaction', 'get_satisfaction', { address: satisfactionIdObs }) + t.deepEqual(readResponse.satisfaction.satisfiedBy, eventId, 'Satisfaction.satisfiedBy reference saved in observation DNA') + t.deepEqual(readResponse.satisfaction.satisfies, intentId, 'Satisfaction.satisfies reference saved in observation DNA') // ASSERT: check event field refs readResponse = await observation.call('economic_event', 'get_economic_event', { address: eventId }) - // TESTS start to fail here, and continue to the end of the file - // The zome calls themselves are not failing, it is that - // the response data does not match the assertions - /* - not ok 8 EconomicEvent.satisfies value present - --- - operator: ok - expected: true - actual: undefined - */ t.ok(readResponse.economicEvent.satisfies, 'EconomicEvent.satisfies 
value present') - t.equal(readResponse.economicEvent.satisfies.length, 1, 'EconomicEvent.satisfies reference saved') - t.deepEqual(readResponse.economicEvent.satisfies[0], satisfactionId, 'EconomicEvent.satisfies reference OK') + t.equal(readResponse.economicEvent.satisfies.length, 1, 'EconomicEvent.satisfies reference saved in observation DNA') + t.deepEqual(readResponse.economicEvent.satisfies[0], satisfactionIdObs, 'EconomicEvent.satisfies reference OK in observation DNA') // ASSERT: check intent field refs readResponse = await planning.call('intent', 'get_intent', { address: intentId }) t.ok(readResponse.intent.satisfiedBy, 'intent.satisfiedBy reciprocal value present') t.equal(readResponse.intent.satisfiedBy.length, 1, 'Intent.satisfiedBy reciprocal reference saved') - t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId, 'Intent.satisfiedBy reciprocal satisfaction reference OK') + t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId, 'Intent.satisfiedBy reciprocal satisfaction reference OK in planning DNA') // ASSERT: check intent query indexes readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfies: intentId } }) - t.equal(readResponse.length, 1, 'read satisfactions by intent OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId, 'Satisfaction.satisfies indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by intent OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionId, 'Satisfaction.satisfies indexed correctly in planning DNA') // ASSERT: check event query indexes readResponse = await observation.call('satisfaction_index', 'query_satisfactions', { params: { satisfiedBy: eventId } }) - t.equal(readResponse.length, 1, 'read satisfactions by event OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId, 'Satisfaction.satisfiedBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by event OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionIdObs, 'Satisfaction.satisfiedBy indexed correctly in observation DNA') // ASSERT: check intent satisfaction query indexes readResponse = await planning.call('intent_index', 'query_intents', { params: { satisfiedBy: satisfactionId } }) - t.equal(readResponse.length, 1, 'indexing satisfactions for intent query OK') - t.deepEqual(readResponse.Ok[0].intent.id, intentId, 'intent query 1 indexed correctly') + t.equal(readResponse.edges.length, 1, 'indexing satisfactions for intent query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, intentId, 'intent query 1 indexed correctly in planning DNA') // ASSERT: check event satisfaction query indexes - readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { satisfies: satisfactionId } }) - t.equal(readResponse.length, 1, 'indexing satisfactions for event query OK') - t.deepEqual(readResponse.Ok[0].economicEvent.id, eventId, 'event query 1 indexed correctly') + readResponse = await observation.call('economic_event_index', 'query_economic_events', { params: { satisfies: satisfactionIdObs } }) + t.equal(readResponse.edges.length, 1, 'indexing satisfactions for event query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && 
readResponse.edges[0].node.id, eventId, 'event query 1 indexed correctly in observation DNA') @@ -134,25 +126,34 @@ runner.registerScenario('satisfactions can be written and read between DNAs by a // ASSERT: check intent query indices readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfies: intentId } }) - t.equal(readResponse.length, 2, 'appending satisfactions for read OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId2, 'satisfaction 2 indexed correctly') - t.deepEqual(readResponse.Ok[1].satisfaction.id, satisfactionId, 'satisfaction 1 indexed correctly') + t.equal(readResponse.edges.length, 2, 'appending satisfactions for read OK') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedSIds = [{ id: satisfactionId }, { id: satisfactionId2 }].sort(sortByIdBuffer) + readResponse.edges.sort(({ node }, { node: node2 }) => sortByIdBuffer(node, node2)) + + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, sortedSIds[0].id, 'satisfaction 1 indexed correctly') + t.deepEqual(readResponse.edges && readResponse.edges[1] && readResponse.edges[1].node && readResponse.edges[1].node.id, sortedSIds[1].id, 'satisfaction 2 indexed correctly') // ASSERT: check intent field refs readResponse = await planning.call('intent', 'get_intent', { address: intentId }) t.equal(readResponse.intent.satisfiedBy.length, 2, 'Intent.satisfiedBy appending OK') - t.deepEqual(readResponse.intent.satisfiedBy[0], satisfactionId2, 'Intent.satisfiedBy reference 2 OK') - t.deepEqual(readResponse.intent.satisfiedBy[1], satisfactionId, 'Intent.satisfiedBy reference 1 OK') + + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + readResponse.intent.satisfiedBy.sort(sortIdBuffers) + + t.deepEqual(readResponse.intent.satisfiedBy[0], sortedSIds[0].id, 'Intent.satisfiedBy reference 1 OK') + t.deepEqual(readResponse.intent.satisfiedBy[1], sortedSIds[1].id, 'Intent.satisfiedBy reference 2 OK') // ASSERT: check commitment query indexes readResponse = await planning.call('satisfaction_index', 'query_satisfactions', { params: { satisfiedBy: commitmentId } }) - t.equal(readResponse.length, 1, 'read satisfactions by commitment OK') - t.deepEqual(readResponse.Ok[0].satisfaction.id, satisfactionId2, 'Satisfaction.satisfiedBy indexed correctly') + t.equal(readResponse.edges.length, 1, 'read satisfactions by commitment OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, satisfactionId2, 'Satisfaction.satisfiedBy indexed correctly') // ASSERT: check intent satisfaction query indexes readResponse = await planning.call('intent_index', 'query_intents', { params: { satisfiedBy: satisfactionId2 } }) - t.equal(readResponse.length, 1, 'appending satisfactions for intent query OK') - t.deepEqual(readResponse.Ok[0].intent.id, intentId, 'intent query 2 indexed correctly') + t.equal(readResponse.edges.length, 1, 'appending satisfactions for intent query OK') + t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, intentId, 'intent query 2 indexed correctly') }) runner.run() From 5560ebb59c15263dbf4a32c3cc9a9bea451fc819 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 12:32:47 +1000 Subject: [PATCH 148/181] minor update to assertion message --- 
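Note on the ID convention behind the sorting workarounds in this and the preceding few patches: a record ID is a [DnaHash, EntryHash] pair of byte buffers, and until deterministic time-ordered indexing lands, the tests collate result lists client-side on the EntryHash half only, so the same record read from two networks (same EntryHash, different DnaHash) sorts identically. A minimal standalone sketch of that comparison follows — it uses Node's built-in Buffer.compare rather than the exact helpers in test/init.js, and all names are illustrative only:

const assert = require('assert')

// hypothetical IDs: [DnaHash, EntryHash] tuples, as returned by zome calls
const idInPlanning = [Buffer.from('dna-planning'), Buffer.from('entry-b')]
const idInObservation = [Buffer.from('dna-observation'), Buffer.from('entry-b')] // same record, other network
const otherId = [Buffer.from('dna-planning'), Buffer.from('entry-a')]

// compare on the EntryHash portion only, ignoring the DnaHash portion
const byEntryHash = (a, b) => Buffer.compare(a[1], b[1])

// the same record seen from either network collates into the same position
assert.deepStrictEqual(
  [idInPlanning, otherId].sort(byEntryHash).map((id) => id[1]),
  [idInObservation, otherId].sort(byEntryHash).map((id) => id[1]),
)

The in-tree helpers rely on relational comparison of the raw buffers instead of Buffer.compare; for collation purposes the result should be equivalent.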
 test/fulfillment/fulfillment_records_e2e.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/fulfillment/fulfillment_records_e2e.js b/test/fulfillment/fulfillment_records_e2e.js
index 6c21b9769..5372b4256 100644
--- a/test/fulfillment/fulfillment_records_e2e.js
+++ b/test/fulfillment/fulfillment_records_e2e.js
@@ -81,7 +81,7 @@ runner.registerScenario('links can be written and read between DNAs', async (s,
   // ASSERT: check forward query indexes
   readResponse = await planning.call('fulfillment_index', 'query_fulfillments', { params: { fulfills: commitmentId } })
   t.equal(readResponse.edges.length, 1, 'read fulfillments by commitment OK')
-  t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly')
+  t.deepEqual(readResponse.edges && readResponse.edges[0] && readResponse.edges[0].node && readResponse.edges[0].node.id, fulfillmentId, 'Fulfillment.fulfills indexed correctly in planning DNA')

   // ASSERT: check reverse query indexes
   readResponse = await observation.call('fulfillment_index', 'query_fulfillments', { params: { fulfilledBy: eventId } })

From 3574f1c210ecd2c9f6e965979ccd23d6da8313b8 Mon Sep 17 00:00:00 2001
From: pospi
Date: Wed, 4 May 2022 12:41:23 +1000
Subject: [PATCH 149/181] first pass at porting Proposal tests to new Tryorama
 #251

---
 test/proposal/test_proposal_crud.js       | 19 +++++-----
 test/proposal/test_proposedintent_crud.js | 43 ++++++++++-------------
 test/proposal/test_proposedto_crud.js     | 26 ++++++--------
 3 files changed, 37 insertions(+), 51 deletions(-)

diff --git a/test/proposal/test_proposal_crud.js b/test/proposal/test_proposal_crud.js
index d4c1b3a93..1414ff46f 100644
--- a/test/proposal/test_proposal_crud.js
+++ b/test/proposal/test_proposal_crud.js
@@ -1,5 +1,4 @@
 const {
-  getDNA,
   buildConfig,
   buildRunner,
   buildPlayer,
@@ -7,9 +6,7 @@ const {

 const runner = buildRunner()

-const config = buildConfig({
-  proposal: getDNA('proposal'),
-}, {})
+const config = buildConfig()

 const exampleEntry = {
   name: 'String',
@@ -28,9 +25,9 @@ const updatedExampleEntry = {
 }

 runner.registerScenario('Proposal record API', async (s, t) => {
-  const alice = await buildPlayer(s, 'alice', config)
+  const { graphQL } = await buildPlayer(s, config, ['proposal'])

-  let createResp = await alice.graphQL(`
+  let createResp = await graphQL(`
     mutation($rs: ProposalCreateParams!) {
       res: createProposal(proposal: $rs) {
         proposal {
@@ -45,7 +42,7 @@ runner.registerScenario('Proposal record API', async (s, t) => {
   t.ok(createResp.data.res.proposal.id, 'record created')
   const psId = createResp.data.res.proposal.id

-  let getResp = await alice.graphQL(`
+  let getResp = await graphQL(`
     query($id: ID!) {
       res: proposal(id: $id) {
         id
@@ -61,7 +58,7 @@ runner.registerScenario('Proposal record API', async (s, t) => {
     id: psId,
   })
   t.deepEqual(getResp.data.res, { 'id': psId, ...exampleEntry }, 'record read OK')
-  const updateResp = await alice.graphQL(`
+  const updateResp = await graphQL(`
     mutation($rs: ProposalUpdateParams!) {
       res: updateProposal(proposal: $rs) {
         proposal {
@@ -76,7 +73,7 @@ runner.registerScenario('Proposal record API', async (s, t) => {
   t.equal(updateResp.data.res.proposal.id, psId, 'record updated')

   // now we fetch the Entry again to check that the update was successful
-  const updatedGetResp = await alice.graphQL(`
+  const updatedGetResp = await graphQL(`
     query($id: ID!) 
{ res: proposal(id: $id) { id @@ -93,7 +90,7 @@ runner.registerScenario('Proposal record API', async (s, t) => { }) t.deepEqual(updatedGetResp.data.res, { id: psId, created: exampleEntry.created, ...updatedExampleEntry }, 'record updated OK') - const deleteResult = await alice.graphQL(` + const deleteResult = await graphQL(` mutation($id: ID!) { res: deleteProposal(id: $id) } @@ -104,7 +101,7 @@ runner.registerScenario('Proposal record API', async (s, t) => { t.equal(deleteResult.data.res, true) - const queryForDeleted = await alice.graphQL(` + const queryForDeleted = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id diff --git a/test/proposal/test_proposedintent_crud.js b/test/proposal/test_proposedintent_crud.js index 7dea53a66..7d51b484f 100644 --- a/test/proposal/test_proposedintent_crud.js +++ b/test/proposal/test_proposedintent_crud.js @@ -1,5 +1,4 @@ const { - getDNA, buildConfig, buildRunner, buildPlayer, @@ -7,20 +6,14 @@ const { const runner = buildRunner() -const config = buildConfig({ - proposal: getDNA('proposal'), - planning: getDNA('planning'), - agent: getDNA('agent'), -}, { - vf_planning: ['proposal', 'planning'], -}) +const config = buildConfig() const exampleProposal = { name: 'String', - hasBeginning: '2019-11-19T00:00:00.056Z', - hasEnd: '2019-11-19T00:00:00.056Z', + hasBeginning: new Date('2019-11-19T00:00:00.056Z'), + hasEnd: new Date('2019-11-19T00:00:00.056Z'), unitBased: true, - created: '2019-11-19T00:00:00.056Z', + created: new Date('2019-11-19T00:00:00.056Z'), note: 'note', } @@ -29,12 +22,12 @@ const exampleIntent = { } runner.registerScenario('ProposedIntent external link', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { graphQL } = await buildPlayer(s, config, ['proposal', 'planning', 'agent']) /* * the next code is only for getting an intent and a proposal to link to the proposedIntent. * the idea is to verify the intent linking by getting Proposal->ProposedIntent->Intent */ - const agentAddress = (await alice.graphQL(`{ + const agentAddress = (await graphQL(`{ myAgent { id } @@ -42,7 +35,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { exampleIntent.provider = agentAddress // intent creation - let intentRes = await alice.graphQL(` + let intentRes = await graphQL(` mutation($rs: IntentCreateParams!) { res: createIntent(intent: $rs) { intent { @@ -58,7 +51,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(intentAdress, 'can create intent') // proposal creation - let proposalRes = await alice.graphQL(` + let proposalRes = await graphQL(` mutation($rs: ProposalCreateParams!) { res: createProposal(proposal: $rs) { proposal { @@ -73,7 +66,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { let proposalAdress = proposalRes.data.res.proposal.id t.ok(proposalAdress, 'can create proposal') - proposalRes = await alice.graphQL(` + proposalRes = await graphQL(` query($id: ID!) 
{ res: proposal(id: $id) { id @@ -88,7 +81,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(proposalRes.data.res.id, proposalAdress, 'proposal read OK') t.equal(proposalRes.data.res.publishes.length, 0, 'proposedIntent list empty') - let proposeIntentResp = await alice.graphQL(` + let proposeIntentResp = await graphQL(` mutation($pIn: ID!, $ps: ID!, $re: Boolean) { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { @@ -105,7 +98,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(proposeIntentResp.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress = proposeIntentResp.data.res.proposedIntent.id - let getResp = await alice.graphQL(` + let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -126,7 +119,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress, 'intent fetching from proposedIntent succesful') // another intent - intentRes = await alice.graphQL(` + intentRes = await graphQL(` mutation($rs: IntentCreateParams!) { res: createIntent(intent: $rs) { intent { @@ -145,7 +138,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(intentAdress2, 'can create intent') // another proposed intent - let proposeIntentResp2 = await alice.graphQL(` + let proposeIntentResp2 = await graphQL(` mutation($pIn: ID!, $ps: ID!, $re: Boolean) { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { @@ -162,7 +155,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.ok(proposeIntentResp2.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress2 = proposeIntentResp2.data.res.proposedIntent.id - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -184,7 +177,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress2, 'intent B fetching from proposedIntent succesful') t.equal(getResp.data.res.publishes[1].publishes.id, intentAdress, 'intent A fetching from proposedIntent succesful') - await alice.graphQL(` + await graphQL(` mutation($in: ID!) { res: deleteProposedIntent(id: $in) } @@ -193,7 +186,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { }) await s.consistency() - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -213,7 +206,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { t.equal(getResp.data.res.publishes[0].id, proposedIntentAdress2, 'proposedIntent fetching from proposal after delete succesful') t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress2, 'intent fetching from proposedIntent after delete succesful') - await alice.graphQL(` + await graphQL(` mutation($in: ID!) { res: deleteProposedIntent(id: $in) } @@ -222,7 +215,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { }) await s.consistency() - getResp = await alice.graphQL(` + getResp = await graphQL(` query($id: ID!) 
{ res: proposal(id: $id) { id diff --git a/test/proposal/test_proposedto_crud.js b/test/proposal/test_proposedto_crud.js index 3c56da76a..cd4bdfdbf 100644 --- a/test/proposal/test_proposedto_crud.js +++ b/test/proposal/test_proposedto_crud.js @@ -1,5 +1,4 @@ const { - getDNA, buildConfig, buildRunner, buildPlayer, @@ -7,30 +6,27 @@ const { const runner = buildRunner() -const config = buildConfig({ - proposal: getDNA('proposal'), - agent: getDNA('agent'), -}, {}) +const config = buildConfig() const exampleProposal = { name: 'String', - hasBeginning: '2019-11-19T00:00:00.056Z', - hasEnd: '2019-11-19T00:00:00.056Z', + hasBeginning: new Date('2019-11-19T00:00:00.056Z'), + hasEnd: new Date('2019-11-19T00:00:00.056Z'), unitBased: true, - created: '2019-11-19T00:00:00.056Z', + created: new Date('2019-11-19T00:00:00.056Z'), note: 'note', } runner.registerScenario('ProposedTo record API', async (s, t) => { - const alice = await buildPlayer(s, 'alice', config) + const { graphQL } = await buildPlayer(s, config, ['proposal', 'agent']) - const agentAddress = (await alice.graphQL(`{ + const agentAddress = (await graphQL(`{ myAgent { id } }`)).data.myAgent.id - let proposalRes = await alice.graphQL(` + let proposalRes = await graphQL(` mutation($rs: ProposalCreateParams!) { res: createProposal(proposal: $rs) { proposal { @@ -46,7 +42,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { await s.consistency() - let createResp = await alice.graphQL(` + let createResp = await graphQL(` mutation($p: ID!, $pTo: ID!) { res: proposeTo(proposed: $p,proposedTo: $pTo) { proposedTo { @@ -62,7 +58,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.ok(createResp.data.res.proposedTo.id, 'record created') const psID = createResp.data.res.proposedTo.id - let getResp = await alice.graphQL(` + let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id @@ -82,7 +78,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.equal(getResp.data.res.publishedTo[0].id, psID, 'proposedTo fetching from proposal succesful') t.equal(getResp.data.res.publishedTo[0].proposedTo.id, agentAddress, 'agent fetching from proposedTo succesful') - const deleteResult = await alice.graphQL(` + const deleteResult = await graphQL(` mutation($id: ID!) 
{ res: deleteProposedTo(id: $id) } @@ -93,7 +89,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.equal(deleteResult.data.res, true) - const queryForDeleted = await alice.graphQL(` + const queryForDeleted = await graphQL(` query { res: proposal(id: "${proposalID}") { id From 9720a469bb02e3f1313d5d8692fe7f9669a86252 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 12:45:40 +1000 Subject: [PATCH 150/181] fix Proposal & Process structs using old Timestamp instead of new DateTime for time fields --- zomes/rea_process/rpc/src/lib.rs | 26 +++++++++++++------------- zomes/rea_process/storage/src/lib.rs | 10 +++++----- zomes/rea_proposal/rpc/src/lib.rs | 19 ++++++++++--------- zomes/rea_proposal/storage/src/lib.rs | 8 ++++---- 4 files changed, 32 insertions(+), 31 deletions(-) diff --git a/zomes/rea_process/rpc/src/lib.rs b/zomes/rea_process/rpc/src/lib.rs index 68ecee09d..164d7ffd5 100644 --- a/zomes/rea_process/rpc/src/lib.rs +++ b/zomes/rea_process/rpc/src/lib.rs @@ -10,7 +10,7 @@ use serde_maybe_undefined::{ pub use vf_attributes_hdk::{ HeaderHash, ByHeader, ProcessAddress, - Timestamp, + DateTime, FixedOffset, ExternalURL, ProcessSpecificationAddress, PlanAddress, @@ -30,13 +30,13 @@ pub struct Response { pub revision_id: HeaderHash, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] - pub has_beginning: Option, + pub has_beginning: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub has_end: Option, + pub has_end: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub before: Option, + pub before: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub after: Option, + pub after: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub classified_as: Option>, #[serde(skip_serializing_if = "Option::is_none")] @@ -104,13 +104,13 @@ pub struct ResponseData { pub struct CreateRequest { pub name: String, #[serde(default)] - pub has_beginning: MaybeUndefined, + pub has_beginning: MaybeUndefined>, #[serde(default)] - pub has_end: MaybeUndefined, + pub has_end: MaybeUndefined>, #[serde(default)] - pub before: MaybeUndefined, + pub before: MaybeUndefined>, #[serde(default)] - pub after: MaybeUndefined, + pub after: MaybeUndefined>, #[serde(default)] pub classified_as: MaybeUndefined>, #[serde(default)] @@ -139,13 +139,13 @@ pub struct UpdateRequest { #[serde(default)] pub name: MaybeUndefined, #[serde(default)] - pub has_beginning: MaybeUndefined, + pub has_beginning: MaybeUndefined>, #[serde(default)] - pub has_end: MaybeUndefined, + pub has_end: MaybeUndefined>, #[serde(default)] - pub before: MaybeUndefined, + pub before: MaybeUndefined>, #[serde(default)] - pub after: MaybeUndefined, + pub after: MaybeUndefined>, #[serde(default)] pub classified_as: MaybeUndefined>, #[serde(default)] diff --git a/zomes/rea_process/storage/src/lib.rs b/zomes/rea_process/storage/src/lib.rs index 823085c71..6d519ab20 100644 --- a/zomes/rea_process/storage/src/lib.rs +++ b/zomes/rea_process/storage/src/lib.rs @@ -16,7 +16,7 @@ use hdk_records::{ use vf_attributes_hdk::{ ProcessAddress, - Timestamp, + DateTime, FixedOffset, ExternalURL, ProcessSpecificationAddress, PlanAddress, @@ -42,10 +42,10 @@ pub struct ProcessZomeConfig { #[derive(Clone, Serialize, Deserialize, SerializedBytes, Debug)] pub struct EntryData { pub name: String, - pub has_beginning: Option, - pub has_end: Option, - pub before: Option, - pub after: Option, + pub has_beginning: Option>, + pub has_end: Option>, + pub before: Option>, + pub after: Option>, pub 
classified_as: Option>, pub based_on: Option, pub planned_within: Option, diff --git a/zomes/rea_proposal/rpc/src/lib.rs b/zomes/rea_proposal/rpc/src/lib.rs index 9fd03bff8..fe7145678 100644 --- a/zomes/rea_proposal/rpc/src/lib.rs +++ b/zomes/rea_proposal/rpc/src/lib.rs @@ -10,7 +10,8 @@ use holochain_serialized_bytes::prelude::*; use serde_maybe_undefined::MaybeUndefined; pub use vf_attributes_hdk::{ HeaderHash, ByAddress, ByHeader, - ProposalAddress, ProposedIntentAddress, ProposedToAddress, Timestamp, + ProposalAddress, ProposedIntentAddress, ProposedToAddress, + DateTime, FixedOffset, }; /// Toplevel I/O structs for WASM API @@ -36,13 +37,13 @@ pub struct Response { pub revision_id: HeaderHash, pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub has_beginning: Option, + pub has_beginning: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub has_end: Option, + pub has_end: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub unit_based: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub created: Option, + pub created: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub note: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -76,13 +77,13 @@ pub struct CreateRequest { #[serde(default)] pub name: MaybeUndefined, #[serde(default)] - pub has_beginning: MaybeUndefined, + pub has_beginning: MaybeUndefined>, #[serde(default)] - pub has_end: MaybeUndefined, + pub has_end: MaybeUndefined>, #[serde(default)] pub unit_based: MaybeUndefined, #[serde(default)] - pub created: MaybeUndefined, + pub created: MaybeUndefined>, #[serde(default)] pub note: MaybeUndefined, #[serde(default)] @@ -104,9 +105,9 @@ pub struct UpdateRequest { #[serde(default)] pub name: MaybeUndefined, #[serde(default)] - pub has_beginning: MaybeUndefined, + pub has_beginning: MaybeUndefined>, #[serde(default)] - pub has_end: MaybeUndefined, + pub has_end: MaybeUndefined>, #[serde(default)] pub unit_based: MaybeUndefined, #[serde(default)] diff --git a/zomes/rea_proposal/storage/src/lib.rs b/zomes/rea_proposal/storage/src/lib.rs index 1daee20b7..58e6b4332 100644 --- a/zomes/rea_proposal/storage/src/lib.rs +++ b/zomes/rea_proposal/storage/src/lib.rs @@ -13,7 +13,7 @@ use hdk_records::{ generate_record_entry, }; -pub use vf_attributes_hdk::{ ProposalAddress, ProposedIntentAddress, ProposedToAddress, Timestamp }; +pub use vf_attributes_hdk::{ ProposalAddress, ProposedIntentAddress, ProposedToAddress, DateTime, FixedOffset }; use hc_zome_rea_proposal_rpc::{CreateRequest, UpdateRequest}; @@ -35,10 +35,10 @@ pub struct ProposalZomeConfig { #[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone)] pub struct EntryData { pub name: Option, - pub has_beginning: Option, - pub has_end: Option, + pub has_beginning: Option>, + pub has_end: Option>, pub unit_based: Option, - pub created: Option, + pub created: Option>, pub note: Option, pub in_scope_of: Option>, //[TODO]: From dcebb124ad9df8ebf9c58c406bafc54293f167c8 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 12:48:21 +1000 Subject: [PATCH 151/181] fix missing index config for Proposal zome --- bundles/dna_templates/proposal/dna.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bundles/dna_templates/proposal/dna.yaml b/bundles/dna_templates/proposal/dna.yaml index fe1030b8a..6f627e219 100644 --- a/bundles/dna_templates/proposal/dna.yaml +++ b/bundles/dna_templates/proposal/dna.yaml @@ -2,6 +2,8 @@ manifest_version: "1" name: "hrea_proposal" uuid: "" properties: + proposal: + index_zome: 
proposal_index proposal_index: record_storage_zome: proposal proposed_intent: From eaeaf766da034a92df896a2493b3603fa91509fe Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:08:57 +1000 Subject: [PATCH 152/181] update Proposal CRUD tests to use revisions #251 --- test/proposal/test_proposal_crud.js | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/test/proposal/test_proposal_crud.js b/test/proposal/test_proposal_crud.js index 1414ff46f..c2471b71f 100644 --- a/test/proposal/test_proposal_crud.js +++ b/test/proposal/test_proposal_crud.js @@ -32,6 +32,7 @@ runner.registerScenario('Proposal record API', async (s, t) => { res: createProposal(proposal: $rs) { proposal { id + revisionId } } } @@ -41,11 +42,13 @@ runner.registerScenario('Proposal record API', async (s, t) => { await s.consistency() t.ok(createResp.data.res.proposal.id, 'record created') const psId = createResp.data.res.proposal.id + const psRev = createResp.data.res.proposal.revisionId let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id + revisionId name hasBeginning hasEnd @@ -57,26 +60,30 @@ runner.registerScenario('Proposal record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(getResp.data.res, { 'id': psId, ...exampleEntry }, 'record read OK') + t.deepEqual(getResp.data.res, { 'id': psId, 'revisionId': psRev, ...exampleEntry }, 'record read OK') const updateResp = await graphQL(` mutation($rs: ProposalUpdateParams!) { res: updateProposal(proposal: $rs) { proposal { id + revisionId } } } `, { - rs: { id: psId, ...updatedExampleEntry }, + rs: { revisionId: psRev, ...updatedExampleEntry }, }) await s.consistency() - t.equal(updateResp.data.res.proposal.id, psId, 'record updated') + t.equal(updateResp.data.res.proposal.id, psId, 'record ID consistent') + t.notEqual(updateResp.data.res.proposal.revisionId, psRev, 'record updated') + const psRev2 = updateResp.data.res.proposal.revisionId // now we fetch the Entry again to check that the update was successful const updatedGetResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { id + revisionId created name hasBeginning @@ -88,14 +95,14 @@ runner.registerScenario('Proposal record API', async (s, t) => { `, { id: psId, }) - t.deepEqual(updatedGetResp.data.res, { id: psId, created: exampleEntry.created, ...updatedExampleEntry }, 'record updated OK') + t.deepEqual(updatedGetResp.data.res, { id: psId, revisionId: psRev2, created: exampleEntry.created, ...updatedExampleEntry }, 'record updated OK') const deleteResult = await graphQL(` - mutation($id: ID!) { - res: deleteProposal(id: $id) + mutation($revisionId: ID!) 
{ + res: deleteProposal(revisionId: $revisionId) } `, { - id: psId, + revisionId: psRev2, }) await s.consistency() From 4bb7ff53eb618de35597c6e0ed26464e26a77590 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:14:57 +1000 Subject: [PATCH 153/181] fix missing EntryDefs in proposal index zomes --- zomes/rea_proposal/zome_idx_proposal/src/lib.rs | 12 ++++++++++++ .../rea_proposed_intent/zome_idx_proposal/src/lib.rs | 12 ++++++++++++ zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs | 12 ++++++++++++ 3 files changed, 36 insertions(+) diff --git a/zomes/rea_proposal/zome_idx_proposal/src/lib.rs b/zomes/rea_proposal/zome_idx_proposal/src/lib.rs index 9550449ae..9169641a6 100644 --- a/zomes/rea_proposal/zome_idx_proposal/src/lib.rs +++ b/zomes/rea_proposal/zome_idx_proposal/src/lib.rs @@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_proposal_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProposalAddress::entry_def(), + ProposedIntentAddress::entry_def(), + ProposedToAddress::entry_def(), + ])) +} + #[index_zome] struct Proposal { publishes: Local, diff --git a/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs b/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs index 96b81c2f1..15ed89743 100644 --- a/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs +++ b/zomes/rea_proposed_intent/zome_idx_proposal/src/lib.rs @@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_proposed_intent_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProposedIntentAddress::entry_def(), + ProposalAddress::entry_def(), + IntentAddress::entry_def(), + ])) +} + #[index_zome] struct ProposedIntent { published_in: Local, diff --git a/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs b/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs index 09fbe8398..1cd87af5d 100644 --- a/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs +++ b/zomes/rea_proposed_to/zome_idx_proposal/src/lib.rs @@ -8,6 +8,18 @@ use hdk_semantic_indexes_zome_derive::index_zome; use hc_zome_rea_proposed_to_rpc::*; use hdk_semantic_indexes_zome_lib::ByAddress; // disambiguate from RPC query struct +// :TODO: remove this; should not be necessary since all these types are imported +// along with their entry_def! 
in dependent crates +#[hdk_extern] +fn entry_defs(_: ()) -> ExternResult { + Ok(EntryDefsCallbackResult::from(vec![ + PathEntry::entry_def(), + ProposedToAddress::entry_def(), + ProposalAddress::entry_def(), + // AgentAddress::entry_def(), // :TODO: + ])) +} + #[index_zome] struct ProposedTo { proposed: Local, From 20171e5ab4e3d0866ecb89ce0bce60f027391198 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:22:17 +1000 Subject: [PATCH 154/181] update ProposedIntent tests to use revisions for update/delete --- test/proposal/test_proposedintent_crud.js | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/proposal/test_proposedintent_crud.js b/test/proposal/test_proposedintent_crud.js index 7d51b484f..da3119b30 100644 --- a/test/proposal/test_proposedintent_crud.js +++ b/test/proposal/test_proposedintent_crud.js @@ -86,6 +86,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { id + revisionId } } } @@ -97,6 +98,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await s.consistency() t.ok(proposeIntentResp.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress = proposeIntentResp.data.res.proposedIntent.id + const proposedIntentRev = proposeIntentResp.data.res.proposedIntent.revisionId let getResp = await graphQL(` query($id: ID!) { @@ -143,6 +145,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { res: proposeIntent(publishedIn: $pIn, publishes: $ps, reciprocal: $re) { proposedIntent { id + revisionId } } } @@ -154,6 +157,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await s.consistency() t.ok(proposeIntentResp2.data.res.proposedIntent.id, 'can propose') const proposedIntentAdress2 = proposeIntentResp2.data.res.proposedIntent.id + const proposedIntentRev2 = proposeIntentResp2.data.res.proposedIntent.revisionId getResp = await graphQL(` query($id: ID!) { @@ -179,10 +183,10 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await graphQL(` mutation($in: ID!) { - res: deleteProposedIntent(id: $in) + res: deleteProposedIntent(revisionId: $in) } `, { - in: proposedIntentAdress, + in: proposedIntentRev, }) await s.consistency() @@ -208,10 +212,10 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { await graphQL(` mutation($in: ID!) 
{ - res: deleteProposedIntent(id: $in) + res: deleteProposedIntent(revisionId: $in) } `, { - in: proposedIntentAdress2, + in: proposedIntentRev2, }) await s.consistency() From 7314df03393ddc0506f58ee7c23fa3196c174268 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:22:34 +1000 Subject: [PATCH 155/181] fix remote_auth zome not being included in planning DNA --- bundles/dna_templates/planning/dna.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bundles/dna_templates/planning/dna.yaml b/bundles/dna_templates/planning/dna.yaml index 62cd50df1..0201e49c1 100644 --- a/bundles/dna_templates/planning/dna.yaml +++ b/bundles/dna_templates/planning/dna.yaml @@ -46,3 +46,7 @@ zomes: path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_fulfillment_index_planning.wasm" - name: satisfaction_index path: "/target/wasm32-unknown-unknown/release/hc_zome_rea_satisfaction_index_planning.wasm" + + # utility zomes + - name: remote_auth + path: "/target/wasm32-unknown-unknown/release/hc_zome_dna_auth_resolver_hrea.wasm" From 5a79ac63992ac9a94a9a5793c14bdb9f88acb3b1 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:22:57 +1000 Subject: [PATCH 156/181] fix ProposedIntent not declaring EntryDef needed to store credentials for updating planning DNA --- zomes/rea_proposed_intent/lib/src/lib.rs | 3 +++ zomes/rea_proposed_intent/zome/src/lib.rs | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index a75b6ea63..b943e4fa6 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -21,6 +21,9 @@ use hdk_semantic_indexes_client_lib::*; use hc_zome_rea_proposed_intent_rpc::*; use hc_zome_rea_proposed_intent_storage::*; +// :SHONK: needed to re-export for zome `entry_defs()` where macro-assigned defs are overridden +pub use hdk_records::CAP_STORAGE_ENTRY_DEF_ID; + pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: CreateRequest) -> RecordAPIResult where S: AsRef, { diff --git a/zomes/rea_proposed_intent/zome/src/lib.rs b/zomes/rea_proposed_intent/zome/src/lib.rs index 276388401..9f78cb0ad 100644 --- a/zomes/rea_proposed_intent/zome/src/lib.rs +++ b/zomes/rea_proposed_intent/zome/src/lib.rs @@ -18,6 +18,13 @@ fn entry_defs(_: ()) -> ExternResult { Ok(EntryDefsCallbackResult::from(vec![ PathEntry::entry_def(), ProposedIntentAddress::entry_def(), + EntryDef { + id: CAP_STORAGE_ENTRY_DEF_ID.into(), + visibility: EntryVisibility::Private, + crdt_type: CrdtType, + required_validations: 1.into(), + required_validation_type: RequiredValidationType::default(), + }, EntryDef { id: PROPOSED_INTENT_ENTRY_TYPE.into(), visibility: EntryVisibility::Public, From 757dc069a417fd48abf2b7dfbc28b76ec18b4549 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:24:07 +1000 Subject: [PATCH 157/181] fix missing entryDef in intent index zome --- zomes/rea_intent/zome_idx_planning/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/zomes/rea_intent/zome_idx_planning/src/lib.rs b/zomes/rea_intent/zome_idx_planning/src/lib.rs index 94f07808e..bd74e1fdf 100644 --- a/zomes/rea_intent/zome_idx_planning/src/lib.rs +++ b/zomes/rea_intent/zome_idx_planning/src/lib.rs @@ -16,6 +16,7 @@ fn entry_defs(_: ()) -> ExternResult { IntentAddress::entry_def(), SatisfactionAddress::entry_def(), ProcessAddress::entry_def(), + ProposedIntentAddress::entry_def(), ])) } From a173b470ba8c0659f298249573657b29e44fd21d Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 
4 May 2022 13:31:09 +1000
Subject: [PATCH 158/181] fix ProposedIntent tests erroring due to undefined
 ordering

---
 test/proposal/test_proposedintent_crud.js | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/test/proposal/test_proposedintent_crud.js b/test/proposal/test_proposedintent_crud.js
index da3119b30..955f2dac0 100644
--- a/test/proposal/test_proposedintent_crud.js
+++ b/test/proposal/test_proposedintent_crud.js
@@ -2,6 +2,7 @@ const {
   buildConfig,
   buildRunner,
   buildPlayer,
+  sortById,
 } = require('../init')
 
 const runner = buildRunner()
@@ -131,7 +132,7 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => {
     }
   `, {
     rs: {
-      hasPointInTime: '2019-11-19T00:00:00.056Z',
+      hasPointInTime: new Date('2019-11-19T00:00:00.056Z'),
       ...exampleIntent,
     },
   })
@@ -176,10 +177,16 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => {
   })
   t.equal(getResp.data.res.id, proposalAdress, 'proposal fetch successful')
   t.equal(getResp.data.res.publishes.length, 2, 'proposedIntent count as expected')
-  t.equal(getResp.data.res.publishes[0].id, proposedIntentAdress2, 'proposedIntent B fetching from proposal successful')
-  t.equal(getResp.data.res.publishes[1].id, proposedIntentAdress, 'proposedIntent A fetching from proposal successful')
-  t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress2, 'intent B fetching from proposedIntent successful')
-  t.equal(getResp.data.res.publishes[1].publishes.id, intentAdress, 'intent A fetching from proposedIntent successful')
+
+  // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented
+  const sortedPIIds = [{ id: proposedIntentAdress }, { id: proposedIntentAdress2 }].sort(sortById)
+  getResp.data.res.publishes.sort(sortById)
+
+  t.equal(getResp.data.res.publishes[0].id, sortedPIIds[0].id, 'proposedIntent B fetching from proposal successful')
+  t.equal(getResp.data.res.publishes[1].id, sortedPIIds[1].id, 'proposedIntent A fetching from proposal successful')
+  // :SHONK: non-deterministic sort ordering, depends upon dummy data hashing
+  t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress, 'intent B fetching from proposedIntent successful')
+  t.equal(getResp.data.res.publishes[1].publishes.id, intentAdress2, 'intent A fetching from proposedIntent successful')
 
   await graphQL(`
    mutation($in: ID!)
{ From 180b749e79b8f250c311973e2e473a2c25e7f51f Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:35:50 +1000 Subject: [PATCH 159/181] fix incorrect index name for Proposal.publishedTo --- zomes/rea_proposed_to/lib/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 864c3f8f1..84fa444b9 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs @@ -27,7 +27,7 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest) let (revision_id, base_address, entry_resp): (_, ProposedToAddress, EntryData) = create_record(&entry_def_id, proposed_to.to_owned())?; // handle link fields - let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.proposed_to(&base_address))?; + let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.published_to(&base_address))?; hdk::prelude::debug!("handle_create_proposed_to::proposed::create_index!: {:?}", r1); // :TODO: create index for retrieving all proposals for an agent @@ -46,7 +46,7 @@ pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult(&revision_id)?; - update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.proposed_to(&base_address))?; + update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.published_to(&base_address))?; delete_record::(&revision_id) } From 449f2376d27004b4f9cda1bde092c369056addf4 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 13:36:04 +1000 Subject: [PATCH 160/181] update ProposedTo tests to use revisions for deletion --- test/proposal/test_proposedto_crud.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/proposal/test_proposedto_crud.js b/test/proposal/test_proposedto_crud.js index cd4bdfdbf..94ab8eb88 100644 --- a/test/proposal/test_proposedto_crud.js +++ b/test/proposal/test_proposedto_crud.js @@ -47,6 +47,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { res: proposeTo(proposed: $p,proposedTo: $pTo) { proposedTo { id + revisionId } } } @@ -58,6 +59,7 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { t.ok(createResp.data.res.proposedTo.id, 'record created') const psID = createResp.data.res.proposedTo.id + const psRev = createResp.data.res.proposedTo.revisionId let getResp = await graphQL(` query($id: ID!) { res: proposal(id: $id) { @@ -80,10 +82,10 @@ runner.registerScenario('ProposedTo record API', async (s, t) => { const deleteResult = await graphQL(` mutation($id: ID!) 
{ - res: deleteProposedTo(id: $id) + res: deleteProposedTo(revisionId: $id) } `, { - id: psID, + id: psRev, }) await s.consistency() From d5025681d29ba077c073c384255966d497d0d79c Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:00:21 +1000 Subject: [PATCH 161/181] implement ID translation method to allow cross-DNA resolvers to function correctly --- modules/vf-graphql-holochain/connection.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/modules/vf-graphql-holochain/connection.ts b/modules/vf-graphql-holochain/connection.ts index d1abdc02a..ad006bfd5 100644 --- a/modules/vf-graphql-holochain/connection.ts +++ b/modules/vf-graphql-holochain/connection.ts @@ -156,6 +156,13 @@ function seralizeStringId(id: [Buffer,string]): string { return `${id[1]}:${serializeHash(id[0])}` } +// Construct appropriate IDs for records in associated DNAs by substituting +// the CellId portion of the ID with that of an appropriate destination record +export function remapCellId(originalId, newCellId) { + const [origId, _origCell] = originalId.split(':') + return `${origId}:${newCellId.split(':')[1]}` +} + const LONG_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ss.SSSZ' const SHORT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ' const isoDateRegex = /^\d{4}-\d\d-\d\d(T\d\d:\d\d:\d\d(\.\d\d\d)?)?([+-]\d\d:\d\d)?$/ @@ -294,4 +301,4 @@ export const extractEdges = (withEdges: { edges: { node: T }[] }): T[] => { return [] } return withEdges.edges.map(({ node }) => node) -} \ No newline at end of file +} From 7194171d7671dfe28c8e7c97205962aa7945ec98 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:00:49 +1000 Subject: [PATCH 162/181] fix Satisfaction & Fulfillment resolvers not dealing with cross-DNA links properly --- modules/vf-graphql-holochain/resolvers/fulfillment.ts | 5 +++-- modules/vf-graphql-holochain/resolvers/satisfaction.ts | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/modules/vf-graphql-holochain/resolvers/fulfillment.ts b/modules/vf-graphql-holochain/resolvers/fulfillment.ts index 0ee39a9ec..a23b9b6fd 100644 --- a/modules/vf-graphql-holochain/resolvers/fulfillment.ts +++ b/modules/vf-graphql-holochain/resolvers/fulfillment.ts @@ -6,7 +6,7 @@ */ import { DNAIdMappings, injectTypename, DEFAULT_VF_MODULES, VfModule } from '../types' -import { mapZomeFn } from '../connection' +import { mapZomeFn, remapCellId } from '../connection' import { Fulfillment, @@ -29,7 +29,8 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN }, (hasObservation ? 
{ fulfilledBy: injectTypename('EconomicEvent', async (record: Fulfillment): Promise => { - const results = await readEvents({ params: { fulfills: record.id } }) + const associatedId = remapCellId(record.id, record.fulfilledBy) + const results = await readEvents({ params: { fulfills: associatedId } }) return results.edges.pop()['node'] }), } : {}), diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts index 064fe53da..c1410c0bd 100644 --- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts +++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts @@ -6,7 +6,7 @@ */ import { DNAIdMappings, addTypename, DEFAULT_VF_MODULES, VfModule } from '../types' -import { mapZomeFn } from '../connection' +import { mapZomeFn, remapCellId } from '../connection' import { Satisfaction, @@ -31,6 +31,7 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN return { satisfiedBy: async (record: Satisfaction): Promise => { + const associatedId = remapCellId(record.id, record.satisfiedBy) // :NOTE: this presumes a satisfaction will never be erroneously linked to 2 records return ( await Promise.all([ @@ -38,7 +39,7 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN .then(addTypename('Commitment')) .catch((e) => e), ].concat(hasObservation ? [ - extractRecordsOrFail(readEvents({ params: { satisfies: record.id } }), 'economicEvent') + extractRecordsOrFail(readEvents({ params: { satisfies: associatedId } }), 'economicEvent') .then(addTypename('EconomicEvent')) .catch((e) => e), ] : [])) From 7943db01187da52b9d91f2cccbe4aae4bb02f3f6 Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:12:36 +1000 Subject: [PATCH 163/181] export cell remapping helper temporarily for use in tests --- modules/vf-graphql-holochain/index.ts | 7 +++++-- test/init.js | 2 ++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/modules/vf-graphql-holochain/index.ts b/modules/vf-graphql-holochain/index.ts index f4a6783ae..cdaa1d7db 100644 --- a/modules/vf-graphql-holochain/index.ts +++ b/modules/vf-graphql-holochain/index.ts @@ -12,7 +12,7 @@ import { makeExecutableSchema } from '@graphql-tools/schema' import { APIOptions, ResolverOptions, DEFAULT_VF_MODULES, DNAIdMappings, CellId, VfModule } from './types' import generateResolvers from './resolvers' -import { mapZomeFn, autoConnect, openConnection, sniffHolochainAppCells } from './connection' +import { mapZomeFn, autoConnect, openConnection, sniffHolochainAppCells, remapCellId } from './connection' const { buildSchema, printSchema } = require('@valueflows/vf-graphql') export { @@ -23,7 +23,10 @@ export { // direct access to Holochain zome method bindings for authoring own custom resolvers bound to non-REA DNAs mapZomeFn, // types that wrapper libraries may need to manage conductor DNA connection logic - DNAIdMappings, CellId, APIOptions, VfModule + DNAIdMappings, CellId, APIOptions, VfModule, + + // :TODO: remove this. After #266 clients should not need to think about differing IDs between Cells. 
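+  // An illustrative call (record names hypothetical): keep the record portion of
+  // a planning-cell ID but adopt the cell portion of an observation-cell ID, so
+  // the result can be queried against the observation DNA, e.g.
+  //   const obsScopedId = remapCellId(satisfaction.id, economicEvent.id)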
+ remapCellId, } /** diff --git a/test/init.js b/test/init.js index d8766ab59..18d46b22f 100644 --- a/test/init.js +++ b/test/init.js @@ -20,6 +20,7 @@ const GQLTester = require('easygraphql-tester') const resolverLoggerMiddleware = require('./graphql-logger-middleware') const schema = require('@valueflows/vf-graphql/ALL_VF_SDL') const { generateResolvers } = require('@valueflows/vf-graphql-holochain') +const { remapCellId } = require('@valueflows/vf-graphql-holochain') process.on('unhandledRejection', error => { console.error('unhandled rejection:', error) @@ -218,6 +219,7 @@ module.exports = { return asStr ? `${id}:${serializeHash(dna)}` : [dna, id] }, + remapCellId, // :TODO: temporary code until date indexing order is implemented sortById: (a, b) => { From f36279254d13283788b0b94c421aee2c47bbb7ac Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:12:53 +1000 Subject: [PATCH 164/181] fix assertions for EconomicEvent.satisfies / fulfills by remapping CellIds --- test/flows/flow_records_graphql.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/test/flows/flow_records_graphql.js b/test/flows/flow_records_graphql.js index df9289e56..d3c5c9af8 100644 --- a/test/flows/flow_records_graphql.js +++ b/test/flows/flow_records_graphql.js @@ -5,6 +5,7 @@ const { buildPlayer, mockAgentId, mockIdentifier, + remapCellId, } = require('../init') const runner = buildRunner() @@ -268,6 +269,7 @@ runner.registerScenario('flow records and relationships', async (s, t) => { resp = await graphQL(` { inputEvent: economicEvent(id:"${inputEventId}") { + id fulfills { id } @@ -325,10 +327,14 @@ runner.registerScenario('flow records and relationships', async (s, t) => { } `) + // :TODO: revisit pending a decision on https://github.com/h-REA/hREA/issues/266 + const ifIdObs = remapCellId(ifId, resp.data.inputEvent.id) + const iesIdObs = remapCellId(iesId, resp.data.inputEvent.id) + t.equal(resp.data.inputEvent.fulfills.length, 1, 'input event fulfillment ref added') - t.equal(resp.data.inputEvent.fulfills[0].id, ifId, 'input event fulfillment ref OK') + t.equal(resp.data.inputEvent.fulfills[0].id, ifIdObs, 'input event fulfillment ref OK') t.equal(resp.data.inputEvent.satisfies.length, 1, 'input event satisfaction ref added') - t.equal(resp.data.inputEvent.satisfies[0].id, iesId, 'input event satisfaction ref OK') + t.equal(resp.data.inputEvent.satisfies[0].id, iesIdObs, 'input event satisfaction ref OK') t.equal(resp.data.inputCommitment.fulfilledBy.length, 1, 'input commitment fulfillment ref added') t.equal(resp.data.inputCommitment.fulfilledBy[0].id, ifId, 'input commitment fulfillment ref OK') t.equal(resp.data.inputCommitment.satisfies.length, 1, 'input commitment satisfaction ref added') From c903077659918f1c7d3c28aea29d4bf6c73b1efe Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:16:54 +1000 Subject: [PATCH 165/181] fix passing param adjusted for CellId to satisfaction resolver --- modules/vf-graphql-holochain/resolvers/satisfaction.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts index c1410c0bd..b83c1ac4f 100644 --- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts +++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts @@ -35,7 +35,7 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN // :NOTE: this presumes a satisfaction will never be erroneously linked to 2 records return ( 
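        // both candidate lookups run in parallel; a lookup that finds no linked
        // record resolves to its Error via .catch() below, rather than rejecting
        // the whole Promise.all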
await Promise.all([ - extractRecordsOrFail(readCommitments({ params: { satisfies: record.id } }), 'commitment') + extractRecordsOrFail(readCommitments({ params: { satisfies: associatedId } }), 'commitment') .then(addTypename('Commitment')) .catch((e) => e), ].concat(hasObservation ? [ From 459c8c98e224a3330350a38ac6b0c9178dab88df Mon Sep 17 00:00:00 2001 From: pospi Date: Wed, 4 May 2022 14:29:31 +1000 Subject: [PATCH 166/181] fix errors due to nondeterministic ordering in flow records tests --- test/flows/flow_records_graphql.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/test/flows/flow_records_graphql.js b/test/flows/flow_records_graphql.js index d3c5c9af8..8efd8fed3 100644 --- a/test/flows/flow_records_graphql.js +++ b/test/flows/flow_records_graphql.js @@ -5,6 +5,7 @@ const { buildPlayer, mockAgentId, mockIdentifier, + sortById, remapCellId, } = require('../init') @@ -331,6 +332,10 @@ runner.registerScenario('flow records and relationships', async (s, t) => { const ifIdObs = remapCellId(ifId, resp.data.inputEvent.id) const iesIdObs = remapCellId(iesId, resp.data.inputEvent.id) + // :TODO: remove client-side sorting when deterministic time-ordered indexing is implemented + const sortedSIds = [{ id: iesId }, { id: icsId }].sort(sortById) + resp.data.inputIntent.satisfiedBy.sort(sortById) + t.equal(resp.data.inputEvent.fulfills.length, 1, 'input event fulfillment ref added') t.equal(resp.data.inputEvent.fulfills[0].id, ifIdObs, 'input event fulfillment ref OK') t.equal(resp.data.inputEvent.satisfies.length, 1, 'input event satisfaction ref added') @@ -340,8 +345,8 @@ runner.registerScenario('flow records and relationships', async (s, t) => { t.equal(resp.data.inputCommitment.satisfies.length, 1, 'input commitment satisfaction ref added') t.equal(resp.data.inputCommitment.satisfies[0].id, icsId, 'input commitment satisfaction ref OK') t.equal(resp.data.inputIntent.satisfiedBy.length, 2, 'input intent satisfaction refs added') - t.equal(resp.data.inputIntent.satisfiedBy[0].id, iesId, 'input intent>event satisfaction ref OK') - t.equal(resp.data.inputIntent.satisfiedBy[1].id, icsId, 'input intent>commitment satisfaction ref OK') + t.equal(resp.data.inputIntent.satisfiedBy[0].id, sortedSIds[0].id, 'input intent>event satisfaction ref OK') + t.equal(resp.data.inputIntent.satisfiedBy[1].id, sortedSIds[1].id, 'input intent>commitment satisfaction ref OK') t.equal(resp.data.if.fulfills.id, inputCommitmentId, 'input fulfillment commitment ref OK') t.equal(resp.data.if.fulfilledBy.id, inputEventId, 'input fulfillment event ref OK') From 98743aa7a63d4ca026f88e6649b74ba443361c43 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 16:55:41 +1000 Subject: [PATCH 167/181] workaround result sorting in test assertions for proposedIntent #251 --- test/proposal/test_proposedintent_crud.js | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/test/proposal/test_proposedintent_crud.js b/test/proposal/test_proposedintent_crud.js index 955f2dac0..37a2f026b 100644 --- a/test/proposal/test_proposedintent_crud.js +++ b/test/proposal/test_proposedintent_crud.js @@ -182,11 +182,16 @@ runner.registerScenario('ProposedIntent external link', async (s, t) => { const sortedPIIds = [{ id: proposedIntentAdress }, { id: proposedIntentAdress2 }].sort(sortById) getResp.data.res.publishes.sort(sortById) + const sortedIIds = [{ id: intentAdress }, { id: intentAdress2 }].sort(sortById) + const sortedPublishesIds = [ + { id: getResp.data.res.publishes[0].publishes.id 
},
+    { id: getResp.data.res.publishes[1].publishes.id },
+  ].sort(sortById)
+
   t.equal(getResp.data.res.publishes[0].id, sortedPIIds[0].id, 'proposedIntent B fetching from proposal successful')
   t.equal(getResp.data.res.publishes[1].id, sortedPIIds[1].id, 'proposedIntent A fetching from proposal successful')
-  // :SHONK: non-deterministic sort ordering, depends upon dummy data hashing
-  t.equal(getResp.data.res.publishes[0].publishes.id, intentAdress, 'intent B fetching from proposedIntent successful')
-  t.equal(getResp.data.res.publishes[1].publishes.id, intentAdress2, 'intent A fetching from proposedIntent successful')
+  t.equal(sortedPublishesIds[0].id, sortedIIds[0].id, 'intent B fetching from proposedIntent successful')
+  t.equal(sortedPublishesIds[1].id, sortedIIds[1].id, 'intent A fetching from proposedIntent successful')
 
   await graphQL(`
     mutation($in: ID!) {

From c2357a21c7433c4e3a87cc08aea25c7db4a598e1 Mon Sep 17 00:00:00 2001
From: pospi
Date: Mon, 16 May 2022 18:19:47 +1000
Subject: [PATCH 168/181] #254 fix satisfaction result handling helper to
 match paginated response format

---
 modules/vf-graphql-holochain/resolvers/satisfaction.ts | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/modules/vf-graphql-holochain/resolvers/satisfaction.ts b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
index b83c1ac4f..5e5a32b66 100644
--- a/modules/vf-graphql-holochain/resolvers/satisfaction.ts
+++ b/modules/vf-graphql-holochain/resolvers/satisfaction.ts
@@ -14,12 +14,12 @@ import {
   Intent,
 } from '@valueflows/vf-graphql'
 
-async function extractRecordsOrFail (query, subfieldId: string): Promise<any> {
+async function extractRecordsOrFail (query): Promise<any> {
   const val = await query
-  if (!val || !val.length || !val[0][subfieldId]) {
+  if (!val || !val.edges || !val.edges.length || !val.edges[0].node) {
     throw new Error('Reference not found')
   }
-  return val[0][subfieldId]
+  return val.edges[0].node
 }
 
 export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DNAIdMappings, conductorUri: string) => {
@@ -35,11 +35,11 @@ export default (enabledVFModules: VfModule[] = DEFAULT_VF_MODULES, dnaConfig: DN
       // :NOTE: this presumes a satisfaction will never be erroneously linked to 2 records
       return (
         await Promise.all([
-          extractRecordsOrFail(readCommitments({ params: { satisfies: associatedId } }), 'commitment')
+          extractRecordsOrFail(readCommitments({ params: { satisfies: associatedId } }))
            .then(addTypename('Commitment'))
            .catch((e) => e),
        ].concat(hasObservation ?
[ - extractRecordsOrFail(readEvents({ params: { satisfies: associatedId } }), 'economicEvent') + extractRecordsOrFail(readEvents({ params: { satisfies: associatedId } })) .then(addTypename('EconomicEvent')) .catch((e) => e), ] : [])) From 887fd9aaa9d74259add3b865c0baa0f747e43d0c Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 19:42:16 +1000 Subject: [PATCH 169/181] update logic for anchored records to encode string-based index paths into LinkTag fixes Unit CRUD tests in #249 by allowing string IDs to be queried from the DHT again, after PathEntry changes made this impossible using Path construct --- .../src/anchored_record_helpers.rs | 83 +++++++++++-------- lib/hdk_records/src/lib.rs | 11 ++- lib/hdk_records/src/link_helpers.rs | 14 ++++ 3 files changed, 74 insertions(+), 34 deletions(-) diff --git a/lib/hdk_records/src/anchored_record_helpers.rs b/lib/hdk_records/src/anchored_record_helpers.rs index b8d74b4ca..69f9b06fc 100644 --- a/lib/hdk_records/src/anchored_record_helpers.rs +++ b/lib/hdk_records/src/anchored_record_helpers.rs @@ -10,7 +10,6 @@ * @since 2021-09-15 */ use hdk::prelude::*; -use hdk::hash_path::path::Component; use hdk_uuid_types::{ DnaAddressable, DnaIdentifiable, }; @@ -21,17 +20,17 @@ use crate::{ Identified, Identifiable, UniquelyIdentifiable, Updateable, UpdateableIdentifier, }, - link_helpers::get_linked_addresses, + link_helpers::{ + get_linked_addresses, + get_linked_tags, + get_linked_headers, + }, identity_helpers::calculate_identity_address, records::{ create_record, read_record_entry_by_identity, - // read_record_entry_by_header, - get_latest_header_hash, }, entries::{ - try_entry_from_element, - try_decode_entry, get_entry_by_header, update_entry, delete_entry, @@ -78,16 +77,13 @@ fn read_entry_anchor_id( identity_path_address: &EntryHash, ) -> RecordAPIResult { - let mut addrs = get_linked_addresses(identity_path_address, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; - let entry_hash = addrs.pop().ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))?; - - let path_element = get(entry_hash, GetOptions::default())?; - let entry = try_entry_from_element(path_element.as_ref())?; - let path: Path = try_decode_entry(entry.to_owned())?; - let components: &Vec = path.as_ref(); - let last_component = components.last().unwrap(); - - Ok(last_component.try_into()?) + let mut tags = get_linked_tags(identity_path_address, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + tags.pop() + .map(|t| { + let bytes = &t.into_inner()[3..]; + Ok(String::from_utf8(bytes.to_vec())?) + }) + .ok_or(DataIntegrityError::IndexNotFound((*identity_path_address).clone()))? 
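+        // :NOTE: the [3..] slice above strips the 3-byte "id|" prefix
+        // (RECORD_IDENTITY_ANCHOR_LINK_TAG); the remaining bytes are the raw
+        // UTF-8 identifier appended by create_id_tag() when the link is written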
} /// Given the `EntryHash` of an anchor `Path`, query the identity of the associated entry @@ -155,14 +151,9 @@ pub fn create_anchored_record( // write base record and identity index path let (revision_id, entry_internal_id, entry_data) = create_record::(&entry_def_id, create_payload)?; - // create manually assigned identifier - let path = identity_path_for(&entry_def_id, &entry_id); - path.ensure()?; - - // link the hash identifier to the manually assigned identifier so we can determine it when reading & updating + // link the hash identifier to a new manually assigned identifier so we can determine the anchor when reading & updating let identifier_hash = calculate_identity_address(entry_def_id, &entry_internal_id)?; - create_link(identifier_hash.clone(), path.path_entry_hash()?, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; - create_link(path.path_entry_hash()?, identifier_hash.clone(), LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + link_identities(entry_def_id, &identifier_hash, &entry_id)?; Ok((revision_id, A::new(dna_info()?.hash, entry_id), entry_data)) } @@ -174,7 +165,7 @@ pub fn create_anchored_record( /// /// @see hdk_records::record_interface::UpdateableIdentifier /// -pub fn update_anchored_record( +pub fn update_anchored_record( entry_def_id: &S, revision_id: &HeaderHash, update_payload: U, @@ -185,16 +176,18 @@ pub fn update_anchored_record( I: std::fmt::Debug + Identifiable + Updateable, U: UpdateableIdentifier, WasmError: From, - Entry: TryFrom, + Entry: TryFrom + TryFrom, R: Clone + std::fmt::Debug + Identified, SerializedBytes: TryInto, { // get referenced entry and identifiers for the given header let previous: R = get_entry_by_header(revision_id)?; + let prev_entry = previous.entry(); let identity = previous.identity()?; - let identity_hash: &EntryHash = identity.as_ref(); - let maybe_current_id = read_entry_anchor_id(identity_hash); + + let identity_hash = calculate_identity_address(entry_def_id, &identity)?; + let maybe_current_id = read_entry_anchor_id(&identity_hash); // ensure the referenced entry exists and has an anchored identifier path match maybe_current_id { @@ -214,18 +207,15 @@ pub fn update_anchored_record( Some(new_id) => { if new_id != final_id { // clear any old identity path, ensuring the link structure is as expected - let mut addrs = get_linked_addresses(identity_hash, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + let mut addrs = get_linked_headers(&identity_hash, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; if addrs.len() != 1 { return Err(DataIntegrityError::IndexNotFound(identity_hash.to_owned())); } let old_link = addrs.pop().unwrap(); - let old_link_hash = get_latest_header_hash(old_link)?; - delete_link(old_link_hash.to_owned())?; + delete_link(old_link)?; // create the new identifier and link to it - let path = identity_path_for(&entry_def_id, &new_id); - path.ensure()?; - create_link(identity_hash.to_owned(), path.path_entry_hash()?, LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))?; + link_identities(entry_def_id, &identity_hash, &new_id)?; // reference final ID in record updates to new identifier path final_id = new_id.into(); @@ -255,3 +245,30 @@ pub fn delete_anchored_record(address: &HeaderHash) -> RecordAPIResult delete_entry::(address)?; Ok(true) } + +/// Writes a bidirectional set of anchoring entries for a record so that the string-based identifier +/// can be looked up from the content-addressable `EntryHash`-based 
identifier
+///
+fn link_identities<S, A>(entry_def_id: S, identifier_hash: &EntryHash, id_string: A) -> RecordAPIResult<()>
+    where S: AsRef<str>,
+        A: Clone + AsRef<str>,
+{
+    // create manually assigned identifier
+    let path = identity_path_for(&entry_def_id, &id_string);
+    path.ensure()?;
+
+    let identifier_tag = create_id_tag(id_string.to_owned());
+    create_link(identifier_hash.clone(), path.path_entry_hash()?, identifier_tag.to_owned())?;
+    create_link(path.path_entry_hash()?, identifier_hash.clone(), identifier_tag)?;
+
+    Ok(())
+}
+
+/// Generate a link tag for the identity anchor of a record by encoding the ID string into the tag
+/// so that it can be retrieved by querying the DHT later.
+///
+fn create_id_tag<S>(id_str: S) -> LinkTag
+    where S: AsRef<str>,
+{
+    LinkTag::new([crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG, id_str.as_ref().as_bytes()].concat())
+}
diff --git a/lib/hdk_records/src/lib.rs b/lib/hdk_records/src/lib.rs
index 8205f518c..7d3976b06 100644
--- a/lib/hdk_records/src/lib.rs
+++ b/lib/hdk_records/src/lib.rs
@@ -3,6 +3,7 @@
  */
 use thiserror::Error;
 use std::convert::Infallible;
+use std::string::FromUtf8Error;
 use hdk::prelude::*;
 
 pub use hdk_uuid_types::DnaAddressable;
@@ -67,6 +68,8 @@ pub enum DataIntegrityError {
     EmptyQuery,
     #[error("Index at address {0} with malformed bytes {1:?}")]
     CorruptIndexError(EntryHash, Option<Vec<u8>>),
+    #[error("String index with malformed bytes {0:?}")]
+    BadStringIndexError(Vec<u8>),
     #[error("Error in remote call {0}")]
     RemoteRequestError(String),
     #[error("Bad zome RPC response format from {0}")]
@@ -102,12 +105,18 @@ impl From for DataIntegrityError {
     }
 }
 
+impl From<FromUtf8Error> for DataIntegrityError {
+    fn from(e: FromUtf8Error) -> DataIntegrityError {
+        DataIntegrityError::BadStringIndexError(e.into_bytes())
+    }
+}
+
 // module constants / internals
 pub mod identifiers {
     // Holochain DHT storage type IDs
     pub const RECORD_INITIAL_ENTRY_LINK_TAG: &'static [u8] = b"initial_entry";
-    pub const RECORD_IDENTITY_ANCHOR_LINK_TAG: &'static [u8] = b"identity_anchor";
+    pub const RECORD_IDENTITY_ANCHOR_LINK_TAG: &'static [u8] = b"id|"; // :WARNING: byte length is important here. @see anchored_record_helpers::read_entry_anchor_id
     // temporary: @see query_root_index()
     pub const RECORD_GLOBAL_INDEX_LINK_TAG: &'static [u8] = b"all_entries";
 }
diff --git a/lib/hdk_records/src/link_helpers.rs b/lib/hdk_records/src/link_helpers.rs
index 0ffa2db88..bade80d48 100644
--- a/lib/hdk_records/src/link_helpers.rs
+++ b/lib/hdk_records/src/link_helpers.rs
@@ -43,6 +43,16 @@ pub fn get_linked_headers(
     pull_links_data(base_address, link_tag, get_link_target_header)
 }
 
+/// Load any set of `LinkTag`s being referenced from the
+/// provided `base_address` with the given `link_tag` prefix.
+///
+pub fn get_linked_tags(
+    base_address: &EntryHash,
+    link_tag: LinkTag,
+) -> RecordAPIResult<Vec<LinkTag>> {
+    pull_links_data(base_address, link_tag, get_link_target_tag)
+}
+
 /// Execute the provided `link_map` function against the set of links
 /// between a `base_address` and `target_address` via the given `link_tag`.
/// @@ -93,3 +103,7 @@ fn get_link_target_entry(l: &Link) -> EntryHash { fn get_link_target_header(l: &Link) -> HeaderHash { l.create_link_hash.clone() } + +fn get_link_target_tag(l: &Link) -> LinkTag { + l.tag.clone() +} From a55571b39afdddb5b06d7f2ac87b8717728fbb0e Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 19:42:40 +1000 Subject: [PATCH 170/181] #249 fix unit ID assertion in tests to factor in DnaHash portion of ID --- test/specification/test_unit_crud.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/specification/test_unit_crud.js b/test/specification/test_unit_crud.js index 88e9d3339..f1d3501e0 100644 --- a/test/specification/test_unit_crud.js +++ b/test/specification/test_unit_crud.js @@ -69,7 +69,7 @@ runner.registerScenario('Unit record API', async (s, t) => { await s.consistency() t.notEqual(updateResp.data.res.unit.id, uId, 'update operation succeeded') - t.equal(updateResp.data.res.unit.id, updatedExampleEntry.symbol, 'record index updated') + t.equal(updateResp.data.res.unit.id.split(':')[0], updatedExampleEntry.symbol, 'record index updated') uId = updateResp.data.res.unit.id // now we fetch the Entry again to check that the update was successful From 7691ac29071d43b54122893d41fe56375f6988c2 Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 19:43:15 +1000 Subject: [PATCH 171/181] fix wrong type used to constrain Unit deletion logic #249 --- zomes/rea_unit/lib/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zomes/rea_unit/lib/src/lib.rs b/zomes/rea_unit/lib/src/lib.rs index 64af16d12..ba757df52 100644 --- a/zomes/rea_unit/lib/src/lib.rs +++ b/zomes/rea_unit/lib/src/lib.rs @@ -48,7 +48,7 @@ pub fn handle_update_unit(entry_def_id: S, unit: UpdateRequest) -> RecordAPIR } pub fn handle_delete_unit(revision_id: HeaderHash) -> RecordAPIResult { - delete_anchored_record::(&revision_id) + delete_anchored_record::(&revision_id) } fn construct_response<'a>( From f0dcba3fd605833c5934a84fe2f560b10ca25a0b Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 19:43:21 +1000 Subject: [PATCH 172/181] remove debug --- zomes/rea_unit/zome/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/zomes/rea_unit/zome/src/lib.rs b/zomes/rea_unit/zome/src/lib.rs index 4e064d870..c01a8a39e 100644 --- a/zomes/rea_unit/zome/src/lib.rs +++ b/zomes/rea_unit/zome/src/lib.rs @@ -45,7 +45,6 @@ struct ById { #[hdk_extern] fn get_unit(ById { id }: ById) -> ExternResult { - debug!("READ UNIT {:?}", id); Ok(handle_get_unit(UNIT_ENTRY_TYPE, id)?) 
}

From 30eba4eddd024141eb43792c961124b7c6059128 Mon Sep 17 00:00:00 2001
From: pospi
Date: Mon, 16 May 2022 19:49:46 +1000
Subject: [PATCH 173/181] remove overly verbose logging in index management
 methods

---
 lib/hdk_semantic_indexes/client/src/lib.rs | 20 --------------------
 zomes/rea_commitment/lib/src/lib.rs | 16 ++++++----------
 2 files changed, 6 insertions(+), 30 deletions(-)

diff --git a/lib/hdk_semantic_indexes/client/src/lib.rs b/lib/hdk_semantic_indexes/client/src/lib.rs
index 847e62ee9..ede05d0c9 100644
--- a/lib/hdk_semantic_indexes/client/src/lib.rs
+++ b/lib/hdk_semantic_indexes/client/src/lib.rs
@@ -243,24 +243,6 @@ pub fn manage_index(
         F: Copy + Fn(C) -> Option,
         G: Copy + Fn(C) -> Option,
 {
-
-    hdk::prelude::debug!("handling call to manage_index, here are the arguments:");
-    hdk::prelude::debug!("source: {:?}", source);
-    hdk::prelude::debug!("dest_addresses: {:?}", dest_addresses);
-    hdk::prelude::debug!("remove_addresses: {:?}", remove_addresses);
-    // hdk::prelude::debug!("remote_permission_id: {:?}", remote_permission_id);
-    let zome_props = hdk::prelude::dna_info()?.properties
-        .try_into()?;
-    let zome_props_2 = hdk::prelude::dna_info()?.properties
-        .try_into()?;
-    let oznfc = origin_zome_name_from_config.clone()(zome_props_2);
-    hdk::prelude::debug!("origin_zome_name_from_config: {:?}", oznfc);
-    hdk::prelude::debug!("origin_fn_name: {:?}", origin_fn_name.as_ref().to_string());
-    let dznfc = dest_zome_name_from_config.clone()(zome_props);
-    hdk::prelude::debug!("dest_zome_name_from_config: {:?}", dznfc);
-    hdk::prelude::debug!("dest_fn_name: {:?}", dest_fn_name.as_ref().to_string());
-    hdk::prelude::debug!("remote_permission_id: {:?}", remote_permission_id.as_ref().to_string());
-
     // altering an index with no targets is a no-op
     if dest_addresses.len() == 0 && remove_addresses.len() == 0 {
         return Ok(vec![])
@@ -269,8 +251,6 @@ pub fn manage_index(
     let sources = vec![source.clone()];
     let targets = prefilter_target_dnas(dest_addresses, remove_addresses)?;
 
-    hdk::prelude::debug!("manage_index::targets: {:?}", targets);
-
     // Manage local index creation / removal
     let empty = vec![];
diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs
index e1049514c..df70e421d 100644
--- a/zomes/rea_commitment/lib/src/lib.rs
+++ b/zomes/rea_commitment/lib/src/lib.rs
@@ -15,7 +15,7 @@ use hdk_records::{
         read_record_entry_by_header,
         update_record,
         delete_record,
-    }, dna_info,
+    },
 };
 use hdk_semantic_indexes_client_lib::*;
@@ -31,20 +31,16 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) -
     let (header_addr, base_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, commitment.to_owned())?;
 
     // handle link fields
-    // :TODO: propogate errors!
-    let dna_i = dna_info()?;
-    hdk::prelude::debug!("dnainfo! {:?}", dna_i);
-
+    // :TODO: propagate errors
     if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &commitment {
         let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?;
         hdk::prelude::debug!("handle_create_commitment::input_of::create_index: {:?}", e);
-    };
-    if let CreateRequest { output_of: MaybeUndefined::Some(output_of), ..
} = &commitment {
-        hdk::prelude::debug!("handle_create_commitment::output_of: {:?}", output_of);
         let e = create_index!(commitment.output_of(output_of), process.committed_outputs(&base_address))?;
         hdk::prelude::debug!("handle_create_commitment::output_of::create_index: {:?}", e);
-    };
-    if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. } = &commitment {
+    };
+    if let CreateRequest { clause_of: MaybeUndefined::Some(clause_of), .. } = &commitment {
         let e = create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address))?;
         hdk::prelude::debug!("handle_create_commitment::clause_of::create_index: {:?}", e);
     };

From 458daed94cd1a924ace0efb4d352e66c7fb7ac12 Mon Sep 17 00:00:00 2001
From: pospi
Date: Mon, 16 May 2022 20:19:16 +1000
Subject: [PATCH 174/181] WIP intermediate state for #264 so as to not cause
 complex config issues.

All index updates & RPC calls now uniformly treated as non-critical and
logged for output to assist in debugging.
---
 zomes/rea_commitment/lib/src/lib.rs | 36 +++++++-----
 zomes/rea_economic_event/lib/src/lib.rs | 24 ++++----
 zomes/rea_economic_resource/lib/src/lib.rs | 9 +--
 .../lib_destination/src/lib.rs | 12 ++--
 zomes/rea_fulfillment/lib_origin/src/lib.rs | 22 ++++---
 zomes/rea_intent/lib/src/lib.rs | 24 ++++----
 zomes/rea_proposed_intent/lib/src/lib.rs | 16 ++---
 zomes/rea_proposed_to/lib/src/lib.rs | 7 ++-
 .../lib_destination/src/lib.rs | 12 ++--
 zomes/rea_satisfaction/lib_origin/src/lib.rs | 58 +++++++++++--------
 10 files changed, 128 insertions(+), 92 deletions(-)

diff --git a/zomes/rea_commitment/lib/src/lib.rs b/zomes/rea_commitment/lib/src/lib.rs
index df70e421d..f5292f443 100644
--- a/zomes/rea_commitment/lib/src/lib.rs
+++ b/zomes/rea_commitment/lib/src/lib.rs
@@ -33,16 +33,16 @@ pub fn handle_create_commitment(entry_def_id: S, commitment: CreateRequest) -
     // handle link fields
     // :TODO: propagate errors
     if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &commitment {
-        let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address))?;
-        hdk::prelude::debug!("handle_create_commitment::input_of::create_index: {:?}", e);
+        let e = create_index!(commitment.input_of(input_of), process.committed_inputs(&base_address));
+        hdk::prelude::debug!("handle_create_commitment::input_of index {:?}", e);
     };
     if let CreateRequest { output_of: MaybeUndefined::Some(output_of), ..
} = &commitment { - let e = create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address))?; - hdk::prelude::debug!("handle_create_commitment::clause_of::create_index: {:?}", e); + let e = create_index!(commitment.clause_of(clause_of), agreement.commitments(&base_address)); + hdk::prelude::debug!("handle_create_commitment::clause_of index {:?}", e); }; // :TODO: pass results from link creation rather than re-reading @@ -66,32 +66,35 @@ pub fn handle_update_commitment(entry_def_id: S, commitment: UpdateRequest) - if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!( + let e = update_index!( commitment .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_inputs(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_commitment::input_of index {:?}", e); } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!( + let e = update_index!( commitment .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.committed_outputs(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_commitment::output_of index {:?}", e); } if new_entry.clause_of != prev_entry.clause_of { let new_value = match &new_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.clause_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!( + let e = update_index!( commitment .clause_of(new_value.as_slice()) .not(prev_value.as_slice()), agreement.commitments(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_commitment::clause_of index {:?}", e); } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) @@ -104,13 +107,16 @@ pub fn handle_delete_commitment(revision_id: HeaderHash) -> RecordAPIResult(entry_def_id: S, event: &EconomicEvent // handle link fields // :TODO: propagate errors https://github.com/h-REA/hREA/issues/264 if let EconomicEventCreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = event { - let e = create_index!(economic_event.input_of(input_of), process.inputs(&base_address))?; - hdk::prelude::debug!("input_of results: {:?}", e); + let e = create_index!(economic_event.input_of(input_of), process.inputs(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::input_of index {:?}", e); }; if let EconomicEventCreateRequest { output_of: MaybeUndefined::Some(output_of), .. } = event { - let e = create_index!(economic_event.output_of(output_of), process.outputs(&base_address))?; - hdk::prelude::debug!("output_of results: {:?}", e); + let e = create_index!(economic_event.output_of(output_of), process.outputs(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::output_of index {:?}", e); }; if let EconomicEventCreateRequest { realization_of: MaybeUndefined::Some(realization_of), .. 
} = event { - let e = create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address))?; - hdk::prelude::debug!("realization_of results: {:?}", e); + let e = create_index!(economic_event.realization_of(realization_of), agreement.economic_events(&base_address)); + hdk::prelude::debug!("handle_create_economic_event_record::realization_of index {:?}", e); }; Ok((revision_id, base_address, entry_resp)) diff --git a/zomes/rea_economic_resource/lib/src/lib.rs b/zomes/rea_economic_resource/lib/src/lib.rs index ada38edec..844427f3a 100644 --- a/zomes/rea_economic_resource/lib/src/lib.rs +++ b/zomes/rea_economic_resource/lib/src/lib.rs @@ -88,11 +88,11 @@ impl API for EconomicResourceZomePermissableDefault { // :NOTE: this will always run- resource without a specification ID would fail entry validation (implicit in the above) if let Some(conforms_to) = resource_spec { let e = create_index!(economic_resource.conforms_to(conforms_to), resource_specification.conforming_resources(&base_address)); - hdk::prelude::debug!("create_inventory_from_event::conforms_to::create_index!: {:?}", e); + hdk::prelude::debug!("create_inventory_from_event::conforms_to index {:?}", e); } if let Some(contained_in) = resource_params.get_contained_in() { - let e = create_index!(economic_resource(&base_address).contained_in(&contained_in))?; - hdk::prelude::debug!("create_inventory_from_event::contained_in::create_index!: {:?}", e); + let e = create_index!(economic_resource(&base_address).contained_in(&contained_in)); + hdk::prelude::debug!("create_inventory_from_event::contained_in index {:?}", e); }; Ok((revision_id, base_address, entry_resp)) @@ -144,7 +144,8 @@ impl API for EconomicResourceZomePermissableDefault { if entry.contained_in != prev_entry.contained_in { let now_contained = if let Some(contained) = &entry.contained_in { vec![contained.clone()] } else { vec![] }; let prev_contained = if let Some(contained) = &prev_entry.contained_in { vec![contained.clone()] } else { vec![] }; - update_index!(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice()))?; + let e = update_index!(economic_resource(&identity_address).contained_in(now_contained.as_slice()).not(prev_contained.as_slice())); + hdk::prelude::debug!("update_economic_resource::contained_in index {:?}", e); } // :TODO: optimise this- should pass results from `replace_direct_index` instead of retrieving from `get_link_fields` where updates diff --git a/zomes/rea_fulfillment/lib_destination/src/lib.rs b/zomes/rea_fulfillment/lib_destination/src/lib.rs index 9b1eb3715..37365a756 100644 --- a/zomes/rea_fulfillment/lib_destination/src/lib.rs +++ b/zomes/rea_fulfillment/lib_destination/src/lib.rs @@ -35,8 +35,8 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - let create_index_results = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), economic_event.fulfills(&fulfillment_address))?; - hdk::prelude::debug!("handle_create_fulfillment::fulfilled_by::create_index!: {:?}", create_index_results); + let e = create_index!(fulfillment.fulfilled_by(fulfillment.get_fulfilled_by()), economic_event.fulfills(&fulfillment_address)); + hdk::prelude::debug!("handle_create_fulfillment::fulfilled_by index (destination) {:?}", e); // :TODO: figure out if necessary/desirable to do 
bidirectional bridging between observation and other planning DNAs @@ -56,12 +56,13 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) let (revision_id, base_address, new_entry, prev_entry): (_, FulfillmentAddress, EntryData, EntryData) = update_record(&entry_def_id, &fulfillment.get_revision_id(), fulfillment.to_owned())?; if new_entry.fulfilled_by != prev_entry.fulfilled_by { - update_index!( + let e = update_index!( fulfillment .fulfilled_by(&vec![new_entry.fulfilled_by.clone()]) .not(&vec![prev_entry.fulfilled_by]), economic_event.fulfills(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_fulfillment::fulfilled_by index (destination) {:?}", e); } construct_response(&base_address, &revision_id, &new_entry) @@ -73,7 +74,8 @@ pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // handle link fields - update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), economic_event.fulfills(&base_address))?; + let e = update_index!(fulfillment.fulfilled_by.not(&vec![fulfillment.fulfilled_by]), economic_event.fulfills(&base_address)); + hdk::prelude::debug!("handle_delete_fulfillment::fulfilled_by index (destination) {:?}", e); delete_record::(&revision_id) } diff --git a/zomes/rea_fulfillment/lib_origin/src/lib.rs b/zomes/rea_fulfillment/lib_origin/src/lib.rs index 8eaafcb0c..d42c3ef5e 100644 --- a/zomes/rea_fulfillment/lib_origin/src/lib.rs +++ b/zomes/rea_fulfillment/lib_origin/src/lib.rs @@ -37,9 +37,9 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) let (revision_id, fulfillment_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, fulfillment.to_owned())?; // link entries in the local DNA - let create_index_results = create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address))?; - hdk::prelude::debug!("handle_create_fulfillment::fulfills::create_index!: {:?}", create_index_results); - + let e = create_index!(fulfillment.fulfills(fulfillment.get_fulfills()), commitment.fulfilled_by(&fulfillment_address)); + hdk::prelude::debug!("handle_create_fulfillment::fulfills index (origin) {:?}", e); + // :TODO: report any error // update in the associated foreign DNA as well let pingback: OtherCellResult = call_zome_method( @@ -47,7 +47,7 @@ pub fn handle_create_fulfillment(entry_def_id: S, fulfillment: CreateRequest) &REPLICATE_CREATE_API_METHOD, CreateParams { fulfillment: fulfillment.to_owned() }, ); - hdk::prelude::debug!("handle_create_fulfillment::call_zome_method::{:?}: {:?}", REPLICATE_CREATE_API_METHOD, pingback); + hdk::prelude::debug!("handle_create_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, pingback); construct_response(&fulfillment_address, &revision_id, &entry_resp) } @@ -66,23 +66,25 @@ pub fn handle_update_fulfillment(entry_def_id: S, fulfillment: UpdateRequest) // update commitment indexes in local DNA if new_entry.fulfills != prev_entry.fulfills { - update_index!( + let e = update_index!( fulfillment .fulfills(&vec![new_entry.fulfills.clone()]) .not(&vec![prev_entry.fulfills]), commitment.fulfilled_by(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_fulfillment::fulfills index (origin) {:?}", e); } // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) if new_entry.fulfilled_by != prev_entry.fulfilled_by { - let _pingback: OtherCellResult = call_zome_method( + let pingback: OtherCellResult = call_zome_method( // 
:TODO: update to intelligently call remote DNAs if new & old target record are not in same network &prev_entry.fulfilled_by, &REPLICATE_UPDATE_API_METHOD, UpdateParams { fulfillment: fulfillment.to_owned() }, ); // :TODO: report any error + hdk::prelude::debug!("handle_update_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_UPDATE_API_METHOD, pingback); } construct_response(&base_address, &revision_id, &new_entry) @@ -93,15 +95,17 @@ pub fn handle_delete_fulfillment(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // update commitment indexes in local DNA - update_index!(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address))?; + let e = update_index!(fulfillment.fulfills.not(&vec![entry.fulfills]), commitment.fulfilled_by(&base_address)); + hdk::prelude::debug!("handle_delete_fulfillment::fulfills index (origin) {:?}", e); // update fulfillment records in remote DNA (and by proxy, event indexes in remote DNA) - let _pingback: OtherCellResult = call_zome_method( + let pingback: OtherCellResult = call_zome_method( &entry.fulfilled_by, &REPLICATE_DELETE_API_METHOD, ByHeader { address: revision_id.to_owned() }, ); // :TODO: report any error + hdk::prelude::debug!("handle_delete_fulfillment::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, pingback); delete_record::(&revision_id) } diff --git a/zomes/rea_intent/lib/src/lib.rs b/zomes/rea_intent/lib/src/lib.rs index 379ce31a0..bf9b43327 100644 --- a/zomes/rea_intent/lib/src/lib.rs +++ b/zomes/rea_intent/lib/src/lib.rs @@ -33,12 +33,12 @@ pub fn handle_create_intent(entry_def_id: S, intent: CreateRequest) -> Record // handle link fields if let CreateRequest { input_of: MaybeUndefined::Some(input_of), .. } = &intent { - let e = create_index!(intent.input_of(input_of), process.intended_inputs(&base_address))?; - hdk::prelude::debug!("handle_create_intent::input_of::create_index!: {:?}", e); + let e = create_index!(intent.input_of(input_of), process.intended_inputs(&base_address)); + hdk::prelude::debug!("handle_create_intent::input_of index {:?}", e); }; if let CreateRequest { output_of: MaybeUndefined::Some(output_of), .. 
} = &intent { - let e = create_index!(intent.output_of(output_of), process.intended_outputs(&base_address))?; - hdk::prelude::debug!("handle_create_intent::output_of::create_index!: {:?}", e); + let e = create_index!(intent.output_of(output_of), process.intended_outputs(&base_address)); + hdk::prelude::debug!("handle_create_intent::output_of index {:?}", e); }; // return entire record structure @@ -62,22 +62,24 @@ pub fn handle_update_intent(entry_def_id: S, intent: UpdateRequest) -> Record if new_entry.input_of != prev_entry.input_of { let new_value = match &new_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.input_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!( + let e = update_index!( intent .input_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_inputs(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_intent::input_of index {:?}", e); } if new_entry.output_of != prev_entry.output_of { let new_value = match &new_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; let prev_value = match &prev_entry.output_of { Some(val) => vec![val.to_owned()], None => vec![] }; - update_index!( + let e = update_index!( intent .output_of(new_value.as_slice()) .not(prev_value.as_slice()), process.intended_outputs(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_intent::output_of index {:?}", e); } construct_response(&base_address, &revision_id, &new_entry, get_link_fields(&base_address)?) @@ -90,10 +92,12 @@ pub fn handle_delete_intent(revision_id: HeaderHash) -> RecordAPIResult // handle link fields if let Some(process_address) = entry.input_of { - update_index!(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address))?; + let e = update_index!(intent.input_of.not(&vec![process_address]), process.intended_inputs(&base_address)); + hdk::prelude::debug!("handle_delete_intent::input_of index {:?}", e); } if let Some(process_address) = entry.output_of { - update_index!(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address))?; + let e = update_index!(intent.output_of.not(&vec![process_address]), process.intended_outputs(&base_address)); + hdk::prelude::debug!("handle_delete_intent::output_of index {:?}", e); } // delete entry last, as it must be present in order for links to be removed diff --git a/zomes/rea_proposed_intent/lib/src/lib.rs b/zomes/rea_proposed_intent/lib/src/lib.rs index b943e4fa6..7bb7388f9 100644 --- a/zomes/rea_proposed_intent/lib/src/lib.rs +++ b/zomes/rea_proposed_intent/lib/src/lib.rs @@ -30,10 +30,10 @@ pub fn handle_create_proposed_intent(entry_def_id: S, proposed_intent: Create let (revision_id, base_address, entry_resp): (_, ProposedIntentAddress, EntryData) = create_record(&entry_def_id, proposed_intent.to_owned())?; // handle link fields - let r1 = create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address))?; - hdk::prelude::debug!("handle_create_proposed_intent::published_in::create_index!: {:?}", r1); - let r2 = create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address))?; - hdk::prelude::debug!("handle_create_proposed_intent::publishes::create_index!: {:?}", r2); + let r1 = create_index!(proposed_intent.published_in(&proposed_intent.published_in), proposal.publishes(&base_address)); + hdk::prelude::debug!("handle_create_proposed_intent::published_in index {:?}", r1); + 
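+    // (as of #264, index results are logged rather than short-circuited with `?`,
+    //  so a failed cross-zome index write no longer aborts writing the record)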
let r2 = create_index!(proposed_intent.publishes(proposed_intent.publishes.to_owned()), intent.proposed_in(&base_address)); + hdk::prelude::debug!("handle_create_proposed_intent::publishes index {:?}", r2); Ok(construct_response(&base_address, &revision_id, &entry_resp)) } @@ -51,15 +51,17 @@ pub fn handle_delete_proposed_intent(revision_id: &HeaderHash) -> RecordAPIResul // Notify indexing zomes in local DNA (& validate). // Allows authors of indexing modules to intervene in the deletion of a record. - update_index!(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address))?; + let r1 = update_index!(proposed_intent.published_in.not(&vec![entry.published_in]), proposal.publishes(&base_address)); + hdk::prelude::debug!("handle_delete_proposed_intent::published_in index {:?}", r1); // manage record deletion let res = delete_record::(&revision_id); // Update in associated foreign DNAs as well. - // :TODO: In this pattern, foreign cells can also intervene in record deletion, and cause rollback. + // :TODO: If we caught errors here, foreign cells can also intervene in record deletion, and cause rollback. // Is this desirable? Should the behaviour be configurable? - update_index!(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address))?; + let r2 = update_index!(proposed_intent.publishes.not(&vec![entry.publishes]), intent.proposed_in(&base_address)); + hdk::prelude::debug!("handle_delete_proposed_intent::publishes index {:?}", r2); res } diff --git a/zomes/rea_proposed_to/lib/src/lib.rs b/zomes/rea_proposed_to/lib/src/lib.rs index 84fa444b9..963aea06c 100644 --- a/zomes/rea_proposed_to/lib/src/lib.rs +++ b/zomes/rea_proposed_to/lib/src/lib.rs @@ -27,8 +27,8 @@ pub fn handle_create_proposed_to(entry_def_id: S, proposed_to: CreateRequest) let (revision_id, base_address, entry_resp): (_, ProposedToAddress, EntryData) = create_record(&entry_def_id, proposed_to.to_owned())?; // handle link fields - let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.published_to(&base_address))?; - hdk::prelude::debug!("handle_create_proposed_to::proposed::create_index!: {:?}", r1); + let r1 = create_index!(proposed_to.proposed(&proposed_to.proposed), proposal.published_to(&base_address)); + hdk::prelude::debug!("handle_create_proposed_to::proposed index {:?}", r1); // :TODO: create index for retrieving all proposals for an agent @@ -46,7 +46,8 @@ pub fn handle_delete_proposed_to(revision_id: &HeaderHash) -> RecordAPIResult(&revision_id)?; - update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.published_to(&base_address))?; + let e = update_index!(proposed_to.proposed.not(&vec![entry.proposed]), proposal.published_to(&base_address)); + hdk::prelude::debug!("handle_delete_proposed_to::proposed index {:?}", e); delete_record::(&revision_id) } diff --git a/zomes/rea_satisfaction/lib_destination/src/lib.rs b/zomes/rea_satisfaction/lib_destination/src/lib.rs index 13cf3b5f2..a9a6fd425 100644 --- a/zomes/rea_satisfaction/lib_destination/src/lib.rs +++ b/zomes/rea_satisfaction/lib_destination/src/lib.rs @@ -35,8 +35,8 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - let r1 = create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address))?; - 
hdk::prelude::debug!("destination::handle_create_satisfaction::satisfied_by::create_index!: {:?}", r1); + let r1 = create_index!(satisfaction.satisfied_by(satisfaction.get_satisfied_by()), economic_event.satisfies(&satisfaction_address)); + hdk::prelude::debug!("handle_create_satisfaction::satisfied_by index (destination) {:?}", r1); // :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs @@ -56,12 +56,13 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques let (revision_id, base_address, new_entry, prev_entry): (_, SatisfactionAddress, EntryData, EntryData) = update_record(&entry_def_id, &satisfaction.get_revision_id(), satisfaction.to_owned())?; if new_entry.satisfied_by != prev_entry.satisfied_by { - update_index!( + let e = update_index!( satisfaction .satisfied_by(&vec![new_entry.satisfied_by.to_owned()]) .not(&vec![prev_entry.satisfied_by]), economic_event.satisfies(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (destination) {:?}", e); } construct_response(&base_address, &revision_id, &new_entry) @@ -73,7 +74,8 @@ pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // handle link fields - update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address))?; + let e = update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), economic_event.satisfies(&base_address)); + hdk::prelude::debug!("handle_delete_satisfaction::satisfied_by index (destination) {:?}", e); delete_record::(&revision_id) } diff --git a/zomes/rea_satisfaction/lib_origin/src/lib.rs b/zomes/rea_satisfaction/lib_origin/src/lib.rs index 3965e1481..aa93af95c 100644 --- a/zomes/rea_satisfaction/lib_origin/src/lib.rs +++ b/zomes/rea_satisfaction/lib_origin/src/lib.rs @@ -13,7 +13,7 @@ use paste::paste; use hdk::prelude::*; use crate::holo_hash::DnaHash; use hdk_records::{ - RecordAPIResult, + RecordAPIResult, OtherCellResult, records::{ create_record, read_record_entry, @@ -39,26 +39,26 @@ pub fn handle_create_satisfaction(entry_def_id: S, satisfaction: CreateReques let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?; // link entries in the local DNA - let r1 = create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address))?; - hdk::prelude::debug!("origin::handle_create_satisfaction::satisfies::create_index!: {:?}", r1); + let r1 = create_index!(satisfaction.satisfies(satisfaction.get_satisfies()), intent.satisfied_by(&satisfaction_address)); + hdk::prelude::debug!("handle_create_satisfaction::satisfies index (origin) {:?}", r1); // link entries which may be local or remote let event_or_commitment = satisfaction.get_satisfied_by(); if is_satisfiedby_local_commitment(event_or_commitment)? 
{ // links to local commitment, create link index pair - let r2 = create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address))?; - hdk::prelude::debug!("origin::handle_create_satisfaction::satisfied_by::create_index!: {:?}", r2); + let r2 = create_index!(satisfaction.satisfied_by(event_or_commitment), commitment.satisfies(&satisfaction_address)); + hdk::prelude::debug!("handle_create_satisfaction::satisfied_by index (origin) {:?}", r2); } else { // links to remote event, ping associated foreign DNA & fail if there's an error // :TODO: consider the implications of this in loosely coordinated multi-network spaces // we assign a type to the response so that call_zome_method can // effectively deserialize the response without failing - let _result: ResponseData = call_zome_method( + let result: OtherCellResult = call_zome_method( event_or_commitment, &REPLICATE_CREATE_API_METHOD, CreateParams { satisfaction: satisfaction.to_owned() }, - )?; - // hdk::prelude::debug!("origin::handle_create_satisfaction::call_zome_method::{:?}: {:?}", REPLICATE_CREATE_API_METHOD, result); + ); + hdk::prelude::debug!("handle_create_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, result); } construct_response(&satisfaction_address, &revision_id, &entry_resp) @@ -78,12 +78,13 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques // update intent indexes in local DNA if new_entry.satisfies != prev_entry.satisfies { - update_index!( + let e = update_index!( satisfaction .satisfies(&vec![new_entry.satisfies.to_owned()]) .not(&vec![prev_entry.satisfies]), intent.satisfied_by(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::satisfies index (origin) {:?}", e); } // update commitment / event indexes in local and/or remote DNA @@ -95,39 +96,44 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques if same_dna { if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? { // both values were local, update the index directly - update_index!( + let e = update_index!( satisfaction .satisfied_by(&vec![new_entry.satisfied_by.to_owned()]) .not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address) - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e); } else { // both values were remote and in the same DNA, forward the update - call_zome_method( + let result: OtherCellResult = call_zome_method( &prev_entry.satisfied_by, &REPLICATE_UPDATE_API_METHOD, UpdateParams { satisfaction: satisfaction.to_owned() }, - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_UPDATE_API_METHOD, result); } } else { if is_satisfiedby_local_commitment(&prev_entry.satisfied_by)? 
{ // previous value was local, clear the index directly - update_index!(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address))?; + let e = update_index!(satisfaction.satisfied_by.not(&vec![prev_entry.satisfied_by]), commitment.satisfies(&base_address)); + hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e); } else { // previous value was remote, handle the remote update as a deletion - call_zome_method( + let result: OtherCellResult = call_zome_method( &prev_entry.satisfied_by, &REPLICATE_DELETE_API_METHOD, ByHeader { address: satisfaction.get_revision_id().to_owned() }, - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, result); } if is_satisfiedby_local_commitment(&new_entry.satisfied_by)? { // new value was local, add the index directly - update_index!(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address))?; + let e = update_index!(satisfaction.satisfied_by(&vec![new_entry.satisfied_by.to_owned()]), commitment.satisfies(&base_address)); + hdk::prelude::debug!("handle_update_satisfaction::satisfied_by index (origin) {:?}", e); } else { // new value was remote, handle the remote update as a creation - call_zome_method( + let result: OtherCellResult = call_zome_method( &new_entry.satisfied_by, &REPLICATE_CREATE_API_METHOD, CreateParams { satisfaction: CreateRequest { @@ -137,7 +143,8 @@ pub fn handle_update_satisfaction(entry_def_id: S, satisfaction: UpdateReques effort_quantity: new_entry.effort_quantity.to_owned().into(), note: new_entry.note.to_owned().into(), } }, - )?; + ); + hdk::prelude::debug!("handle_update_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_CREATE_API_METHOD, result); } } @@ -152,20 +159,23 @@ pub fn handle_delete_satisfaction(revision_id: HeaderHash) -> RecordAPIResult(&revision_id)?; // update intent indexes in local DNA - update_index!(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address))?; + let e = update_index!(satisfaction.satisfies.not(&vec![entry.satisfies]), intent.satisfied_by(&base_address)); + hdk::prelude::debug!("handle_delete_satisfaction::satisfies index (origin) {:?}", e); // update commitment & event indexes in local or remote DNAs let event_or_commitment = entry.satisfied_by.to_owned(); if is_satisfiedby_local_commitment(&event_or_commitment)? 
{ - update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address))?; + let e = update_index!(satisfaction.satisfied_by.not(&vec![entry.satisfied_by]), commitment.satisfies(&base_address)); + hdk::prelude::debug!("handle_delete_satisfaction::satisfied_by index (origin) {:?}", e); } else { // links to remote event, ping associated foreign DNA & fail if there's an error // :TODO: consider the implications of this in loosely coordinated multi-network spaces - call_zome_method( + let result: OtherCellResult = call_zome_method( &event_or_commitment, &REPLICATE_DELETE_API_METHOD, ByHeader { address: revision_id.to_owned() }, - )?; + ); + hdk::prelude::debug!("handle_delete_satisfaction::call_zome_method::{:?} {:?}", REPLICATE_DELETE_API_METHOD, result); } delete_record::(&revision_id) From aefedb072ff3d06c6bcc465a207085f8bf9bfe4c Mon Sep 17 00:00:00 2001 From: pospi Date: Mon, 16 May 2022 20:28:52 +1000 Subject: [PATCH 175/181] remove unneeded storage consts as per #263 --- zomes/rea_agreement/storage_consts/src/lib.rs | 6 ------ .../rea_commitment/storage_consts/src/lib.rs | 20 ------------------- .../storage_consts/src/lib.rs | 7 ------- .../storage_consts/src/lib.rs | 13 ------------ .../rea_fulfillment/storage_consts/src/lib.rs | 10 ---------- zomes/rea_intent/storage_consts/src/lib.rs | 15 -------------- zomes/rea_process/storage_consts/src/lib.rs | 13 ------------ .../storage_consts/src/lib.rs | 2 -- zomes/rea_proposal/storage_consts/src/lib.rs | 6 ------ .../storage_consts/src/lib.rs | 13 ------------ .../rea_proposed_to/storage_consts/src/lib.rs | 6 ------ .../storage_consts/src/lib.rs | 4 ---- .../storage_consts/src/lib.rs | 12 ----------- 13 files changed, 127 deletions(-) diff --git a/zomes/rea_agreement/storage_consts/src/lib.rs b/zomes/rea_agreement/storage_consts/src/lib.rs index a58aadcd0..2b6a3834b 100644 --- a/zomes/rea_agreement/storage_consts/src/lib.rs +++ b/zomes/rea_agreement/storage_consts/src/lib.rs @@ -6,9 +6,3 @@ * @package Holo-REA */ pub const AGREEMENT_ENTRY_TYPE: &str = "vf_agreement"; - -pub const AGREEMENT_EVENTS_LINK_TAG: &str = "economic_events"; -pub const AGREEMENT_COMMITMENTS_LINK_TAG: &str = "commitments"; - -pub const AGREEMENT_COMMITMENTS_READ_API_METHOD: &str = "_internal_read_agreement_clauses"; -pub const AGREEMENT_EVENTS_READ_API_METHOD: &str = "_internal_read_agreement_realizations"; diff --git a/zomes/rea_commitment/storage_consts/src/lib.rs b/zomes/rea_commitment/storage_consts/src/lib.rs index b4cd72b79..afacedab3 100644 --- a/zomes/rea_commitment/storage_consts/src/lib.rs +++ b/zomes/rea_commitment/storage_consts/src/lib.rs @@ -6,23 +6,3 @@ * @package Holo-REA */ pub const COMMITMENT_ENTRY_TYPE: &str = "vf_commitment"; -pub const COMMITMENT_FULFILLEDBY_LINK_TAG: &str = "fulfilled_by"; -pub const COMMITMENT_SATISFIES_LINK_TAG: &str = "satisfies"; -pub const COMMITMENT_INPUT_OF_LINK_TAG: &str = "input_of"; -pub const COMMITMENT_OUTPUT_OF_LINK_TAG: &str = "output_of"; -pub const COMMITMENT_CLAUSE_OF_LINK_TAG: &str = "clause_of"; - -pub const COMMITMENT_FULFILLEDBY_READ_API_METHOD: &str = "_internal_read_commitment_fulfillments"; -pub const COMMITMENT_SATISFIES_READ_API_METHOD: &str = "_internal_read_commitment_satisfactions"; - -pub const COMMITMENT_INPUT_READ_API_METHOD: &str = "_internal_read_commitment_process_inputs"; -pub const COMMITMENT_INPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_inputs"; -pub const PROCESS_INPUT_INDEXING_API_METHOD: &str = "index_process_input_commitments"; - -pub 
-pub const COMMITMENT_OUTPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_outputs";
-pub const PROCESS_OUTPUT_INDEXING_API_METHOD: &str = "index_process_output_commitments";
-
-pub const COMMITMENT_CLAUSEOF_READ_API_METHOD: &str = "_internal_read_commitment_agreements";
-pub const COMMITMENT_CLAUSEOF_INDEXING_API_METHOD: &str = "_internal_reindex_agreement_clauses";
-pub const AGREEMENT_CLAUSE_INDEXING_API_METHOD: &str = "index_agreement_clauses";
diff --git a/zomes/rea_economic_event/storage_consts/src/lib.rs b/zomes/rea_economic_event/storage_consts/src/lib.rs
index fd420ed27..3dcd11561 100644
--- a/zomes/rea_economic_event/storage_consts/src/lib.rs
+++ b/zomes/rea_economic_event/storage_consts/src/lib.rs
@@ -7,12 +7,5 @@
  */
 pub const EVENT_ENTRY_TYPE: &str = "vf_economic_event";
 
-pub const EVENT_FULFILLS_LINK_TAG: &str = "fulfills";
-pub const EVENT_SATISFIES_LINK_TAG: &str = "satisfies";
-pub const EVENT_INPUT_OF_LINK_TAG: &str = "input_of";
-pub const EVENT_OUTPUT_OF_LINK_TAG: &str = "output_of";
-pub const EVENT_REALIZATION_OF_LINK_TAG: &str = "realization_of";
-pub const EVENT_AFFECTS_RESOURCE_LINK_TAG: &str = "affects";
-
 pub const INVENTORY_CREATION_API_METHOD: &str = "_internal_create_inventory";
 pub const INVENTORY_UPDATE_API_METHOD: &str = "_internal_update_inventory";
diff --git a/zomes/rea_economic_resource/storage_consts/src/lib.rs b/zomes/rea_economic_resource/storage_consts/src/lib.rs
index 1cb0713a3..f941ed0c2 100644
--- a/zomes/rea_economic_resource/storage_consts/src/lib.rs
+++ b/zomes/rea_economic_resource/storage_consts/src/lib.rs
@@ -6,16 +6,3 @@
  * @package Holo-REA
 */
 pub const RESOURCE_ENTRY_TYPE: &str = "vf_economic_resource";
-
-pub const RESOURCE_CONTAINS_LINK_TAG: &str = "contains";
-pub const RESOURCE_CONTAINED_IN_LINK_TAG: &str = "contained_in";
-pub const RESOURCE_AFFECTED_BY_EVENT_LINK_TAG: &str = "affected_by";
-pub const RESOURCE_CONFORMS_TO_LINK_TAG: &str = "conforms_to";
-
-pub const RESOURCE_CONTAINS_INDEXING_API_METHOD: &str = "_internal_reindex_contained_resources";
-pub const RESOURCE_CONTAINS_READ_API_METHOD: &str = "_internal_read_contained_resources";
-pub const RESOURCE_CONTAINEDIN_INDEXING_API_METHOD: &str = "_internal_reindex_container_resources";
-pub const RESOURCE_CONTAINEDIN_READ_API_METHOD: &str = "_internal_read_container_resource";
-pub const RESOURCE_AFFECTED_BY_READ_API_METHOD: &str = "_internal_read_affecting_events";
-pub const RESOURCE_SPECIFICATION_RESOURCES_INDEXING_API_METHOD: &str = "index_resource_specification_resources";
-pub const RESOURCE_CONFORMSTO_INDEXING_API_METHOD: &str = "_internal_reindex_resource_specifications";
diff --git a/zomes/rea_fulfillment/storage_consts/src/lib.rs b/zomes/rea_fulfillment/storage_consts/src/lib.rs
index 40ed7767a..cc6e11f81 100644
--- a/zomes/rea_fulfillment/storage_consts/src/lib.rs
+++ b/zomes/rea_fulfillment/storage_consts/src/lib.rs
@@ -5,18 +5,8 @@
  *
  * @package Holo-REA
 */
-pub const FULFILLMENT_BASE_ENTRY_TYPE: &str = "vf_fulfillment_baseurl";
-pub const FULFILLMENT_INITIAL_ENTRY_LINK_TYPE: &str = "vf_fulfillment_entry";
 pub const FULFILLMENT_ENTRY_TYPE: &str = "vf_fulfillment";
-pub const FULFILLMENT_FULFILLS_LINK_TYPE: &str = "vf_fulfillment_fulfills";
-pub const FULFILLMENT_FULFILLS_LINK_TAG: &str = "fulfills";
-pub const FULFILLMENT_FULFILLEDBY_LINK_TYPE: &str = "vf_fulfillment_fulfilled_by";
-pub const FULFILLMENT_FULFILLEDBY_LINK_TAG: &str = "fulfilled_by";
 
 pub const REPLICATE_CREATE_API_METHOD: &str = "create_fulfillment";
 pub const REPLICATE_UPDATE_API_METHOD: &str = "update_fulfillment";
 pub const REPLICATE_DELETE_API_METHOD: &str = "delete_fulfillment";
-pub const COMMITMENT_FULFILLEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_fulfillments";
-pub const FULFILLMENT_FULFILLS_INDEXING_API_METHOD: &str = "_internal_reindex_commitments";
-pub const EVENT_FULFILLS_INDEXING_API_METHOD: &str = "_internal_reindex_fulfillments";
-pub const FULFILLMENT_FULFILLEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_events";
diff --git a/zomes/rea_intent/storage_consts/src/lib.rs b/zomes/rea_intent/storage_consts/src/lib.rs
index 3d9e4fe24..4a6407015 100644
--- a/zomes/rea_intent/storage_consts/src/lib.rs
+++ b/zomes/rea_intent/storage_consts/src/lib.rs
@@ -6,18 +6,3 @@
  * @package Holo-REA
 */
 pub const INTENT_ENTRY_TYPE: &str = "vf_intent";
-pub const INTENT_SATISFIEDBY_LINK_TAG: &str = "satisfied_by";
-pub const INTENT_INPUT_OF_LINK_TAG: &str = "input_of";
-pub const INTENT_OUTPUT_OF_LINK_TAG: &str = "output_of";
-
-pub const INTENT_PUBLISHED_IN_LINK_TAG: &str = "published_in";
-
-pub const INTENT_SATISFIEDBY_READ_API_METHOD: &str = "_internal_read_intent_satisfactions";
-
-pub const INTENT_INPUT_READ_API_METHOD: &str = "_internal_read_intent_process_inputs";
-pub const INTENT_INPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_inputs";
-pub const PROCESS_INPUT_INDEXING_API_METHOD: &str = "index_process_input_intents";
-
-pub const INTENT_OUTPUT_READ_API_METHOD: &str = "_internal_read_intent_process_outputs";
-pub const INTENT_OUTPUT_INDEXING_API_METHOD: &str = "_internal_reindex_process_outputs";
-pub const PROCESS_OUTPUT_INDEXING_API_METHOD: &str = "index_process_output_intents";
diff --git a/zomes/rea_process/storage_consts/src/lib.rs b/zomes/rea_process/storage_consts/src/lib.rs
index 5ceb2ef13..df9c6da2a 100644
--- a/zomes/rea_process/storage_consts/src/lib.rs
+++ b/zomes/rea_process/storage_consts/src/lib.rs
@@ -6,16 +6,3 @@
  * @package Holo-REA
 */
 pub const PROCESS_ENTRY_TYPE: &str = "vf_process";
-pub const PROCESS_EVENT_INPUTS_LINK_TAG: &str = "inputs";
-pub const PROCESS_EVENT_OUTPUTS_LINK_TAG: &str = "outputs";
-pub const PROCESS_COMMITMENT_INPUTS_LINK_TAG: &str = "committed_inputs";
-pub const PROCESS_COMMITMENT_OUTPUTS_LINK_TAG: &str = "committed_outputs";
-pub const PROCESS_INTENT_INPUTS_LINK_TAG: &str = "intended_inputs";
-pub const PROCESS_INTENT_OUTPUTS_LINK_TAG: &str = "intended_outputs";
-
-pub const PROCESS_EVENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_inputs";
-pub const PROCESS_EVENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_outputs";
-pub const PROCESS_COMMITMENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_committed_inputs";
-pub const PROCESS_COMMITMENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_committed_outputs";
-pub const PROCESS_INTENT_INPUTS_READ_API_METHOD: &str = "_internal_read_process_intended_inputs";
-pub const PROCESS_INTENT_OUTPUTS_READ_API_METHOD: &str = "_internal_read_process_intended_outputs";
diff --git a/zomes/rea_process_specification/storage_consts/src/lib.rs b/zomes/rea_process_specification/storage_consts/src/lib.rs
index 30d6c6efd..bd123619a 100644
--- a/zomes/rea_process_specification/storage_consts/src/lib.rs
+++ b/zomes/rea_process_specification/storage_consts/src/lib.rs
@@ -6,5 +6,3 @@
  * @package Holo-REA
 */
 pub const PROCESS_SPECIFICATION_ENTRY_TYPE: &str = "vf_process_specification";
-pub const PROCESS_SPECIFICATION_BASE_ENTRY_TYPE: &str = "vf_process_specification_baseurl";
-pub const PROCESS_SPECIFICATION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_process_specification_entry";
diff --git a/zomes/rea_proposal/storage_consts/src/lib.rs b/zomes/rea_proposal/storage_consts/src/lib.rs
index 09fbf555c..661fcf894 100644
--- a/zomes/rea_proposal/storage_consts/src/lib.rs
+++ b/zomes/rea_proposal/storage_consts/src/lib.rs
@@ -6,9 +6,3 @@
  * @package Holo-REA
 */
 pub const PROPOSAL_ENTRY_TYPE: &str = "vf_proposal";
-
-pub const PROPOSAL_PUBLISHES_LINK_TAG: &str = "publishes";
-pub const PROPOSAL_PUBLISHED_TO_LINK_TAG: &str = "published_to";
-
-pub const PROPOSAL_PUBLISHES_READ_API_METHOD: &str = "_internal_read_proposal_proposed_intents";
-pub const PROPOSAL_PUBLISHED_TO_READ_API_METHOD: &str = "_internal_read_proposal_participants";
diff --git a/zomes/rea_proposed_intent/storage_consts/src/lib.rs b/zomes/rea_proposed_intent/storage_consts/src/lib.rs
index 422189d64..17ca2b867 100644
--- a/zomes/rea_proposed_intent/storage_consts/src/lib.rs
+++ b/zomes/rea_proposed_intent/storage_consts/src/lib.rs
@@ -6,16 +6,3 @@
  * @package Holo-REA
 */
 pub const PROPOSED_INTENT_ENTRY_TYPE: &str = "vf_proposed_intent";
-
-pub const PROPOSED_INTENT_PUBLISHED_IN_LINK_TYPE: &str = "vf_proposed_intent_published_in";
-pub const PROPOSED_INTENT_PUBLISHED_IN_LINK_TAG: &str = "published_in";
-
-pub const PROPOSED_INTENT_PUBLISHES_LINK_TYPE: &str = "vf_proposed_intent_publishes";
-pub const PROPOSED_INTENT_PUBLISHES_LINK_TAG: &str = "publishes";
-
-pub const PROPOSED_INTENT_PROPOSAL_INDEXING_API_METHOD: &str = "_internal_reindex_proposals";
-pub const PROPOSAL_PROPOSED_INTENT_INDEXING_API_METHOD: &str = "_internal_reindex_proposed_intents";
-
-pub const INTENT_PUBLISHEDIN_INDEXING_API_METHOD: &str = "index_intent_proposals";
-
-pub const PROPOSED_INTENT_PROPOSES_INDEXING_API_METHOD: &str = "_internal_index_";
diff --git a/zomes/rea_proposed_to/storage_consts/src/lib.rs b/zomes/rea_proposed_to/storage_consts/src/lib.rs
index d365475ac..6bf8fa893 100644
--- a/zomes/rea_proposed_to/storage_consts/src/lib.rs
+++ b/zomes/rea_proposed_to/storage_consts/src/lib.rs
@@ -6,9 +6,3 @@
  * @package Holo-REA
 */
 pub const PROPOSED_TO_ENTRY_TYPE: &str = "vf_proposed_to";
-
-pub const PROPOSED_TO_PROPOSED_LINK_TAG: &str = "proposed";
-pub const PROPOSED_TO_PROPOSED_TO_LINK_TAG: &str = "proposed_to";
-
-pub const PROPOSED_TO_PROPOSAL_INDEXING_API_METHOD: &str = "_internal_reindex_proposals";
-pub const PROPOSAL_PROPOSED_TO_INDEXING_API_METHOD: &str = "_internal_reindex_proposed_to";
diff --git a/zomes/rea_resource_specification/storage_consts/src/lib.rs b/zomes/rea_resource_specification/storage_consts/src/lib.rs
index e3562f009..72474370a 100644
--- a/zomes/rea_resource_specification/storage_consts/src/lib.rs
+++ b/zomes/rea_resource_specification/storage_consts/src/lib.rs
@@ -6,7 +6,3 @@
  * @package Holo-REA
 */
 pub const ECONOMIC_RESOURCE_SPECIFICATION_ENTRY_TYPE: &str = "vf_resource_specification";
-pub const ECONOMIC_RESOURCE_SPECIFICATION_BASE_ENTRY_TYPE: &str = "vf_resource_specification_baseurl";
-pub const ECONOMIC_RESOURCE_SPECIFICATION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_resource_specification_entry";
-pub const RESOURCE_SPECIFICATION_CONFORMING_RESOURCE_LINK_TYPE: &str = "vf_resource_specification_conforming_resource";
-pub const RESOURCE_SPECIFICATION_CONFORMING_RESOURCE_LINK_TAG: &str = "conforming_resource";
diff --git a/zomes/rea_satisfaction/storage_consts/src/lib.rs b/zomes/rea_satisfaction/storage_consts/src/lib.rs
index cf96092bd..0b8695a4f 100644
--- a/zomes/rea_satisfaction/storage_consts/src/lib.rs
+++ b/zomes/rea_satisfaction/storage_consts/src/lib.rs
@@ -5,20 +5,8 @@
  *
  * @package Holo-REA
 */
-pub const SATISFACTION_BASE_ENTRY_TYPE: &str = "vf_satisfaction_baseurl";
-pub const SATISFACTION_INITIAL_ENTRY_LINK_TYPE: &str = "vf_satisfaction_entry";
 pub const SATISFACTION_ENTRY_TYPE: &str = "vf_satisfaction";
-pub const SATISFACTION_SATISFIES_LINK_TYPE: &str = "vf_satisfaction_satisfies";
-pub const SATISFACTION_SATISFIES_LINK_TAG: &str = "satisfies";
-pub const SATISFACTION_SATISFIEDBY_LINK_TYPE: &str = "vf_satisfaction_satisfied_by";
-pub const SATISFACTION_SATISFIEDBY_LINK_TAG: &str = "satisfied_by";
 
 pub const REPLICATE_CREATE_API_METHOD: &str = "create_satisfaction";
 pub const REPLICATE_UPDATE_API_METHOD: &str = "update_satisfaction";
 pub const REPLICATE_DELETE_API_METHOD: &str = "delete_satisfaction";
-pub const CHECK_COMMITMENT_API_METHOD: &str = "get_commitment";
-pub const INTENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const COMMITMENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const EVENT_INDEXING_API_METHOD: &str = "_internal_reindex_satisfactions";
-pub const SATISFACTION_SATISFIEDBY_INDEXING_API_METHOD: &str = "_internal_reindex_satisfiedby"; // :NOTE: same in both observation and planning zome APIs
-pub const SATISFACTION_SATISFIES_INDEXING_API_METHOD: &str = "_internal_reindex_intents";

From 7dc7e8871c05aee3087c46cd462b910e181eb34b Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 09:56:07 -0700
Subject: [PATCH 176/181] Update test.yml

---
 .github/workflows/test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3869c0112..f55396481 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -64,5 +64,5 @@ jobs:
       - name: Prepare Nix environment
         run: nix-shell --command "echo Completed"
 
-      - run: nix-shell --pure --run 'cargo build'
-      - run: nix-shell --pure --run 'cargo test'
+      - name: Execute integration tests
+      - run: nix-shell --pure --run 'pnpm install && npm run build && npm run test:integration'

From 098ebe67af87754293bb4a911e72d11642935051 Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 09:57:28 -0700
Subject: [PATCH 177/181] Update test.yml

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index f55396481..520ccab15 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -65,4 +65,4 @@ jobs:
         run: nix-shell --command "echo Completed"
 
       - name: Execute integration tests
-      - run: nix-shell --pure --run 'pnpm install && npm run build && npm run test:integration'
+        run: nix-shell --pure --run 'pnpm install && npm run build && npm run test:integration'

From 3d7de3157a640621d5f884718bfd12cc2d8f9322 Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 12:19:43 -0700
Subject: [PATCH 178/181] Update test.yml

---
 .github/workflows/test.yml | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 520ccab15..853a4d3c1 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -64,5 +64,11 @@ jobs:
       - name: Prepare Nix environment
         run: nix-shell --command "echo Completed"
 
-      - name: Execute integration tests
-        run: nix-shell --pure --run 'pnpm install && npm run build && npm run test:integration'
+      - name: Install pnpm dependencies
+        run: nix-shell --pure --run 'pnpm install'
+
+      - name: Build WASM and typescript
+        run: nix-shell --pure --run 'npm run build'
+
+      - name: Run integration tests
+        run: nix-shell --pure --run 'npm run test:integration'

From aac3c89c22912413dc0429bef86831a597fa9cdb Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 16:09:49 -0700
Subject: [PATCH 179/181] increase timeout

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 853a4d3c1..c42cc0698 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -4,7 +4,7 @@ on: [push, pull_request]
 
 jobs:
   checks:
-    timeout-minutes: 45
+    timeout-minutes: 55
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:

From e51e2666294010b81cca2e2340661df0502e5f21 Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 16:11:19 -0700
Subject: [PATCH 180/181] Delete config.yml

---
 .circleci/config.yml | 18 ------------------
 1 file changed, 18 deletions(-)
 delete mode 100644 .circleci/config.yml

diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index f91113725..000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-version: 2
-
-jobs:
-  build:
-    docker:
-      - image: holochain/holonix:latest
-    steps:
-      - checkout
-      - run:
-          name: pnpm setup
-          command: nix-shell --run 'npm i -g pnpm'
-          no_output_timeout: 30s
-      - run:
-          name: sim2h server
-          command: nix-shell --run 'npm run dht:sim2h'
-          background: true
-          no_output_timeout: 20m
-      - run: nix-shell --run 'pnpm install && npm run build && npm run test:integration:test'

From 2339d601bf4066612924585579b55679fe04196a Mon Sep 17 00:00:00 2001
From: Connor Turland
Date: Thu, 19 May 2022 16:11:52 -0700
Subject: [PATCH 181/181] Delete .travis.yml

---
 .travis.yml | 26 --------------------------
 1 file changed, 26 deletions(-)
 delete mode 100644 .travis.yml

diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 4d96eaae1..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-language: nix
-before_script: nix-shell --run 'npm i -g pnpm' && nix-shell --run 'npm run dht:sim2h &'
-script: nix-shell --run 'pnpm install --network-concurrency 1 && npm run build && npm run test:integration:test'
-git:
-  quiet: true
-  depth: false
-# disable default submodule behaviour
-  submodules: false
-# replace submodule URIs with HTTPS (public) ones, then clone
-before_install:
-  - sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules
-  - git submodule update --init
-  - pushd thirdparty/happ-agent-registration
-  - sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules
-  - git submodule update --init
-  - popd
-branches:
-  only:
-    - master
-    # :NOTE: pull request builds are on, so this takes care of feature branches
-    - /^(release|hotfix)\/.*/
-# cache:
-#   directories:
-#     - "/tmp/holochain/target"
-#   yarn: true
-#   cargo: true
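
A note on the pattern that recurs throughout the index-handling patches above: index writes and cross-DNA `call_zome_method` calls no longer propagate failures with `?` (which would abort the whole host zome call); the `Result` is bound to a local and emitted via `hdk::prelude::debug!`, so the primary record operation can still succeed when a paired index zome or foreign cell is unreachable. A minimal sketch of the idea in isolation follows -- the `widget`/`container` zome names are hypothetical, and `create_record`, `create_index!`, `RecordAPIResult` and `construct_response` are assumed to behave as the hdk_records / hdk_semantic_indexes helpers do in the diffs above:

    // Hypothetical handler illustrating the capture-and-log error strategy.
    pub fn handle_create_widget(entry_def_id: String, widget: CreateRequest) -> RecordAPIResult<ResponseData> {
        // the record write itself still fails hard, via `?`
        let (revision_id, base_address, entry) = create_record(&entry_def_id, widget.to_owned())?;

        // the index write fails soft: bind the Result rather than `?`-propagating it...
        let r = create_index!(widget.container(widget.get_container()), container.widgets(&base_address));
        // ...and surface any error in the conductor's debug log instead of aborting
        hdk::prelude::debug!("handle_create_widget::container index {:?}", r);

        Ok(construct_response(&base_address, &revision_id, &entry))
    }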