Commit

alter code to conform to hc 0.3 standards
LeosPrograms committed Apr 3, 2024
1 parent beb2300 commit 0a50225
Showing 72 changed files with 179 additions and 163 deletions.
38 changes: 15 additions & 23 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions docs/repository-structure.md
@@ -113,7 +113,7 @@ Third-party code using the [interface struct crates](#1-interface-struct-crates-

**You should create your own customised zome definitions if:**

-- You wish to combine multiple separate hREA zomes in the same DNA and isolate the storage into different `entry_def` types.
+- You wish to combine multiple separate hREA zomes in the same DNA and isolate the storage into different `entry_type` types.
- You wish to define private variants of ValueFlows record types or customise sharding and validation rules.
- You wish to add handling of bespoke organisational logic and related records that needs to be validated tightly against REA data or coordinated as a coherent unit of information.

@@ -137,7 +137,7 @@ Each module exports an `EntryData` for the record information of relevance, and

In cases where records have standard CRUD features, `EntryData` is convertible `From<CreateRequest>` in its associated [interface struct crate](#1-interface-struct-crates-rust-interface); and implements `Updateable<UpdateRequest>` from the [`hdk_records`](#hdk_records) library. These traits are used by [library crates](#3-library-crates-system-core) to handle the underlying storage logic.

-It is unlikely that there should be a need to create customised versions of these files. For maintenance reasons it is much better to compose additional fields and functionality onto the REA record types as *new* `entry_defs` in zome crates if adding additional fields is a requirement for your use-case.
+It is unlikely that there should be a need to create customised versions of these files. For maintenance reasons it is much better to compose additional fields and functionality onto the REA record types as *new* `entry_types` in zome crates if adding additional fields is a requirement for your use-case.

#### 6. Storage constants (database internals)

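Note: the `entry_def` → `entry_type` terminology above tracks the macro rename in Holochain 0.3, where `#[hdk_entry_defs]` becomes `#[hdk_entry_types]`. A minimal sketch of declaring custom entry types in a zome crate under hc 0.3 — with a hypothetical `ExampleRecord` standing in for a real hREA record struct — might look like:

```rust
use hdk::prelude::*;

// Hypothetical record struct; hREA's real EntryData types live in the
// per-record storage crates.
#[hdk_entry_helper]
#[derive(Clone)]
pub struct ExampleRecord {
    pub note: Option<String>,
}

// hc 0.3: `#[hdk_entry_defs]` is renamed to `#[hdk_entry_types]`,
// and storage is declared as entry *types* rather than `entry_def`s.
#[hdk_entry_types]
#[unit_enum(UnitEntryTypes)]
pub enum EntryTypes {
    ExampleRecord(ExampleRecord),
}
```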
3 changes: 2 additions & 1 deletion lib/hdk_records/Cargo.toml
@@ -7,9 +7,10 @@ edition = "2018"
[dependencies]
thiserror = "1.0"
serde = { workspace = true }
chrono = { version = "=0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
chrono = { version = "0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
hdk = { workspace = true }
holo_hash = { workspace = true }
+zome_utils = { workspace = true }

serde_maybe_undefined = { path = "../serde_maybe_undefined" }
hdk_rpc_errors = { path = "../hdk_rpc_errors" }
21 changes: 12 additions & 9 deletions lib/hdk_records/src/anchored_record_helpers.rs
@@ -10,6 +10,7 @@
* @since 2021-09-15
*/
use hdk::prelude::*;
+use zome_utils::*;
use hdk_uuid_types::{
DnaAddressable, DnaIdentifiable,
};
@@ -63,11 +64,11 @@ fn read_entry_anchor_id(
link_type: impl LinkTypeFilterExt,
identity_path_address: &EntryHash,
) -> RecordAPIResult<String> {
-get_links(
+get_links(link_input(
identity_path_address.to_owned(),
link_type,
Some(LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))
-)?
+))?
.first()
.map(|link| {
let bytes = &link.tag.to_owned().into_inner()[3..];
@@ -83,11 +84,11 @@ fn read_anchor_identity(
link_type: impl LinkTypeFilterExt,
anchor_path_address: &EntryHash,
) -> RecordAPIResult<EntryHash> {
-get_links(
+get_links(link_input(
anchor_path_address.to_owned(),
link_type,
Some(LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))
-)?
+))?
.first()
.map(|l| Ok(l.target.to_owned().into_entry_hash().unwrap()))
.ok_or(SemanticIndexError::IndexNotFound((*anchor_path_address).clone()))?
@@ -130,7 +131,7 @@ pub fn read_anchored_record_entry<LT, T, R, B, A, I>(
pub fn create_anchored_record<LT, I, B, A, C, R, T, E, S, F, G>(
link_type: LT,
indexing_zome_name_from_config: F,
-entry_def_id: &S,
+entry_type_id: &S,
create_payload: C,
) -> RecordAPIResult<(SignedActionHashed, A, I)>
where S: AsRef<str> + std::fmt::Display,
@@ -157,7 +158,7 @@ pub fn create_anchored_record<LT, I, B, A, C, R, T, E, S, F, G>(
// write base record and identity index path
let (meta, entry_internal_id, entry_data) = create_record::<T, I, R,_,_,_,_,_,_>(
indexing_zome_name_from_config,
-&entry_def_id, create_payload,
+&entry_type_id, create_payload,
)?;

// link the hash identifier to a new manually assigned identifier so we can determine the anchor when reading & updating
@@ -217,11 +218,11 @@ pub fn update_anchored_record<LT, I, R, A, B, U, E>(
Some(new_id) => {
if new_id != final_id {
// clear any old identity path, ensuring the link structure is as expected
-let mut addrs = get_links(
+let mut addrs = get_links(link_input(
identity_hash.to_owned(),
[link_type.to_owned()],
Some(LinkTag::new(crate::identifiers::RECORD_IDENTITY_ANCHOR_LINK_TAG))
-)?;
+))?;
if addrs.len() != 1 {
return Err(DataIntegrityError::IndexNotFound(identity_hash.to_owned()));
}
@@ -306,7 +307,9 @@ fn link_if_not_linked<LT, E>(
ScopedLinkType: TryFrom<LT, Error = E>,
WasmError: From<E>,
{
-if false == get_links(origin_hash.to_owned(), link_type.to_owned(), Some(link_tag.to_owned()))?
+if false == get_links(link_input(
+origin_hash.to_owned(), link_type.to_owned(), Some(link_tag.to_owned())
+))?
.iter().any(|l| { l.target.to_owned().into_entry_hash().unwrap() == dest_hash })
{
Ok(Some(create_link(
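The `get_links(link_input(...))` substitutions in this file reflect the hc 0.3 `get_links` signature, which takes a single `GetLinksInput` rather than separate base/link-type/tag arguments. The `link_input` helper comes from the newly added `zome_utils` dependency; a rough sketch of what such a helper does — assuming the `GetLinksInputBuilder` API of hdk 0.3/0.4-era releases, not hREA's or zome_utils' actual implementation — is:

```rust
use hdk::prelude::*;

// Sketch: bundle base hash, link type filter and optional tag prefix into
// the single `GetLinksInput` expected by hc 0.3's `get_links`.
// The real `zome_utils::link_input` may differ in its details.
pub fn link_input(
    base: impl Into<AnyLinkableHash>,
    link_type: impl LinkTypeFilterExt,
    tag_prefix: Option<LinkTag>,
) -> GetLinksInput {
    let mut builder = GetLinksInputBuilder::try_new(base, link_type)
        .expect("link type filter should be convertible");
    if let Some(tag) = tag_prefix {
        builder = builder.tag_prefix(tag);
    }
    builder.build()
}

// Usage mirroring the calls in this commit (hypothetical link type and tag):
// let links = get_links(link_input(
//     identity_path_address.clone(),
//     LinkTypes::RecordIdentityAnchor,
//     Some(LinkTag::new("id_anchor")),
// ))?;
```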
10 changes: 5 additions & 5 deletions lib/hdk_records/src/entry_helpers.rs
@@ -53,7 +53,7 @@ pub (crate) fn try_decode_entry<T>(entry: Entry) -> RecordAPIResult<T>
pub fn get_entry_by_address<R>(address: &EntryHash) -> RecordAPIResult<(SignedActionHashed, R)>
where SerializedBytes: TryInto<R, Error = SerializedBytesError>,
{
-let maybe_result = get((*address).clone(), GetOptions { strategy: GetStrategy::Latest });
+let maybe_result = get((*address).clone(), GetOptions { strategy: GetStrategy::Network });
let record = match maybe_result {
Ok(Some(el)) => el,
_ => return Err(DataIntegrityError::EntryNotFound),
@@ -75,7 +75,7 @@ pub fn get_entry_by_address<R>(address: &EntryHash) -> RecordAPIResult<(SignedAc
pub fn get_entry_by_action<R>(address: &ActionHash) -> RecordAPIResult<(SignedActionHashed, R)>
where SerializedBytes: TryInto<R, Error = SerializedBytesError>,
{
-let maybe_result = get(address.clone(), GetOptions { strategy: GetStrategy::Latest });
+let maybe_result = get(address.clone(), GetOptions { strategy: GetStrategy::Network });
let record = match maybe_result {
Ok(Some(el)) => el,
_ => return Err(DataIntegrityError::EntryNotFound),
@@ -129,7 +129,7 @@ pub fn create_entry<T, I: Clone, E>(
let action_hash = hdk_create(create_input)?;

// retrieve written `Record` for returning signature information
-let maybe_result = get(action_hash, GetOptions { strategy: GetStrategy::Latest });
+let maybe_result = get(action_hash, GetOptions { strategy: GetStrategy::Network });
let record = match maybe_result {
Ok(Some(el)) => el,
_ => return Err(DataIntegrityError::EntryNotFound),
@@ -172,7 +172,7 @@ pub fn update_entry<'a, I: Clone, E>(
};
let updated_action = hdk_update(input)?;

-let maybe_result = get(updated_action, GetOptions { strategy: GetStrategy::Latest });
+let maybe_result = get(updated_action, GetOptions { strategy: GetStrategy::Network });
let record = match maybe_result {
Ok(Some(el)) => el,
_ => return Err(DataIntegrityError::EntryNotFound),
@@ -205,7 +205,7 @@ pub fn delete_entry<T>(
mod tests {
use super::*;

-#[hdk_entry_defs]
+#[hdk_entry_types]
#[derive(Clone)]
#[unit_enum(UnitTypes)]
enum EntryTypes {
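The `GetStrategy::Latest` → `GetStrategy::Network` changes throughout these helpers follow the renamed get strategies in hc 0.3, where the old `Latest`/`Content` variants are replaced by `Network`/`Local`. A minimal before/after illustration (hypothetical wrapper function):

```rust
use hdk::prelude::*;

fn fetch_record(address: ActionHash) -> ExternResult<Option<Record>> {
    // Pre-0.3 form (no longer compiles):
    //   get(address, GetOptions { strategy: GetStrategy::Latest })
    //
    // hc 0.3: `Network` goes to the DHT for the freshest data, while
    // `Local` consults only locally held data.
    get(address, GetOptions { strategy: GetStrategy::Network })
}
```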
6 changes: 3 additions & 3 deletions lib/hdk_records/src/identity_helpers.rs
@@ -65,7 +65,7 @@ pub fn infer_local_entry_identity<A>(
///
pub fn create_entry_identity<A, S, F, C>(
zome_name_from_config: F,
-entry_def_id: S,
+entry_type_id: S,
initial_address: &A,
) -> RecordAPIResult<bool>
where S: AsRef<str> + std::fmt::Display,
@@ -75,7 +75,7 @@ pub fn create_entry_identity<A, S, F, C>(
SerializedBytes: TryInto<C, Error = SerializedBytesError>,
{
// @see hdk_semantic_indexes_zome_derive::index_zome
-let append_fn_name = format!("record_new_{}", entry_def_id);
+let append_fn_name = format!("record_new_{}", entry_type_id);

// :TODO: use timestamp from written Record action rather than system time at time of RPC call
let now = sys_time()?.as_seconds_and_nanos();
@@ -89,5 +89,5 @@ pub fn create_entry_identity<A, S, F, C>(
address: initial_address.to_owned(),
timestamp: now_stamp,
},
-).map_err(|e| { DataIntegrityError::LocalIndexNotConfigured(entry_def_id.to_string(), e.to_string()) })?)
+).map_err(|e| { DataIntegrityError::LocalIndexNotConfigured(entry_type_id.to_string(), e.to_string()) })?)
}
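For context, the `record_new_{}` function name assembled above is dispatched to the configured indexing zome through an inter-zome call; the call itself is elided in this hunk. A rough sketch of such a dispatch under hdk 0.3 — with a hypothetical payload type standing in for hREA's actual RPC structs — might be:

```rust
use hdk::prelude::*;

// Hypothetical payload; hREA sends its own RPC struct carrying the new
// record's address and creation timestamp.
#[derive(Serialize, Deserialize, Debug)]
struct NewIndexPayload {
    address: EntryHash,
    timestamp: Timestamp,
}

fn notify_index_zome(
    zome: ZomeName,
    fn_name: String,
    payload: NewIndexPayload,
) -> ExternResult<ZomeCallResponse> {
    // Call a sibling zome in the same cell; hREA resolves the zome name
    // from DNA configuration rather than hard-coding it.
    call(
        CallTargetCell::Local,
        zome,
        fn_name.into(),
        None, // no capability secret needed within the same cell
        payload,
    )
}
```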
8 changes: 4 additions & 4 deletions lib/hdk_records/src/metadata_helpers.rs
@@ -51,7 +51,7 @@ pub fn read_revision_metadata_abbreviated(header: &SignedActionHashed) -> Record
*/
pub fn read_revision_metadata_full(header: &SignedActionHashed) -> RecordAPIResult<RecordMeta>
{
-match get_details(get_action_hash(header), GetOptions { strategy: GetStrategy::Latest }) {
+match get_details(get_action_hash(header), GetOptions { strategy: GetStrategy::Network }) {
Ok(Some(Details::Record(details))) => match details.validation_status {
ValidationStatus::Valid => {
// find previous Element first so we can reuse it to recurse backwards to original
@@ -138,15 +138,15 @@ fn get_previous_revision(signed_action: &SignedActionHashed) -> RecordAPIResult<
},
// this is an Update, so previous revision exists
SignedHashed { hashed: HoloHashed { content: Action::Update(update), .. }, .. } => {
-let previous_record = get(update.original_action_address.clone(), GetOptions { strategy: GetStrategy::Latest })?;
+let previous_record = get(update.original_action_address.clone(), GetOptions { strategy: GetStrategy::Network })?;
match previous_record {
None => Ok(None),
Some(el) => Ok(Some(el)),
}
},
// this is a Delete, so previous revision is what was deleted
SignedHashed { hashed: HoloHashed { content: Action::Delete(delete), .. }, .. } => {
-let previous_record = get(delete.deletes_address.clone(), GetOptions { strategy: GetStrategy::Latest })?;
+let previous_record = get(delete.deletes_address.clone(), GetOptions { strategy: GetStrategy::Network })?;
match previous_record {
None => Ok(None),
Some(el) => Ok(Some(el)),
@@ -184,7 +184,7 @@ fn find_latest_revision(updates: &[SignedActionHashed], revisions_until: u32) ->
sortlist.sort_by_key(by_action_time);
let most_recent = sortlist.last().unwrap().to_owned();

-match get_details(get_action_hash(&most_recent), GetOptions { strategy: GetStrategy::Latest }) {
+match get_details(get_action_hash(&most_recent), GetOptions { strategy: GetStrategy::Network }) {
Ok(Some(Details::Record(details))) => match details.validation_status {
ValidationStatus::Valid => match details.updates.len() {
// found latest revision
8 changes: 4 additions & 4 deletions lib/hdk_records/src/record_helpers.rs
@@ -43,9 +43,9 @@ use crate::{
/// changes outlined in issue https://github.com/h-REA/hREA/issues/196
///
pub fn get_latest_action_hash(entry_hash: EntryHash) -> RecordAPIResult<ActionHash> {
-match get_details(entry_hash.clone(), GetOptions { strategy: GetStrategy::Latest })? {
+match get_details(entry_hash.clone(), GetOptions { strategy: GetStrategy::Network })? {
Some(Details::Entry(details)) => match details.entry_dht_status {
-metadata::EntryDhtStatus::Live => match details.updates.len() {
+EntryDhtStatus::Live => match details.updates.len() {
0 => {
// https://docs.rs/hdk/latest/hdk/prelude/struct.EntryDetails.html#structfield.actions
Ok(get_action_hash(details.actions.first().unwrap()))
@@ -140,7 +140,7 @@ pub fn read_record_entry<T, R, B>(
///
pub fn create_record<T, I, R: Clone, B, C, E, S, F, G>(
indexing_zome_name_from_config: F,
-entry_def_id: S,
+entry_type_id: S,
create_payload: C,
) -> RecordAPIResult<(SignedActionHashed, B, I)>
where S: AsRef<str> + std::fmt::Display,
@@ -170,7 +170,7 @@ pub fn create_record<T, I, R: Clone, B, C, E, S, F, G>(
let identity = B::new(dna_info()?.hash, entry_hash.clone());
create_entry_identity(
indexing_zome_name_from_config,
-&entry_def_id, &identity,
+&entry_type_id, &identity,
)?;

Ok((meta, identity, entry_data))
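The truncated `get_latest_action_hash` hunk above follows a common pattern: resolve an `EntryHash` to its most recent `ActionHash` by inspecting entry details. A condensed, illustrative sketch of that pattern under hc 0.3 (assuming `EntryDhtStatus` is exported by the hdk prelude, as the diff suggests; hREA's real implementation recurses through update chains and reports richer errors):

```rust
use hdk::prelude::*;

// Illustration only: pick the newest revision for an entry.
fn latest_action_for_entry(entry_hash: EntryHash) -> ExternResult<Option<ActionHash>> {
    match get_details(entry_hash, GetOptions { strategy: GetStrategy::Network })? {
        Some(Details::Entry(details))
            if matches!(details.entry_dht_status, EntryDhtStatus::Live) =>
        {
            if details.updates.is_empty() {
                // No updates: the original create action is the latest revision.
                return Ok(details.actions.first().map(|a| a.action_address().clone()));
            }
            // Otherwise take the most recently timestamped update.
            let mut updates = details.updates.clone();
            updates.sort_by_key(|u| u.action().timestamp());
            Ok(updates.last().map(|u| u.action_address().clone()))
        }
        _ => Ok(None),
    }
}
```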
2 changes: 1 addition & 1 deletion lib/hdk_semantic_indexes/rpc/Cargo.toml
@@ -6,7 +6,7 @@

[dependencies]
serde = { workspace = true }
chrono = { version = "=0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
chrono = { version = "0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
holochain_serialized_bytes = { workspace = true }
hdk_rpc_errors = { path = "../../hdk_rpc_errors" }
hdk_uuid_types = { path = "../../hdk_uuid_types" }
3 changes: 2 additions & 1 deletion lib/hdk_semantic_indexes/zome/Cargo.toml
@@ -6,8 +6,9 @@

[dependencies]
serde = { workspace = true }
chrono = { version = "=0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
chrono = { version = "0.4.22", default-features = false, features = ["clock", "std", "oldtime", "serde"] }
hdk = { workspace = true }
+zome_utils = { workspace = true }

hdk_semantic_indexes_error = { path = "../error" }
hdk_semantic_indexes_zome_rpc = { path = "../rpc" }
(Remaining changed files not shown.)
